From e485f2a8ae95664b03764af8702a8e4bebcadb7e Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Tue, 22 Apr 2025 15:09:16 +0300 Subject: [PATCH 001/176] test CI --- .../emails/addons_boa_job_complete.html.mako | 24 ------- .../emails/addons_boa_job_failure.html.mako | 65 ------------------- .../emails/conference_deprecation.html.mako | 17 ----- .../emails/confirm_agu_conference.html.mako | 26 -------- .../confirm_agu_conference_2023.html.mako | 25 ------- .../emails/institution_deactivation.html.mako | 28 -------- .../emails/quickfiles_migrated.html.mako | 31 --------- ...n_bulk_upload_failure_duplicates.html.mako | 28 -------- ...ration_bulk_upload_product_owner.html.mako | 23 ------- ...n_bulk_upload_unexpected_failure.html.mako | 21 ------ ...age_institutional_access_request.html.mako | 26 -------- 11 files changed, 314 deletions(-) delete mode 100644 website/templates/emails/addons_boa_job_complete.html.mako delete mode 100644 website/templates/emails/addons_boa_job_failure.html.mako delete mode 100644 website/templates/emails/conference_deprecation.html.mako delete mode 100644 website/templates/emails/confirm_agu_conference.html.mako delete mode 100644 website/templates/emails/confirm_agu_conference_2023.html.mako delete mode 100644 website/templates/emails/institution_deactivation.html.mako delete mode 100644 website/templates/emails/quickfiles_migrated.html.mako delete mode 100644 website/templates/emails/registration_bulk_upload_failure_duplicates.html.mako delete mode 100644 website/templates/emails/registration_bulk_upload_product_owner.html.mako delete mode 100644 website/templates/emails/registration_bulk_upload_unexpected_failure.html.mako delete mode 100644 website/templates/emails/user_message_institutional_access_request.html.mako diff --git a/website/templates/emails/addons_boa_job_complete.html.mako b/website/templates/emails/addons_boa_job_complete.html.mako deleted file mode 100644 index 738b5cb04eb..00000000000 --- a/website/templates/emails/addons_boa_job_complete.html.mako +++ /dev/null @@ -1,24 +0,0 @@ -<%inherit file="notify_base.mako" /> -<%def name="content()"> - - -

Your submission to Boa [${job_id}] is successful

- - - - - Hello ${fullname},
-
- Your submission [${job_id}] of file [${query_file_full_path}] to Boa was successful.
-
- The result has been uploaded to OSF and stored in file [${output_file_name}] in the same folder where you submitted the file. - Visit your project to access the result.
-
- In addition, the Boa job ID for this submission is [${job_id}]. Visit Boa's job list page for more information.
-
- Sincerely,
-
- The OSF Team
- - - diff --git a/website/templates/emails/addons_boa_job_failure.html.mako b/website/templates/emails/addons_boa_job_failure.html.mako deleted file mode 100644 index 5ed46a042d7..00000000000 --- a/website/templates/emails/addons_boa_job_failure.html.mako +++ /dev/null @@ -1,65 +0,0 @@ -<%inherit file="notify_base.mako" /> -<%def name="content()"> - - -

Your submission to Boa has failed

- - - - - Hello ${fullname},
-
- Your submission of file [${query_file_full_path}] from your OSF project to Boa has failed.
-
- % if code == 1: - OSF can not log in to Boa. Please fix your Boa addon configuration on OSF and try again.
-
- For details, visit Boa's job list page. The Boa job ID for this submission is [${job_id}].
- % elif code == 2: - The query you submitted encountered compile or run-time error. Please fix your query file and try again.
-
- For details, visit Boa's job list page. The Boa job ID for this submission is [${job_id}].
- % elif code == 3: - Your query has completed on Boa and the job ID is [${job_id}].
-
- However, we were not able to upload the result to your OSF project because an existing output file [${output_file_name}] already exists.
-
- Please either rename your query file or remove the existing result file and try again.
-
- In addition, you can visit Boa's job list page to retrieve the results.
- % elif code == 4: - Your query has completed on Boa and the job ID is [${job_id}]. However, we were not able to upload the result to OSF.
-
- Visit Boa's job list page to retrieve the results.
- % elif code == 5: - Your query has completed on Boa and the job ID is [${job_id}]. However, we were not able to retrieve the output from Boa.
-
- A common cause of this failure is that the output is empty. Visit Boa's job list page to check if the output is empty.
-
- If you believe this is in error, contact Boa Support at ${boa_support_email}.
- % elif code == 6: - OSF cannot submit your query file to Boa since it is too large: [${file_size} Bytes] is over the maximum allowed threshold [${max_file_size} Bytes].
-
- If you believe this is in error, contact OSF Help Desk at ${osf_support_email}.
- % elif code == 7: - It's been ${max_job_wait_hours} hours since we submitted your query job [${job_id}] to Boa.
-
- However, OSF haven't received confirmation from Boa that the job has been finished.
-
- Visit Boa's job list page to check it's status.
-
- If you believe this is in error, contact OSF Help Desk at ${osf_support_email}.
- % else: - OSF encountered an unexpected error when connecting to Boa. Please try again later.
-
- If this issue persists, contact OSF Help Desk at ${osf_support_email} and attach the following error message.
-
- ${message}
- % endif -
- Sincerely,
-
- The OSF Team
- - - diff --git a/website/templates/emails/conference_deprecation.html.mako b/website/templates/emails/conference_deprecation.html.mako deleted file mode 100644 index 4453c4db36d..00000000000 --- a/website/templates/emails/conference_deprecation.html.mako +++ /dev/null @@ -1,17 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
-
- You recently attempted to interact with the Meeting service via email, but this service has been discontinued and is no longer available for new interactions.
-
- Existing meetings and past submissions remain unchanged. If you have any questions or need further assistance, please contact our support team at [ ${support_email} ].
-
- Sincerely yours,
-
- The OSF Robot
- - - \ No newline at end of file diff --git a/website/templates/emails/confirm_agu_conference.html.mako b/website/templates/emails/confirm_agu_conference.html.mako deleted file mode 100644 index 603e2c39e8d..00000000000 --- a/website/templates/emails/confirm_agu_conference.html.mako +++ /dev/null @@ -1,26 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${user.fullname},
-
- - Thank you for joining us at the AGU Open Science Pavilion, and welcome to the Open Science Framework (OSF). - - We are pleased to offer a special AGU attendees exclusive 1:1 consultation to continue our conversation and to help - you get oriented on the OSF. This is an opportunity for us to show you useful OSF features, talk about - open science in Earth and space sciences, and for you to ask any questions you may have. - You can sign up to participate by completing this form, and a member of our team will be in touch to - determine your availability: -
- https://docs.google.com/forms/d/e/1FAIpQLSeJ23YPaEMdbLY1OqbcP85Tt6rhLpFoOtH0Yg4vY_wSKULRcw/viewform?usp=sf_link -

- To confirm your OSF account, please verify your email address by visiting this link:
-
- ${confirmation_url}
-
- From the team at the Center for Open Science
- - - diff --git a/website/templates/emails/confirm_agu_conference_2023.html.mako b/website/templates/emails/confirm_agu_conference_2023.html.mako deleted file mode 100644 index 429ec911410..00000000000 --- a/website/templates/emails/confirm_agu_conference_2023.html.mako +++ /dev/null @@ -1,25 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${user.fullname},
-
- - Thank you for joining us at the AGU Open Science Pavilion, and welcome to the Open Science Framework. - - We are pleased to offer a special AGU attendees exclusive community call to continue our conversation and to help - you get oriented on the OSF. This is an opportunity for us to show you useful OSF features, talk about - open science in your domains, and for you to ask any questions you may have. - You can register for this free event here: -
- https://cos-io.zoom.us/meeting/register/tZAuceCvrjotHNG3n6XzLFDv1Rnn2hkjczHr -

- To confirm your OSF account, please verify your email address by visiting this link:
-
- ${confirmation_url}
-
- From the team at the Center for Open Science
- - - diff --git a/website/templates/emails/institution_deactivation.html.mako b/website/templates/emails/institution_deactivation.html.mako deleted file mode 100644 index f1f64a2cba9..00000000000 --- a/website/templates/emails/institution_deactivation.html.mako +++ /dev/null @@ -1,28 +0,0 @@ -<%inherit file="notify_base.mako" /> -<%def name="content()"> - - -

Your OSF login has changed - here's what you need to know!

- - - - - Hello, ${user.fullname},
-
- Starting today, you can no longer sign into OSF using your institution's SSO. However, you will not lose access to your account or your OSF content.
-
- You can still access your OSF account using your institutional email by adding a password, or using your ORCID credentials (if your institutional email address is associated with your ORCID record). - We also recommend having multiple ways to access your account by connecting your ORCID - or alternate email addresses with your account.
-
- Click here to set a password
-
- If you have any issues, questions or need our help, contact ${osf_support_email} and we will be happy to assist. - You may find this help guide useful.
-
- Sincerely,
-
- The OSF Team
- - - diff --git a/website/templates/emails/quickfiles_migrated.html.mako b/website/templates/emails/quickfiles_migrated.html.mako deleted file mode 100644 index 94948b2a066..00000000000 --- a/website/templates/emails/quickfiles_migrated.html.mako +++ /dev/null @@ -1,31 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - <%!from website import settings%> - Hello ${user.fullname}, -

- The Quick Files feature has been discontinued and your files have been migrated into an OSF Project. You can find the new Project on your My Projects page, entitled "${user.fullname}'s Quick Files". Your favorite Quick Files features are still present; you can view, download, and share your files from their new location. Your file URL's will also continue to resolve properly, and you can still move your files between Projects by linking your Projects. Contact ${settings.OSF_CONTACT_EMAIL} if you have any questions or concerns. -

-

- Thank you for partnering with us as a stakeholder in open science and in the success of the infrastructure that help make it possible. -

-

- The Center for Open Science Team -

-

- Sincerely,
- The OSF Team -

-

- Want more information? Visit ${settings.DOMAIN} to learn about the OSF, - or https://cos.io/ for information about its supporting organization, - the Center for Open Science. -

-

- Questions? Email ${settings.OSF_CONTACT_EMAIL} -

- - - diff --git a/website/templates/emails/registration_bulk_upload_failure_duplicates.html.mako b/website/templates/emails/registration_bulk_upload_failure_duplicates.html.mako deleted file mode 100644 index 1c5431b9f32..00000000000 --- a/website/templates/emails/registration_bulk_upload_failure_duplicates.html.mako +++ /dev/null @@ -1,28 +0,0 @@ -<%inherit file="notify_base.mako" /> -<%def name="content()"> - - -

Registrations Were Not Bulk Uploaded to your Community's Registry

- - - - - Hello ${fullname},
-
- All ${count} registrations could not be uploaded due to duplicate rows found either within the uploaded csv file - or in our system. Duplicates are listed below. Review the file and try to upload the registrations again after - removing duplicates. Contact the Help Desk at ${osf_support_email} if - you continue to have issues.
-
- -
- Sincerely,
-
- The OSF Team
- - - diff --git a/website/templates/emails/registration_bulk_upload_product_owner.html.mako b/website/templates/emails/registration_bulk_upload_product_owner.html.mako deleted file mode 100644 index 879b215475f..00000000000 --- a/website/templates/emails/registration_bulk_upload_product_owner.html.mako +++ /dev/null @@ -1,23 +0,0 @@ -<%inherit file="notify_base.mako" /> -<%def name="content()"> - - -

Registry Could Not Bulk Upload Registrations

- - - - - Hello,
-
- [${user}] from registry [${provider_name}] attempted to upload the registrations from a csv file. Review the - file and inform the engineers of the issue. The registry has been notified of the problem and is waiting on a - response. Below is the error message provided by the system.
-
- ${message}
-
- Sincerely,
-
- The OSF Team
- - - diff --git a/website/templates/emails/registration_bulk_upload_unexpected_failure.html.mako b/website/templates/emails/registration_bulk_upload_unexpected_failure.html.mako deleted file mode 100644 index 0b1b032d759..00000000000 --- a/website/templates/emails/registration_bulk_upload_unexpected_failure.html.mako +++ /dev/null @@ -1,21 +0,0 @@ -<%inherit file="notify_base.mako" /> -<%def name="content()"> - - -

Registrations Were Not Bulk Uploaded to your Community's Registry

- - - - - Hello ${fullname},
-
- Your registrations were not uploaded. Our team was notified of the issue and will follow up after they start - looking into the issue. Contact the Help Desk at ${osf_support_email} - if you continue to have questions.
-
- Sincerely,
-
- The OSF Team
- - - diff --git a/website/templates/emails/user_message_institutional_access_request.html.mako b/website/templates/emails/user_message_institutional_access_request.html.mako deleted file mode 100644 index 1e314f91e4e..00000000000 --- a/website/templates/emails/user_message_institutional_access_request.html.mako +++ /dev/null @@ -1,26 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - <%!from website import settings%> - Hello ${recipient.fullname}, -

- This message is coming from an Institutional administrator within your Institution. -

- % if message_text: -

- ${message_text} -

- % endif -

- Want more information? Visit ${settings.DOMAIN} to learn about OSF, or - https://cos.io/ for information about its supporting organization, the Center - for Open Science. -

-

- Questions? Email ${settings.OSF_CONTACT_EMAIL} -

- - - From a968f71f1d5af17c8bd23fc4bda21935c7dd57db Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 1 May 2025 13:37:46 +0300 Subject: [PATCH 002/176] remove osf groups --- admin/base/urls.py | 1 - admin/common_auth/forms.py | 2 +- admin/nodes/templatetags/node_extras.py | 5 - admin/osf_groups/__init__.py | 0 admin/osf_groups/forms.py | 6 - admin/osf_groups/urls.py | 10 - admin/osf_groups/views.py | 77 -- admin/templates/base.html | 11 - admin/templates/osf_groups/osf_groups.html | 98 -- .../templates/osf_groups/osf_groups_list.html | 40 - admin/templates/osf_groups/search.html | 33 - admin_tests/osf_groups/__init__.py | 0 admin_tests/osf_groups/test_views.py | 67 - api/base/urls.py | 1 - api/nodes/permissions.py | 3 +- api/nodes/serializers.py | 122 +- api/nodes/urls.py | 2 - api/nodes/views.py | 113 -- api/osf_groups/__init__.py | 0 api/osf_groups/permissions.py | 39 - api/osf_groups/serializers.py | 200 --- api/osf_groups/urls.py | 12 - api/osf_groups/views.py | 243 ---- api/users/urls.py | 1 - api/users/views.py | 30 - ...t_draft_registration_contributor_detail.py | 16 - ...est_draft_registration_contributor_list.py | 14 - .../nodes/views/test_node_children_list.py | 28 - api_tests/nodes/views/test_node_citations.py | 8 - .../nodes/views/test_node_comments_list.py | 25 - ...ode_contributors_and_group_members_list.py | 22 +- .../views/test_node_contributors_detail.py | 54 - .../test_node_contributors_detail_update.py | 45 - .../views/test_node_contributors_list.py | 53 - api_tests/nodes/views/test_node_detail.py | 124 -- .../test_node_draft_registration_list.py | 12 +- api_tests/nodes/views/test_node_files_list.py | 23 - api_tests/nodes/views/test_node_forks_list.py | 23 - api_tests/nodes/views/test_node_groups.py | 454 ------- .../test_node_implicit_contributors_list.py | 14 - .../views/test_node_institutions_list.py | 10 +- .../nodes/views/test_node_linked_nodes.py | 29 - .../views/test_node_linked_registrations.py | 34 - .../nodes/views/test_node_links_detail.py | 19 - api_tests/nodes/views/test_node_links_list.py | 29 - api_tests/nodes/views/test_node_list.py | 158 --- api_tests/nodes/views/test_node_logs.py | 8 - .../views/test_node_registrations_list.py | 8 - .../views/test_node_reorder_components.py | 11 +- api_tests/nodes/views/test_node_settings.py | 53 +- api_tests/nodes/views/test_node_wiki_list.py | 25 - api_tests/osf_groups/__init__.py | 0 api_tests/osf_groups/views/__init__.py | 0 .../osf_groups/views/test_osf_group_detail.py | 209 --- .../views/test_osf_group_members_detail.py | 259 ---- .../views/test_osf_group_members_list.py | 626 --------- .../osf_groups/views/test_osf_groups_list.py | 151 --- .../views/test_registration_detail.py | 22 - .../views/test_registration_list.py | 20 - api_tests/sparse/test_sparse_node_list.py | 10 - .../users/serializers/test_serializers.py | 15 +- api_tests/users/views/test_user_list.py | 42 - api_tests/users/views/test_user_nodes_list.py | 40 - .../users/views/test_user_osf_groups_list.py | 119 -- .../views/test_user_registrations_list.py | 20 - framework/auth/oauth_scopes.py | 16 +- osf/admin.py | 8 +- ...jectpermission_unique_together_and_more.py | 65 + osf/models/__init__.py | 3 - osf/models/node.py | 46 +- osf/models/osf_group.py | 576 --------- osf/models/osf_grouplog.py | 52 - osf/models/user.py | 52 +- osf_tests/factories.py | 9 - osf_tests/test_analytics.py | 22 +- osf_tests/test_comment.py | 16 - osf_tests/test_draft_registration.py | 15 +- osf_tests/test_elastic_search.py | 72 -- osf_tests/test_node.py | 161 +-- 
osf_tests/test_osfgroup.py | 1124 ----------------- osf_tests/test_user.py | 105 +- tasks/__init__.py | 1 - tests/test_misc_views.py | 19 - tests/test_project_creation_view.py | 10 - tests/test_registrations/base.py | 5 +- tests/test_registrations/test_retractions.py | 15 +- tests/test_serializers.py | 18 - tests/test_user_profile_view.py | 1 - website/osf_groups/__init__.py | 0 website/osf_groups/signals.py | 7 - website/osf_groups/views.py | 135 -- website/project/decorators.py | 6 +- website/project/views/contributor.py | 20 +- website/project/views/node.py | 5 - website/search/elastic_search.py | 20 - website/search_migration/migrate.py | 12 +- website/views.py | 17 - 97 files changed, 100 insertions(+), 6481 deletions(-) delete mode 100644 admin/osf_groups/__init__.py delete mode 100644 admin/osf_groups/forms.py delete mode 100644 admin/osf_groups/urls.py delete mode 100644 admin/osf_groups/views.py delete mode 100644 admin/templates/osf_groups/osf_groups.html delete mode 100644 admin/templates/osf_groups/osf_groups_list.html delete mode 100644 admin/templates/osf_groups/search.html delete mode 100644 admin_tests/osf_groups/__init__.py delete mode 100644 admin_tests/osf_groups/test_views.py delete mode 100644 api/osf_groups/__init__.py delete mode 100644 api/osf_groups/permissions.py delete mode 100644 api/osf_groups/serializers.py delete mode 100644 api/osf_groups/urls.py delete mode 100644 api/osf_groups/views.py delete mode 100644 api_tests/nodes/views/test_node_groups.py delete mode 100644 api_tests/osf_groups/__init__.py delete mode 100644 api_tests/osf_groups/views/__init__.py delete mode 100644 api_tests/osf_groups/views/test_osf_group_detail.py delete mode 100644 api_tests/osf_groups/views/test_osf_group_members_detail.py delete mode 100644 api_tests/osf_groups/views/test_osf_group_members_list.py delete mode 100644 api_tests/osf_groups/views/test_osf_groups_list.py delete mode 100644 api_tests/users/views/test_user_osf_groups_list.py create mode 100644 osf/migrations/0030_alter_osfgroupgroupobjectpermission_unique_together_and_more.py delete mode 100644 osf/models/osf_group.py delete mode 100644 osf/models/osf_grouplog.py delete mode 100644 osf_tests/test_osfgroup.py delete mode 100644 website/osf_groups/__init__.py delete mode 100644 website/osf_groups/signals.py delete mode 100644 website/osf_groups/views.py diff --git a/admin/base/urls.py b/admin/base/urls.py index 332ddcff88f..d19d2dc638b 100644 --- a/admin/base/urls.py +++ b/admin/base/urls.py @@ -30,7 +30,6 @@ re_path(r'^maintenance/', include('admin.maintenance.urls', namespace='maintenance')), re_path(r'^meetings/', include('admin.meetings.urls', namespace='meetings')), re_path(r'^metrics/', include('admin.metrics.urls', namespace='metrics')), - re_path(r'^osf_groups/', include('admin.osf_groups.urls', namespace='osf_groups')), re_path(r'^management/', include('admin.management.urls', namespace='management')), re_path(r'^internet_archive/', include('admin.internet_archive.urls', namespace='internet_archive')), re_path(r'^schema_responses/', include('admin.schema_responses.urls', namespace='schema_responses')), diff --git a/admin/common_auth/forms.py b/admin/common_auth/forms.py index a13905bd572..aed87e67a6d 100644 --- a/admin/common_auth/forms.py +++ b/admin/common_auth/forms.py @@ -22,7 +22,7 @@ class UserRegistrationForm(forms.Form): # TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups group_perms = forms.ModelMultipleChoiceField( - 
queryset=Group.objects.exclude(Q(name__startswith='collections_') | Q(name__startswith='reviews_') | Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='osfgroup_') | Q(name__startswith='draft_registration_')), + queryset=Group.objects.exclude(Q(name__startswith='collections_') | Q(name__startswith='reviews_') | Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='draft_registration_')), required=False, widget=forms.CheckboxSelectMultiple ) diff --git a/admin/nodes/templatetags/node_extras.py b/admin/nodes/templatetags/node_extras.py index 801c7004f39..4fb9606f22e 100644 --- a/admin/nodes/templatetags/node_extras.py +++ b/admin/nodes/templatetags/node_extras.py @@ -39,11 +39,6 @@ def reverse_user(user): return reverse('users:user', kwargs={'guid': user._id}) -@register.filter -def reverse_osf_group(value): - return reverse('osf_groups:osf_group', kwargs={'id': value._id}) - - @register.filter def reverse_registration_provider(value): return reverse('registration_providers:detail', kwargs={'registration_provider_id': value.provider.id}) diff --git a/admin/osf_groups/__init__.py b/admin/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/admin/osf_groups/forms.py b/admin/osf_groups/forms.py deleted file mode 100644 index 3e52ec2de9e..00000000000 --- a/admin/osf_groups/forms.py +++ /dev/null @@ -1,6 +0,0 @@ -from django import forms - - -class OSFGroupSearchForm(forms.Form): - name = forms.CharField(label='name', required=False) - id = forms.CharField(label='id', required=False) diff --git a/admin/osf_groups/urls.py b/admin/osf_groups/urls.py deleted file mode 100644 index 15250df2012..00000000000 --- a/admin/osf_groups/urls.py +++ /dev/null @@ -1,10 +0,0 @@ -from django.urls import re_path -from admin.osf_groups import views - -app_name = 'admin' - -urlpatterns = [ - re_path(r'^$', views.OSFGroupsListView.as_view(), name='osf_groups_list'), - re_path(r'^search/$', views.OSFGroupsFormView.as_view(), name='search'), - re_path(r'^(?P[a-z0-9]+)/$', views.OSFGroupsView.as_view(), name='osf_group'), -] diff --git a/admin/osf_groups/views.py b/admin/osf_groups/views.py deleted file mode 100644 index 8e3a9345709..00000000000 --- a/admin/osf_groups/views.py +++ /dev/null @@ -1,77 +0,0 @@ -from django.contrib.auth.mixins import PermissionRequiredMixin -from django.urls import reverse -from django.views.generic import FormView, ListView - -from osf.models import OSFGroup -from admin.osf_groups.forms import OSFGroupSearchForm -from admin.base.views import GuidView - - -class OSFGroupsView(PermissionRequiredMixin, GuidView): - """ Allow authorized admin user to view an osf group - """ - template_name = 'osf_groups/osf_groups.html' - context_object_name = 'group' - permission_required = 'osf.view_group' - raise_exception = True - - def get_object(self, queryset=None): - id = self.kwargs.get('id') - osf_group = OSFGroup.objects.get(_id=id) - return osf_group - - -class OSFGroupsFormView(PermissionRequiredMixin, FormView): - template_name = 'osf_groups/search.html' - object_type = 'osf_group' - permission_required = 'osf.view_group' - raise_exception = True - form_class = OSFGroupSearchForm - - def __init__(self): - self.redirect_url = None - super().__init__() - - def form_valid(self, form): - id = form.data.get('id').strip() - name = form.data.get('name').strip() - self.redirect_url = reverse('osf_groups:search') - - if id: - self.redirect_url = reverse('osf_groups:osf_group', kwargs={'id': id}) - elif 
name: - self.redirect_url = reverse('osf_groups:osf_groups_list',) + f'?name={name}' - - return super().form_valid(form) - - @property - def success_url(self): - return self.redirect_url - - -class OSFGroupsListView(PermissionRequiredMixin, ListView): - """ Allow authorized admin user to view list of osf groups - """ - template_name = 'osf_groups/osf_groups_list.html' - paginate_by = 10 - paginate_orphans = 1 - permission_required = 'osf.view_group' - raise_exception = True - - def get_queryset(self): - name = self.request.GET.get('name') - if name: - return OSFGroup.objects.filter(name__icontains=name) - - return OSFGroup.objects.all() - - def get_context_data(self, **kwargs): - query_set = kwargs.pop('object_list', self.object_list) - page_size = self.get_paginate_by(query_set) - paginator, page, query_set, is_paginated = self.paginate_queryset( - query_set, page_size) - - return { - 'groups': query_set, - 'page': page, - } diff --git a/admin/templates/base.html b/admin/templates/base.html index 2fdb5e7cb12..a4e6d3b52e3 100644 --- a/admin/templates/base.html +++ b/admin/templates/base.html @@ -300,17 +300,6 @@ {% if perms.osf.view_management%}
  • Management Commands
  • {% endif %} - {% if perms.osf.view_osf_groups %} -
  • - OSF Groups -
  • - - {% endif %} {% if perms.osf.view_scheduledbanner %}
  • Banners diff --git a/admin/templates/osf_groups/osf_groups.html b/admin/templates/osf_groups/osf_groups.html deleted file mode 100644 index fefcfd759d9..00000000000 --- a/admin/templates/osf_groups/osf_groups.html +++ /dev/null @@ -1,98 +0,0 @@ -{% extends 'base.html' %} -{% load static %} -{% block title %} -{% load node_extras %} -OSF Group -{% endblock title %} -{% block content %} -
    - -
    -
    -

    OSF Group Detail

    -
    -
    -
    -
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - {% if group.members %} - - {% endif %} - - {% if group.nodes %} - - - - - - {% endif %} - -
    FieldValue
    _id{{ group.id }}
    Name{{ group.name }}
    Date Created{{ group.created }}
    Date Modified{{ group.modified }}
    Creator{{ group.creator.name }}
    Managers - - - -
    Members - - - -
    Nodes - - - -
    -
    -
    -{% endblock content %} diff --git a/admin/templates/osf_groups/osf_groups_list.html b/admin/templates/osf_groups/osf_groups_list.html deleted file mode 100644 index 48bf3b0eb10..00000000000 --- a/admin/templates/osf_groups/osf_groups_list.html +++ /dev/null @@ -1,40 +0,0 @@ -{% extends "base.html" %} -{% load node_extras %} - -{% load static %} -{% block title %} - OSF Groups -{% endblock title %} -{% block content %} -

    List of Groups

    -{% include "util/pagination.html" with items=page status=status %} - - - - - - - - - - - {% for group in groups %} - - - - - - - {% endfor %} - -
    NameDate CreatedDate ModifiedCreator
    - {{ group.name }} - - {{ group.created }} - - {{ group.modified }} - - {{ group.creator.name }} -
    - -{% endblock content %} diff --git a/admin/templates/osf_groups/search.html b/admin/templates/osf_groups/search.html deleted file mode 100644 index e11d19f20c7..00000000000 --- a/admin/templates/osf_groups/search.html +++ /dev/null @@ -1,33 +0,0 @@ -{% extends 'base.html' %} -{% load static %} -{% block title %} -OSF Groups Search -{% endblock title %} -{% block content %} -
    -
    -
      - {% for message in messages %} - {{ message }} - {% endfor %} -
    -
    -
    -
    - {% csrf_token %} - {% if form.errors %} -
    {{ form.errors }}
    - {% endif %} -
    - - {{ form.id }} -
    -
    - - {{ form.name }} -
    - -
    -
    -
    -{% endblock content %} diff --git a/admin_tests/osf_groups/__init__.py b/admin_tests/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/admin_tests/osf_groups/test_views.py b/admin_tests/osf_groups/test_views.py deleted file mode 100644 index 12063a93c05..00000000000 --- a/admin_tests/osf_groups/test_views.py +++ /dev/null @@ -1,67 +0,0 @@ -from admin.osf_groups.views import ( - OSFGroupsListView, - OSFGroupsFormView -) -from admin_tests.utilities import setup_log_view -from django.test import RequestFactory - -from tests.base import AdminTestCase -from osf_tests.factories import UserFactory, OSFGroupFactory - - -class TestOSFGroupsListView(AdminTestCase): - - def setUp(self): - super().setUp() - self.user = UserFactory() - self.group = OSFGroupFactory(name='Brian Dawkins', creator=self.user) - self.group2 = OSFGroupFactory(name='Brian Westbrook', creator=self.user) - self.group3 = OSFGroupFactory(name='Darren Sproles', creator=self.user) - self.request = RequestFactory().post('/fake_path') - self.view = OSFGroupsListView() - - def test_get_default_queryset(self): - view = setup_log_view(self.view, self.request) - - queryset = view.get_queryset() - - assert len(queryset) == 3 - - assert self.group in queryset - assert self.group2 in queryset - assert self.group3 in queryset - - def test_get_queryset_by_name(self): - request = RequestFactory().post('/fake_path/?name=Brian') - view = setup_log_view(self.view, request) - - queryset = view.get_queryset() - - assert len(queryset) == 2 - - assert self.group in queryset - assert self.group2 in queryset - - -class TestOSFGroupsFormView(AdminTestCase): - - def setUp(self): - super().setUp() - self.user = UserFactory() - self.group = OSFGroupFactory(name='Brian Dawkins', creator=self.user) - self.group2 = OSFGroupFactory(name='Brian Westbrook', creator=self.user) - self.view = OSFGroupsFormView() - - def test_post_id(self): - request = RequestFactory().post('/fake_path', data={'id': self.group._id, 'name': ''}) - view = setup_log_view(self.view, request) - - redirect = view.post(request) - assert redirect.url == f'/osf_groups/{self.group._id}/' - - def test_post_name(self): - request = RequestFactory().post('/fake_path', data={'id': '', 'name': 'Brian'}) - view = setup_log_view(self.view, request) - - redirect = view.post(request) - assert redirect.url == '/osf_groups/?name=Brian' diff --git a/api/base/urls.py b/api/base/urls.py index f7e4cb74e71..142e2df34c2 100644 --- a/api/base/urls.py +++ b/api/base/urls.py @@ -53,7 +53,6 @@ re_path(r'^draft_nodes/', include('api.draft_nodes.urls', namespace='draft_nodes')), re_path(r'^draft_registrations/', include('api.draft_registrations.urls', namespace='draft_registrations')), re_path(r'^files/', include('api.files.urls', namespace='files')), - re_path(r'^groups/', include('api.osf_groups.urls', namespace='groups')), re_path(r'^guids/', include('api.guids.urls', namespace='guids')), re_path(r'^identifiers/', include('api.identifiers.urls', namespace='identifiers')), re_path(r'^institutions/', include('api.institutions.urls', namespace='institutions')), diff --git a/api/nodes/permissions.py b/api/nodes/permissions.py index cf42b5a501e..0b74d61c645 100644 --- a/api/nodes/permissions.py +++ b/api/nodes/permissions.py @@ -10,7 +10,6 @@ Institution, Node, NodeRelation, - OSFGroup, OSFUser, Preprint, PrivateLink, @@ -218,7 +217,7 @@ class NodeGroupDetailPermissions(permissions.BasePermission): """Permissions for node group detail - involving who can update the 
relationship between a node and an OSF Group.""" - acceptable_models = (OSFGroup, AbstractNode) + acceptable_models = (AbstractNode, ) def load_resource(self, context, view): return AbstractNode.load(context[view.node_lookup_url_kwarg]) diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index 473b439ffd5..fd67d6f85f7 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -37,7 +37,7 @@ from osf.models import ( Comment, DraftRegistration, ExternalAccount, RegistrationSchema, AbstractNode, PrivateLink, Preprint, - RegistrationProvider, OSFGroup, NodeLicense, DraftNode, + RegistrationProvider, NodeLicense, DraftNode, Registration, Node, ) from website.project import new_private_link @@ -803,9 +803,6 @@ def create(self, validated_data): except ValidationError as e: raise InvalidModelValueError(detail=list(e)[0]) node.add_contributors(contributors, auth=auth, log=True, save=True) - for group in parent.osf_groups: - if group.is_manager(user): - node.add_osf_group(group, group.get_permission_to_node(parent), auth=auth) if is_truthy(request.GET.get('inherit_subjects')) and validated_data['parent'].has_permission(user, osf_permissions.WRITE): parent = validated_data['parent'] node.subjects.add(parent.subjects.all()) @@ -1890,120 +1887,3 @@ def enable_or_disable_addon(self, obj, should_enable, addon_name, auth): if isinstance(addon, bool): addon = None return addon - - -class NodeGroupsSerializer(JSONAPISerializer): - filterable_fields = frozenset([ - 'name', - 'permission', - 'date_created', - ]) - - writeable_method_fields = frozenset([ - 'permission', - ]) - - non_anonymized_fields = [ - 'type', - 'permission', - ] - - id = CompoundIDField(source='_id', read_only=True) - type = TypeField() - permission = ser.SerializerMethodField() - name = ser.CharField(read_only=True) - date_created = VersionedDateTimeField(source='created', read_only=True) - date_modified = VersionedDateTimeField(source='modified', read_only=True) - - groups = RelationshipField( - related_view='groups:group-detail', - related_view_kwargs={'group_id': '<_id>'}, - required=False, - ) - - links = LinksField({ - 'self': 'get_absolute_url', - }) - - def get_absolute_url(self, obj): - node = self.context['node'] - return absolute_reverse( - 'nodes:node-group-detail', kwargs={ - 'group_id': obj._id, - 'node_id': node._id, - 'version': self.context['request'].parser_context['kwargs']['version'], - }, - ) - - def get_permission(self, obj): - node = self.context['node'] - return obj.get_permission_to_node(node) - - class Meta: - type_ = 'node-groups' - - -class NodeGroupsCreateSerializer(NodeGroupsSerializer): - """ - Overrides NodeGroupSerializer so groups relationship is properly parsed - (JSONAPIParser will flatten groups relationship into {'_id': 'group_id'}, - so _id field needs to be writeable so it's not dropped from validated_data) - - """ - id = IDField(source='_id', required=False, allow_null=True) - - groups = RelationshipField( - related_view='groups:group-detail', - related_view_kwargs={'group_id': '<_id>'}, - required=False, - ) - - def load_osf_group(self, _id): - if not _id: - raise exceptions.ValidationError(detail='Group relationship must be specified.') - try: - osf_group = OSFGroup.objects.get(_id=_id) - except OSFGroup.DoesNotExist: - raise exceptions.NotFound(detail=f'Group {_id} is invalid.') - return osf_group - - def create(self, validated_data): - auth = get_user_auth(self.context['request']) - node = self.context['node'] - permission = validated_data.get('permission', 
osf_permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS) - group = self.load_osf_group(validated_data.get('_id')) - if group in node.osf_groups: - raise exceptions.ValidationError( - f'The group {group._id} has already been added to the node {node._id}', - ) - - try: - node.add_osf_group(group, permission, auth) - except PermissionsError as e: - raise exceptions.PermissionDenied(detail=str(e)) - except ValueError as e: - # permission is in writeable_method_fields, so validation happens on OSF Group model - raise exceptions.ValidationError(detail=str(e)) - return group - - -class NodeGroupsDetailSerializer(NodeGroupsSerializer): - """ - Overrides NodeGroupsSerializer to make id required. Adds update method here. - """ - id = CompoundIDField(source='_id', required=True) - - def update(self, obj, validated_data): - auth = get_user_auth(self.context['request']) - node = self.context['node'] - permission = validated_data.get('permission') - if not permission: - return obj - try: - node.update_osf_group(obj, permission, auth) - except PermissionsError as e: - raise exceptions.PermissionDenied(detail=str(e.message)) - except ValueError as e: - # permission is in writeable_method_fields, so validation happens on OSF Group model - raise exceptions.ValidationError(detail=str(e)) - return obj diff --git a/api/nodes/urls.py b/api/nodes/urls.py index a10e2b1355a..e607b909898 100644 --- a/api/nodes/urls.py +++ b/api/nodes/urls.py @@ -30,8 +30,6 @@ re_path(r'^(?P\w+)/files/(?P[a-zA-Z0-9\-]*)(?P/(?:.*/)?)$', views.NodeFilesList.as_view(), name=views.NodeFilesList.view_name), re_path(r'^(?P\w+)/files/(?P[a-zA-Z0-9\-]*)(?P/.+[^/])$', views.NodeFileDetail.as_view(), name=views.NodeFileDetail.view_name), re_path(r'^(?P\w+)/forks/$', views.NodeForksList.as_view(), name=views.NodeForksList.view_name), - re_path(r'^(?P\w+)/groups/$', views.NodeGroupsList.as_view(), name=views.NodeGroupsList.view_name), - re_path(r'^(?P\w+)/groups/(?P\w+)/$', views.NodeGroupsDetail.as_view(), name=views.NodeGroupsDetail.view_name), re_path(r'^(?P\w+)/identifiers/$', views.NodeIdentifierList.as_view(), name=views.NodeIdentifierList.view_name), re_path(r'^(?P\w+)/institutions/$', views.NodeInstitutionsList.as_view(), name=views.NodeInstitutionsList.view_name), re_path(r'^(?P\w+)/linked_nodes/$', views.LinkedNodesList.as_view(), name=views.LinkedNodesList.view_name), diff --git a/api/nodes/views.py b/api/nodes/views.py index 7bc3ad929da..95470be9e75 100644 --- a/api/nodes/views.py +++ b/api/nodes/views.py @@ -61,7 +61,6 @@ LinkedRegistrationsRelationship, WaterButlerMixin, ) -from api.base.waffle_decorators import require_flag from api.base.permissions import WriteOrPublicForRelationshipInstitutions from api.cedar_metadata_records.serializers import CedarMetadataRecordsListSerializer from api.cedar_metadata_records.utils import can_view_record @@ -90,7 +89,6 @@ RegistrationAndPermissionCheckForPointers, ContributorDetailPermissions, ReadOnlyIfRegistration, - NodeGroupDetailPermissions, IsContributorOrGroupMember, AdminDeletePermissions, ExcludeWithdrawals, @@ -118,12 +116,8 @@ NodeStorageSerializer, NodeCitationSerializer, NodeCitationStyleSerializer, - NodeGroupsSerializer, - NodeGroupsCreateSerializer, - NodeGroupsDetailSerializer, ) from api.nodes.utils import NodeOptimizationMixin, enforce_no_children -from api.osf_groups.views import OSFGroupMixin from api.preprints.serializers import PreprintSerializer from api.registrations import annotations as registration_annotations from api.registrations.serializers import ( @@ -141,7 +135,6 @@ 
from framework.exceptions import HTTPError, PermissionsError from framework.auth.oauth_scopes import CoreScopes from framework.sentry import log_exception -from osf.features import OSF_GROUPS from osf.models import ( AbstractNode, OSFUser, @@ -152,7 +145,6 @@ DraftRegistration, Registration, BaseFileNode, - OSFGroup, NodeRelation, Guid, File, @@ -1245,111 +1237,6 @@ def get_object(self): return fobj -class NodeGroupsBase(JSONAPIBaseView, NodeMixin, OSFGroupMixin): - model_class = OSFGroup - - required_read_scopes = [CoreScopes.NODE_OSF_GROUPS_READ] - required_write_scopes = [CoreScopes.NODE_OSF_GROUPS_WRITE] - view_category = 'nodes' - - -class NodeGroupsList(NodeGroupsBase, generics.ListCreateAPIView, ListFilterMixin): - """ The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/nodes_groups_list) - - """ - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - AdminOrPublic, - base_permissions.TokenHasScope, - ) - - serializer_class = NodeGroupsSerializer - view_name = 'node-groups' - - @require_flag(OSF_GROUPS) - def get_default_queryset(self): - return self.get_node().osf_groups - - def get_queryset(self): - return self.get_queryset_from_request() - - # overrides FilterMixin - def build_query_from_field(self, field_name, operation): - if field_name == 'permission': - node = self.get_node() - try: - groups_with_perm_ids = node.get_osf_groups_with_perms(operation['value']).values_list('id', flat=True) - except ValueError: - raise ValidationError('{} is not a filterable permission.'.format(operation['value'])) - return Q(id__in=groups_with_perm_ids) - - return super().build_query_from_field(field_name, operation) - - # overrides ListCreateAPIView - def get_serializer_class(self): - if self.request.method == 'POST': - return NodeGroupsCreateSerializer - else: - return NodeGroupsSerializer - - # overrides ListCreateAPIView - def get_serializer_context(self): - """ - Extra context for NodeGroupsSerializer - """ - context = super().get_serializer_context() - context['node'] = self.get_node(check_object_permissions=False) - return context - - @require_flag(OSF_GROUPS) - def perform_create(self, serializer): - return super().perform_create(serializer) - - -class NodeGroupsDetail(NodeGroupsBase, generics.RetrieveUpdateDestroyAPIView): - """ The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/nodes_groups_read) - - """ - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - NodeGroupDetailPermissions, - base_permissions.TokenHasScope, - ) - - serializer_class = NodeGroupsDetailSerializer - - view_name = 'node-group-detail' - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def get_object(self): - node = self.get_node(check_object_permissions=False) - # Node permissions checked when group is loaded - group = self.get_osf_group(self.kwargs.get('group_id')) - if not group.get_permission_to_node(node): - raise NotFound(f'Group {group._id} does not have permissions to node {node._id}.') - return group - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def perform_destroy(self, instance): - node = self.get_node(check_object_permissions=False) - auth = get_user_auth(self.request) - try: - node.remove_osf_group(instance, auth) - except PermissionsError: - raise PermissionDenied('Not authorized to remove this group.') - - # Overrides RetrieveUpdateDestroyAPIView - def get_serializer_context(self): - """ - Extra context for NodeGroupsSerializer - """ - 
context = super().get_serializer_context() - context['node'] = self.get_node(check_object_permissions=False) - return context - - class NodeAddonList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, NodeMixin, AddonSettingsMixin): """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/nodes_addons_list). diff --git a/api/osf_groups/__init__.py b/api/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/api/osf_groups/permissions.py b/api/osf_groups/permissions.py deleted file mode 100644 index b9601f05d7d..00000000000 --- a/api/osf_groups/permissions.py +++ /dev/null @@ -1,39 +0,0 @@ -from rest_framework import permissions - -from api.base.utils import assert_resource_type, get_user_auth -from osf.utils.permissions import MANAGE -from osf.models import OSFGroup, OSFUser - - -class IsGroupManager(permissions.BasePermission): - - acceptable_models = (OSFGroup,) - - def has_object_permission(self, request, view, obj): - assert_resource_type(obj, self.acceptable_models) - auth = get_user_auth(request) - - if request.method in permissions.SAFE_METHODS: - return True - else: - return auth.user and obj.has_permission(auth.user, MANAGE) - - -class GroupMemberManagement(permissions.BasePermission): - - acceptable_models = (OSFGroup, OSFUser) - - def has_object_permission(self, request, view, obj): - if not isinstance(obj, OSFGroup): - obj = OSFGroup.load(request.parser_context['kwargs']['group_id']) - assert_resource_type(obj, self.acceptable_models) - auth = get_user_auth(request) - if request.method in permissions.SAFE_METHODS: - return True - elif request.method == 'DELETE': - user = OSFUser.load(request.parser_context['kwargs']['user_id']) - # You must have manage permissions on the OSFGroup to remove a member, - # unless you are removing yourself - return obj.has_permission(auth.user, MANAGE) or auth.user == user - else: - return auth.user and obj.has_permission(auth.user, MANAGE) diff --git a/api/osf_groups/serializers.py b/api/osf_groups/serializers.py deleted file mode 100644 index 353aa121da1..00000000000 --- a/api/osf_groups/serializers.py +++ /dev/null @@ -1,200 +0,0 @@ -from rest_framework import serializers as ser, exceptions -from django.core.exceptions import ValidationError - -from framework.auth.core import Auth -from api.base.exceptions import InvalidModelValueError -from api.base.serializers import ( - IDField, - LinksField, - JSONAPISerializer, - RelationshipField, - TypeField, - VersionedDateTimeField, -) -from api.base.utils import absolute_reverse -from api.nodes.serializers import CompoundIDField -from osf.models import OSFUser -from osf.models.osf_group import OSFGroup -from osf.utils.permissions import GROUP_ROLES, MEMBER, MANAGER - - -class GroupSerializer(JSONAPISerializer): - filterable_fields = frozenset([ - 'name', - ]) - - non_anonymized_fields = [ - 'type', - ] - - id = IDField(source='_id', read_only=True) - type = TypeField() - name = ser.CharField(required=True) - date_created = VersionedDateTimeField(source='created', read_only=True) - date_modified = VersionedDateTimeField(source='modified', read_only=True) - - links = LinksField({ - 'self': 'get_absolute_url', - }) - - def get_absolute_url(self, obj): - return obj.get_absolute_url() - - members = RelationshipField( - related_view='groups:group-members', - related_view_kwargs={'group_id': '<_id>'}, - ) - - class Meta: - type_ = 'groups' - - def create(self, validated_data): - group = OSFGroup(creator=validated_data['creator'], 
name=validated_data['name']) - group.save() - return group - - def update(self, instance, validated_data): - if 'name' in validated_data: - instance.set_group_name(validated_data.get('name')) - instance.save() - return instance - - -class GroupDetailSerializer(GroupSerializer): - """ - Overrides GroupSerializer to make id required. - """ - id = IDField(source='_id', required=True) - - -class GroupCompoundIDField(CompoundIDField): - def _get_resource_id(self): - return self.context['request'].parser_context['kwargs']['group_id'] - - -class GroupMemberSerializer(JSONAPISerializer): - filterable_fields = frozenset([ - 'role', - 'full_name', - ]) - writeable_method_fields = frozenset([ - 'role', - ]) - non_anonymized_fields = [ - 'type', - 'role', - ] - - id = GroupCompoundIDField(source='_id', read_only=True) - type = TypeField() - role = ser.SerializerMethodField() - unregistered_member = ser.SerializerMethodField() - full_name = ser.CharField(read_only=True, source='fullname') - - users = RelationshipField( - related_view='users:user-detail', - related_view_kwargs={'user_id': '<_id>'}, - ) - - links = LinksField({ - 'self': 'get_absolute_url', - }) - - def get_role(self, user): - return user.group_role(self.context['group']) - - def get_unregistered_member(self, obj): - unclaimed_records = obj.unclaimed_records.get(self.context['group']._id, None) - if unclaimed_records: - return unclaimed_records.get('name', None) - - def get_member_method(self, group, role): - methods = { - MANAGER: group.make_manager, - MEMBER: group.make_member, - } - return methods[role] - - def get_group_role(self, validated_data, default_role): - role = validated_data.get('role', default_role) - if role not in GROUP_ROLES: - raise exceptions.ValidationError(f'{role} is not a valid role; choose manager or member.') - return role - - class Meta: - type_ = 'group-members' - - def get_absolute_url(self, obj): - return absolute_reverse( - 'groups:group-member-detail', - kwargs={ - 'user_id': obj._id, - 'group_id': self.context['request'].parser_context['kwargs']['group_id'], - 'version': self.context['request'].parser_context['kwargs']['version'], - }, - ) - - -class GroupMemberCreateSerializer(GroupMemberSerializer): - id = GroupCompoundIDField(source='_id', required=False, allow_null=True) - type = TypeField() - full_name = ser.CharField(required=False) - email = ser.EmailField(required=False, write_only=True) - - def to_representation(self, instance, envelope='data'): - """ - Use GroupMemberSerializer for the response, but GroupMemberCreateSerializer - for the request. We only want full_name to be writable on create member (for unregistered members). - User serializer endpoints should be used to edit user's full_name. 
- """ - return GroupMemberSerializer(instance=instance, context=self.context).data - - def get_user_object(self, user_id, group): - if user_id: - user = OSFUser.load(user_id) - if not user: - raise exceptions.NotFound(detail=f'User with id {user_id} not found.') - if group.has_permission(user, 'member'): - raise exceptions.ValidationError(detail='User is already a member of this group.') - return user - return user_id - - def create(self, validated_data): - group = self.context['group'] - user = self.get_user_object(validated_data.get('_id', None), group) - auth = Auth(self.context['request'].user) - full_name = validated_data.get('full_name', None) - email = validated_data.get('email', None) - role = self.get_group_role(validated_data, MEMBER) - - try: - if user: - self.get_member_method(group, role)(user, auth) - else: - if not full_name or not email: - raise exceptions.ValidationError(detail='You must provide a full_name/email combination to add an unconfirmed member.') - else: - user = group.add_unregistered_member(full_name, email, auth, role) - except ValueError as e: - raise exceptions.ValidationError(detail=str(e)) - except ValidationError as e: - raise InvalidModelValueError(detail=list(e)[0]) - - return user - - -class GroupMemberDetailSerializer(GroupMemberSerializer): - id = GroupCompoundIDField(source='_id', required=True) - - def update(self, user, validated_data): - group = self.context['group'] - role = self.get_group_role(validated_data, user.group_role(group)) - auth = Auth(self.context['request'].user) - - try: - # Making sure the one-manager rule isn't violated - self.get_member_method(self.context['group'], role)(user, auth) - except ValueError as e: - raise exceptions.ValidationError(detail=str(e)) - - return user diff --git a/api/osf_groups/urls.py b/api/osf_groups/urls.py deleted file mode 100644 index ad80a9a5bc8..00000000000 --- a/api/osf_groups/urls.py +++ /dev/null @@ -1,12 +0,0 @@ -from django.urls import re_path - -from api.osf_groups import views - -app_name = 'osf' - -urlpatterns = [ - re_path(r'^$', views.GroupList.as_view(), name=views.GroupList.view_name), - re_path(r'^(?P\w+)/$', views.GroupDetail.as_view(), name=views.GroupDetail.view_name), - re_path(r'^(?P\w+)/members/$', views.GroupMembersList.as_view(), name=views.GroupMembersList.view_name), - re_path(r'^(?P\w+)/members/(?P\w+)/$', views.GroupMemberDetail.as_view(), name=views.GroupMemberDetail.view_name), -] diff --git a/api/osf_groups/views.py b/api/osf_groups/views.py deleted file mode 100644 index 7593803e4f4..00000000000 --- a/api/osf_groups/views.py +++ /dev/null @@ -1,243 +0,0 @@ -from django.apps import apps -from django.db.models import Q - -from rest_framework import generics, permissions as drf_permissions -from rest_framework.exceptions import NotFound, ValidationError - -from api.base import permissions as base_permissions -from api.base.exceptions import InvalidFilterOperator, InvalidFilterValue -from api.base.filters import ListFilterMixin -from api.base.utils import get_object_or_error, get_user_auth, is_bulk_request -from api.base.views import JSONAPIBaseView -from api.base import generic_bulk_views as bulk_views -from api.base.waffle_decorators import require_flag -from api.osf_groups.permissions import IsGroupManager, GroupMemberManagement -from api.osf_groups.serializers import ( - GroupSerializer, - GroupDetailSerializer, - GroupMemberSerializer, - GroupMemberDetailSerializer, - GroupMemberCreateSerializer, -) -from api.users.views import UserMixin -from 
framework.auth.oauth_scopes import CoreScopes -from osf.features import OSF_GROUPS -from osf.models import OSFGroup, OSFUser -from osf.utils.permissions import MANAGER, GROUP_ROLES - - -class OSFGroupMixin: - """ - Mixin with convenience method for retrieving the current OSF Group - """ - group_lookup_url_kwarg = 'group_id' - - def get_osf_group(self, check_object_permissions=True): - - group = get_object_or_error( - OSFGroup, - self.kwargs[self.group_lookup_url_kwarg], - self.request, - display_name='osf_group', - ) - - if check_object_permissions: - self.check_object_permissions(self.request, group) - return group - - -class GroupBaseView(JSONAPIBaseView, OSFGroupMixin): - required_read_scopes = [CoreScopes.OSF_GROUPS_READ] - required_write_scopes = [CoreScopes.OSF_GROUPS_WRITE] - model_class = apps.get_model('osf.OSFGroup') - - view_category = 'groups' - - -class GroupList(GroupBaseView, generics.ListCreateAPIView, ListFilterMixin): - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - ) - - serializer_class = GroupSerializer - view_name = 'group-list' - ordering = ('-modified',) - - @require_flag(OSF_GROUPS) - def get_default_queryset(self): - user = self.request.user - if user.is_anonymous: - return OSFGroup.objects.none() - return user.osf_groups - - # overrides ListCreateAPIView - def get_queryset(self): - return self.get_queryset_from_request() - - # overrides ListCreateAPIView - @require_flag(OSF_GROUPS) - def perform_create(self, serializer): - """Create an OSFGroup. - - :param serializer: - """ - # On creation, logged in user is the creator - user = self.request.user - serializer.save(creator=user) - - -class GroupDetail(GroupBaseView, generics.RetrieveUpdateDestroyAPIView): - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - IsGroupManager, - ) - - serializer_class = GroupDetailSerializer - view_name = 'group-detail' - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def get_object(self): - return self.get_osf_group() - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def perform_destroy(self, instance): - auth = get_user_auth(self.request) - instance.remove_group(auth=auth) - - -class OSFGroupMemberBaseView(JSONAPIBaseView, OSFGroupMixin): - """ - Base group used for OSFGroupMemberList and OSFGroupMemberDetail - """ - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - IsGroupManager, - ) - required_read_scopes = [CoreScopes.OSF_GROUPS_READ] - required_write_scopes = [CoreScopes.OSF_GROUPS_WRITE] - - model_class = apps.get_model('osf.OSFUser') - serializer_class = GroupMemberSerializer - view_category = 'groups' - ordering = ('-modified',) - - def _assert_member_belongs_to_group(self, user): - group = self.get_osf_group() - # Checking group membership instead of permissions, so unregistered members are - # recognized as group members - if not group.is_member(user): - raise NotFound(f'{user._id} cannot be found in this OSFGroup') - - def get_serializer_class(self): - if self.request.method in ('PUT', 'PATCH', 'DELETE'): - return GroupMemberDetailSerializer - elif self.request.method == 'POST': - return GroupMemberCreateSerializer - else: - return GroupMemberSerializer - - # overrides DestroyAPIView - @require_flag(OSF_GROUPS) - def perform_destroy(self, instance): - group = self.get_osf_group() - auth = get_user_auth(self.request) - try: - group.remove_member(instance, auth) 
- except ValueError as e: - raise ValidationError(detail=str(e)) - - -class GroupMembersList(OSFGroupMemberBaseView, bulk_views.BulkUpdateJSONAPIView, bulk_views.BulkDestroyJSONAPIView, bulk_views.ListBulkCreateJSONAPIView, ListFilterMixin): - view_name = 'group-members' - - # Overrides ListBulkCreateJSONAPIView - def get_queryset(self): - queryset = self.get_queryset_from_request() - if is_bulk_request(self.request): - user_ids = [] - for user in self.request.data: - try: - user_id = user['id'].split('-')[1] - except AttributeError: - raise ValidationError('Member identifier not provided.') - except IndexError: - raise ValidationError('Member identifier incorrectly formatted.') - else: - user_ids.append(user_id) - queryset = queryset.filter(guids___id__in=user_ids) - return queryset - - # Overrides ListFilterMixin - @require_flag(OSF_GROUPS) - def get_default_queryset(self): - # Returns all members and managers of the OSF Group (User objects) - return self.get_osf_group().members - - # Overrides ListBulkCreateJSONAPIView - def get_serializer_context(self): - context = super().get_serializer_context() - # Permissions check handled here - needed when performing write operations - context['group'] = self.get_osf_group() - return context - - # Overrides BulkDestroyJSONAPIView - def get_requested_resources(self, request, request_data): - requested_ids = [] - for data in request_data: - try: - requested_ids.append(data['id'].split('-')[1]) - except IndexError: - raise ValidationError('Member identifier incorrectly formatted.') - - resource_object_list = OSFUser.objects.filter(guids___id__in=requested_ids) - for resource in resource_object_list: - self._assert_member_belongs_to_group(resource) - - if len(resource_object_list) != len(request_data): - raise ValidationError({'non_field_errors': 'Could not find all objects to delete.'}) - - return resource_object_list - - # Overrides ListFilterMixin - def build_query_from_field(self, field_name, operation): - if field_name == 'role': - if operation['op'] != 'eq': - raise InvalidFilterOperator(value=operation['op'], valid_operators=['eq']) - # operation['value'] should be 'member' or 'manager' - role = operation['value'].lower().strip() - if role not in GROUP_ROLES: - raise InvalidFilterValue(value=operation['value']) - group = self.get_osf_group(check_object_permissions=False) - return Q(id__in=group.managers if role == MANAGER else group.members_only) - return super().build_query_from_field(field_name, operation) - - @require_flag(OSF_GROUPS) - def perform_create(self, serializer): - return super().perform_create(serializer) - - -class GroupMemberDetail(OSFGroupMemberBaseView, generics.RetrieveUpdateDestroyAPIView, UserMixin): - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - GroupMemberManagement, - ) - view_name = 'group-member-detail' - - # Overrides RetrieveUpdateDestroyAPIView - @require_flag(OSF_GROUPS) - def get_object(self): - user = self.get_user() - self._assert_member_belongs_to_group(user) - return user - - # Overrides RetrieveUpdateDestroyAPIView - def get_serializer_context(self): - context = super().get_serializer_context() - context['group'] = self.get_osf_group(check_object_permissions=False) - return context diff --git a/api/users/urls.py b/api/users/urls.py index bd7eaa6b3cb..f66939e3690 100644 --- a/api/users/urls.py +++ b/api/users/urls.py @@ -17,7 +17,6 @@ re_path(r'^(?P<user_id>\w+)/draft_registrations/$', views.UserDraftRegistrations.as_view(),
name=views.UserDraftRegistrations.view_name), re_path(r'^(?P<user_id>\w+)/institutions/$', views.UserInstitutions.as_view(), name=views.UserInstitutions.view_name), re_path(r'^(?P<user_id>\w+)/nodes/$', views.UserNodes.as_view(), name=views.UserNodes.view_name), - re_path(r'^(?P<user_id>\w+)/groups/$', views.UserGroups.as_view(), name=views.UserGroups.view_name), re_path(r'^(?P<user_id>\w+)/preprints/$', views.UserPreprints.as_view(), name=views.UserPreprints.view_name), re_path(r'^(?P<user_id>\w+)/draft_preprints/$', views.UserDraftPreprints.as_view(), name=views.UserDraftPreprints.view_name), re_path(r'^(?P<user_id>\w+)/registrations/$', views.UserRegistrations.as_view(), name=views.UserRegistrations.view_name), diff --git a/api/users/views.py b/api/users/views.py index 4da2f5102d2..7228e99546e 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -40,7 +40,6 @@ from api.nodes.filters import NodesFilterMixin, UserNodesFilterMixin from api.nodes.serializers import DraftRegistrationLegacySerializer from api.nodes.utils import NodeOptimizationMixin -from api.osf_groups.serializers import GroupSerializer from api.preprints.serializers import PreprintSerializer, PreprintDraftSerializer from api.registrations import annotations as registration_annotations from api.registrations.serializers import RegistrationSerializer @@ -83,7 +82,6 @@ from framework.utils import throttle_period_expired from framework.sessions.utils import remove_sessions_for_user from framework.exceptions import PermissionsError, HTTPError -from osf.features import OSF_GROUPS from rest_framework import permissions as drf_permissions from rest_framework import generics from rest_framework import status @@ -97,7 +95,6 @@ Preprint, Node, Registration, - OSFGroup, OSFUser, Email, Tag, @@ -356,33 +353,6 @@ def get_queryset(self): ) -class UserGroups(JSONAPIBaseView, generics.ListAPIView, UserMixin, ListFilterMixin): - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - ) - required_read_scopes = [CoreScopes.OSF_GROUPS_READ] - required_write_scopes = [CoreScopes.NULL] - - model_class = apps.get_model('osf.OSFGroup') - serializer_class = GroupSerializer - view_category = 'users' - view_name = 'user-groups' - ordering = ('-modified',) - - @require_flag(OSF_GROUPS) - def get_default_queryset(self): - requested_user = self.get_user() - current_user = self.request.user - if current_user.is_anonymous: - return OSFGroup.objects.none() - return requested_user.osf_groups.filter(id__in=current_user.osf_groups.values_list('id', flat=True)) - - # overrides ListAPIView - def get_queryset(self): - return self.get_queryset_from_request() - - class UserQuickFiles(JSONAPIBaseView, generics.ListAPIView): view_category = 'users' view_name = 'user-quickfiles' diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_detail.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_detail.py index 38b4156e116..0c2dce3501b 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_contributor_detail.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_detail.py @@ -165,14 +165,6 @@ def url_contrib(self, project, contrib): return '/{}draft_registrations/{}/contributors/{}/'.format( API_BASE, project._id, contrib._id) - def test_change_contributor_non_admin_osf_group_member_auth(self, project, contrib): - # Overrides TestNodeContributorUpdate - drafts have no group perms - return - - def test_change_contributor_admin_osf_group_permissions(self, project, contrib): -
# Overrides TestNodeContributorUpdate - drafts have no group perms - return - class TestDraftRegistrationContributorPartialUpdate(TestNodeContributorPartialUpdate): @pytest.fixture() @@ -259,14 +251,6 @@ def url_user_non_contrib(self, project, user_non_contrib): return '/{}draft_registrations/{}/contributors/{}/'.format( API_BASE, project._id, user_non_contrib._id) - def test_remove_contributor_osf_group_member_read(self): - # Overrides TestNodeContributorDelete - drafts don't have group members - return - - def test_remove_contributor_osf_group_member_admin(self): - # Overrides TestNodeContributorDelete - drafts don't have group members - return - @pytest.mark.django_db class TestDraftBibliographicContributorDetail(): diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py index 6fd38e1737f..54b2d23ad5d 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py @@ -199,20 +199,6 @@ def url_public(self, project_public): def url_private(self, project_private): return f'/{API_BASE}draft_registrations/{project_private._id}/contributors/?send_email=false' - # Overrides TestNodeContributorAdd - def test_adds_contributor_public_project_non_admin_osf_group( - self, app, user, user_two, user_three, - project_public, data_user_three, url_public): - # Draft registrations don't have groups - return - - # Overrides TestNodeContributorAdd - def test_adds_contributor_private_project_osf_group_admin_perms( - self, app, user, user_two, user_three, project_private, - data_user_two, url_private): - # Draft registrations don't have groups - return - class TestDraftRegistrationContributorCreateValidation(DraftRegistrationCRUDTestCase, TestNodeContributorCreateValidation): diff --git a/api_tests/nodes/views/test_node_children_list.py b/api_tests/nodes/views/test_node_children_list.py index a6a891a5b8b..c1375ed1cc7 100644 --- a/api_tests/nodes/views/test_node_children_list.py +++ b/api_tests/nodes/views/test_node_children_list.py @@ -8,7 +8,6 @@ from osf_tests.factories import ( NodeFactory, ProjectFactory, - OSFGroupFactory, RegistrationFactory, AuthUserFactory, PrivateLinkFactory, @@ -108,16 +107,6 @@ def test_return_private_node_children_list( assert len(res.json['data']) == 1 assert res.json['data'][0]['id'] == component._id - # test_return_private_node_children_osf_group_member_admin - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, permissions.ADMIN) - res = app.get(private_project_url, auth=group_mem.auth) - assert res.status_code == 200 - # Can view node children that you have implict admin permissions - assert len(res.json['data']) == 1 - assert res.json['data'][0]['id'] == component._id - def test_node_children_list_does_not_include_pointers( self, app, user, component, private_project_url): res = app.get(private_project_url, auth=user.auth) @@ -391,23 +380,6 @@ def test_creates_child(self, app, user, project, child, url): project.reload() assert len(project.nodes) == 0 - # test_creates_child_group_member_read - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.READ) - res = app.post_json_api( - url, child, auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 403 - - project.update_osf_group(group, 
permissions.WRITE) - res = app.post_json_api( - url, child, auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 201 - # test_creates_child_no_type child = { 'data': { diff --git a/api_tests/nodes/views/test_node_citations.py b/api_tests/nodes/views/test_node_citations.py index 22e877b523e..9e0b0b9e6bf 100644 --- a/api_tests/nodes/views/test_node_citations.py +++ b/api_tests/nodes/views/test_node_citations.py @@ -7,7 +7,6 @@ from osf_tests.factories import ( ProjectFactory, AuthUserFactory, - OSFGroupFactory ) @@ -119,13 +118,6 @@ def test_node_citations( assert res.status_code == 401 assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail - # test_read_group_mem_can_view_private_project_citations - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, auth=group_mem.auth) - assert res.status_code == 200 - # test_unauthenticated_can_view_public_project_citations res = app.get(public_url) assert res.status_code == 200 diff --git a/api_tests/nodes/views/test_node_comments_list.py b/api_tests/nodes/views/test_node_comments_list.py index c8e01a62e0b..e8cbf9c0e6b 100644 --- a/api_tests/nodes/views/test_node_comments_list.py +++ b/api_tests/nodes/views/test_node_comments_list.py @@ -11,7 +11,6 @@ from osf_tests.factories import ( ProjectFactory, RegistrationFactory, - OSFGroupFactory, AuthUserFactory, CommentFactory, ) @@ -355,18 +354,6 @@ def test_node_comments( assert res.status_code == 403 assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail - # test_private_node_private_comment_level_osf_group_member_can_comment - project_dict = project_private_comment_private - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_dict['project'].add_osf_group(group, READ) - res = app.post_json_api( - project_dict['url'], - project_dict['payload'], - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 201 - # test_private_node_private_comment_level_logged_out_user_cannot_comment project_dict = project_private_comment_private res = app.post_json_api( @@ -544,18 +531,6 @@ def test_node_comments_disabled( expect_errors=True) assert res.status_code == 501 - # test_private_node_private_comment_level_osf_group_member_can_comment - project_dict = project_private_comment_private - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_dict['project'].add_osf_group(group, READ) - res = app.post_json_api( - project_dict['url'], - project_dict['payload'], - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 501 - # test_private_node_with_public_comment_level_admin_can_comment project_dict = project_private_comment_public res = app.post_json_api( diff --git a/api_tests/nodes/views/test_node_contributors_and_group_members_list.py b/api_tests/nodes/views/test_node_contributors_and_group_members_list.py index 1cf6739aa44..6c7d6657660 100644 --- a/api_tests/nodes/views/test_node_contributors_and_group_members_list.py +++ b/api_tests/nodes/views/test_node_contributors_and_group_members_list.py @@ -3,7 +3,6 @@ from api.base.settings.defaults import API_BASE from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, AuthUserFactory, ) from osf.utils.permissions import READ, WRITE @@ -32,24 +31,12 @@ def group_member(): return AuthUserFactory() @pytest.fixture() -def group_member_and_contributor(): - return AuthUserFactory() - -@pytest.fixture() 
-def group(group_manager, group_member, group_member_and_contributor): - group = OSFGroupFactory(creator=group_manager) - group.make_member(group_member) - group.make_member(group_member_and_contributor) - return group - -@pytest.fixture() -def project(group, admin_contributor, write_contributor, group_member_and_contributor): +def project(admin_contributor, write_contributor, group_member_and_contributor): project = ProjectFactory( creator=admin_contributor ) project.add_contributor(write_contributor, WRITE) project.add_contributor(group_member_and_contributor, READ) - project.add_osf_group(group) return project @@ -72,10 +59,6 @@ def test_list_and_filter_contributors_and_group_members( res = app.get(url, auth=write_contributor.auth, expect_errors=True) assert res.status_code == 200 - # group_member - res = app.get(url, auth=group_member.auth, expect_errors=True) - assert res.status_code == 200 - # assert all contributors and group members appear, no duplicates res = app.get(url, auth=admin_contributor.auth) assert res.status_code == 200 @@ -84,9 +67,6 @@ def test_list_and_filter_contributors_and_group_members( expected = { admin_contributor._id, write_contributor._id, - group_manager._id, - group_member._id, - group_member_and_contributor._id } actual = {node['id'] for node in res.json['data']} diff --git a/api_tests/nodes/views/test_node_contributors_detail.py b/api_tests/nodes/views/test_node_contributors_detail.py index 623c0c15803..57f7e41444f 100644 --- a/api_tests/nodes/views/test_node_contributors_detail.py +++ b/api_tests/nodes/views/test_node_contributors_detail.py @@ -5,7 +5,6 @@ from osf.models import NodeLog from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, AuthUserFactory, ) from tests.utils import assert_latest_log @@ -89,17 +88,6 @@ def test_get_private_node_contributor_detail_contributor_auth(self, app, user, p assert res.status_code == 200 assert res.json['data']['id'] == f'{project_private._id}-{user._id}' - # test_get_private_node_osf_group_member - group_mem = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_mem) - project_private.add_osf_group(osf_group, permissions.READ) - res = app.get( - self.url_private(project_private, user._id), - auth=group_mem.auth - ) - assert res.status_code == 200 - assert res.json['data']['id'] == f'{project_private._id}-{user._id}' - def test_get_private_node_contributor_detail_non_contributor(self, app, user, project_private): non_contrib = AuthUserFactory() res = app.get( @@ -159,22 +147,6 @@ def test_unregistered_contributor_detail_show_up_as_name_associated_with_project assert res.json['data']['embeds']['users']['data']['attributes']['full_name'] == 'Rheisen Dennis' assert res.json['data']['attributes'].get('unregistered_contributor') == 'Nesiehr Sinned' - def test_node_contributor_detail_serializes_contributor_perms(self, app, user, project_public): - user_two = AuthUserFactory() - project_public.add_contributor(user_two, permissions.WRITE) - project_public.save() - - osf_group = OSFGroupFactory(creator=user) - osf_group.make_member(user_two) - project_public.add_osf_group(osf_group, permissions.ADMIN) - - url = self.make_resource_url(project_public._id, user_two._id) - res = app.get(url, auth=user.auth) - # Even though user_two has admin perms through group membership, - # contributor endpoints return contributor permissions - assert res.json['data']['attributes']['permission'] == permissions.WRITE - assert project_public.has_permission(user_two, permissions.ADMIN) is True - def 
test_detail_includes_index(self, app, user, project_public, url_public): res = app.get(url_public, auth=user.auth) data = res.json['data'] @@ -390,18 +362,6 @@ def test_remove_self_contributor_unique_admin(self, app, user, user_write_contri assert res.status_code == 400 assert user in project.contributors - def test_remove_contributor_osf_group_member_read(self, app, user, user_write_contrib, user_non_contrib, - project, url_user, url_user_write_contrib, url_user_non_contrib): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.READ) - res = app.delete( - url_user_write_contrib, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 403 - def test_can_not_remove_only_bibliographic_contributor(self, app, user, project, user_write_contrib, url_user): project.add_permission( user_write_contrib, @@ -448,20 +408,6 @@ def test_remove_contributor_admin(self, app, user, user_write_contrib, project, assert res.status_code == 204 assert user_write_contrib not in project.contributors - def test_remove_contributor_osf_group_member_admin(self, app, user, user_write_contrib, project, - url_user_write_contrib): - with assert_latest_log(NodeLog.CONTRIB_REMOVED, project): - # Disconnect contributor_removed so that we don't check in files - # We can remove this when StoredFileNode is implemented in - # osf-models - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.ADMIN) - with disconnected_from_listeners(contributor_removed): - res = app.delete(url_user_write_contrib, auth=group_mem.auth) - assert res.status_code == 204 - assert user_write_contrib not in project.contributors - def test_remove_self_non_admin(self, app, user_non_contrib, project, url_user_non_contrib): with assert_latest_log(NodeLog.CONTRIB_REMOVED, project): project.add_contributor( diff --git a/api_tests/nodes/views/test_node_contributors_detail_update.py b/api_tests/nodes/views/test_node_contributors_detail_update.py index eed746be90a..0e183c97345 100644 --- a/api_tests/nodes/views/test_node_contributors_detail_update.py +++ b/api_tests/nodes/views/test_node_contributors_detail_update.py @@ -4,7 +4,6 @@ from osf.models import NodeLog from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, AuthUserFactory, ) from rest_framework import exceptions @@ -169,29 +168,6 @@ def test_change_contributor_non_admin_auth(self, app, user, contrib, project, ur assert project.get_permissions(contrib) == [permissions.READ, permissions.WRITE] assert project.get_visible(contrib) - def test_change_contributor_non_admin_osf_group_member_auth(self, app, user, contrib, project, url_contrib): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.WRITE) - res = app.put_json_api( - url_contrib, - { - 'data': { - 'id': contrib._id, - 'type': 'contributors', - 'attributes': { - 'permission': permissions.READ, - 'bibliographic': False - } - } - }, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 403 - assert project.get_permissions(contrib) == [permissions.READ, permissions.WRITE] - assert project.get_visible(contrib) - def test_change_admin_self_without_other_admin(self, app, user, project, url_creator): res = app.put_json_api( url_creator, @@ -250,27 +226,6 @@ def test_change_contributor_correct_id(self, app, user, contrib, project, url_co ) assert res.status_code == 200 - def 
test_change_contributor_admin_osf_group_permissions(self, app, user, contrib, project, url_contrib): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project.add_osf_group(group, permissions.ADMIN) - res = app.put_json_api( - url_contrib, - { - 'data': { - 'id': f'{project._id}-{contrib._id}', - 'type': 'contributors', - 'attributes': { - 'permission': permissions.ADMIN, - 'bibliographic': True - } - } - }, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 200 - def test_remove_all_bibliographic_statuses_contributors(self, app, user, contrib, project, url_creator): project.set_visible(contrib, False, save=True) res = app.put_json_api( diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index a6a915aba4d..c11389fea97 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -9,7 +9,6 @@ from osf_tests.factories import ( fake_email, AuthUserFactory, - OSFGroupFactory, ProjectFactory, UnconfirmedUserFactory, UserFactory, @@ -176,18 +175,6 @@ def test_return( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - # test_return_private_contributor_list_logged_in_osf_group_member - res = app.get(url_private, auth=user_two.auth, expect_errors=True) - osf_group = OSFGroupFactory(creator=user_two) - project_private.add_osf_group(osf_group, permissions.READ) - res = app.get(url_private, auth=user_two.auth) - assert res.status_code == 200 - assert res.content_type == 'application/vnd.api+json' - assert len(res.json['data']) == 1 - assert res.json['data'][0]['id'] == make_contrib_id( - project_private._id, user._id - ) - def test_return_public_contributor_list_logged_out( self, app, user, user_two, project_public, url_public, make_contrib_id ): @@ -646,25 +633,6 @@ def test_adds_contributor_public_project_non_admin( project_public.reload() assert user_three not in project_public.contributors.all() - def test_adds_contributor_public_project_non_admin_osf_group( - self, - app, - user, - user_two, - user_three, - project_public, - data_user_three, - url_public, - ): - group = OSFGroupFactory(creator=user_two) - project_public.add_osf_group(group, permissions.WRITE) - res = app.post_json_api( - url_public, data_user_three, auth=user_two.auth, expect_errors=True - ) - assert res.status_code == 403 - project_public.reload() - assert user_three not in project_public.contributors.all() - def test_adds_contributor_public_project_non_contributor( self, app, user_two, user_three, project_public, data_user_three, url_public ): @@ -693,27 +661,6 @@ def test_adds_contributor_private_project_admin( project_private.reload() assert user_two in project_private.contributors - def test_adds_contributor_private_project_osf_group_admin_perms( - self, - app, - user, - user_two, - user_three, - project_private, - data_user_two, - url_private, - ): - osf_group = OSFGroupFactory(creator=user_three) - project_private.add_osf_group(osf_group, permissions.ADMIN) - res = app.post_json_api(url_private, data_user_two, auth=user_three.auth) - assert res.status_code == 201 - assert res.json['data']['id'] == '{}-{}'.format( - project_private._id, user_two._id - ) - - project_private.reload() - assert user_two in project_private.contributors - def test_adds_contributor_without_bibliographic_private_project_admin( self, app, user, user_two, project_private, url_private ): diff --git a/api_tests/nodes/views/test_node_detail.py 
b/api_tests/nodes/views/test_node_detail.py index ab2e90db5ab..d7279c73b5c 100644 --- a/api_tests/nodes/views/test_node_detail.py +++ b/api_tests/nodes/views/test_node_detail.py @@ -28,7 +28,6 @@ IdentifierFactory, InstitutionFactory, ForkFactory, - OSFGroupFactory, WithdrawnRegistrationFactory, DraftNodeFactory, ) @@ -152,13 +151,6 @@ def test_return_project_details( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - # test_return_project_where_you_have_osf_group_membership - osf_group = OSFGroupFactory(creator=user_two) - project_private.add_osf_group(osf_group, permissions.WRITE) - res = app.get(url_private, auth=user_two.auth) - assert res.status_code == 200 - assert project_private.has_permission(user_two, permissions.WRITE) is True - # test_draft_node_not_returned_under_node_detail_endpoint draft_node_url = f'/{API_BASE}nodes/{draft_node._id}/' res = app.get(draft_node_url, auth=user.auth, expect_errors=True) @@ -412,28 +404,6 @@ def test_node_show_correct_children_count(self, app, user, user_two, project_pub res = app.get(node_children_url, auth=user_two.auth) assert len(res.json['data']) == 1 - # Explicit Member of OSFGroup can view child count - user_three = AuthUserFactory() - group = OSFGroupFactory(creator=user_three) - res = app.get(url, auth=user_three.auth) - assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 0 - child.add_osf_group(group, permissions.READ) - res = app.get(url, auth=user_three.auth) - assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 1 - res = app.get(node_children_url, auth=user_three.auth) - assert len(res.json['data']) == 1 - - # Implicit admin group member can view child count - child.remove_osf_group(group) - res = app.get(url, auth=user_three.auth) - assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 0 - - project_public.add_osf_group(group, permissions.ADMIN) - res = app.get(url, auth=user_three.auth) - assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 1 - res = app.get(node_children_url, auth=user_three.auth) - assert len(res.json['data']) == 1 - # Grandchildren not shown. Children show one level. 
grandparent = AuthUserFactory() NodeFactory(parent=child, creator=user) @@ -574,46 +544,6 @@ def test_current_user_permissions(self, app, user, url_public, project_public, u assert permissions.READ in res.json['data']['attributes']['current_user_permissions'] assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is False - # Read group member has "read" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - project_public.add_osf_group(osf_group, permissions.READ) - res = app.get(url, auth=group_member.auth) - assert project_public.has_permission(group_member, permissions.READ) - assert permissions.READ in res.json['data']['attributes']['current_user_permissions'] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is True - - # Write group member has "read" and "write" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - project_public.add_osf_group(osf_group, permissions.WRITE) - res = app.get(url, auth=group_member.auth) - assert res.json['data']['attributes']['current_user_permissions'] == [permissions.WRITE, permissions.READ] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is True - - # Admin group member has "read" and "write" and "admin" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - project_public.add_osf_group(osf_group, permissions.ADMIN) - res = app.get(url, auth=group_member.auth) - assert res.json['data']['attributes']['current_user_permissions'] == [permissions.ADMIN, permissions.WRITE, permissions.READ] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is True - - # make sure 'read' is there for implicit read group members - comp = NodeFactory(parent=project_public, is_public=True) - comp_url = f'/{API_BASE}nodes/{comp._id}/?version=2.11' - res = app.get(comp_url, auth=group_member.auth) - assert project_public.has_permission(user, permissions.ADMIN) - assert permissions.READ in res.json['data']['attributes']['current_user_permissions'] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is False - - # ensure 'read' is still included with older versions - project_public.remove_osf_group(osf_group) - res = app.get(url_public, auth=group_member.auth) - assert not project_public.has_permission(group_member, permissions.READ) - assert permissions.READ in res.json['data']['attributes']['current_user_permissions'] - assert res.json['data']['attributes']['current_user_is_contributor_or_group_member'] is False - # superusers current permissions are None superuser = AuthUserFactory() superuser.is_superuser = True @@ -1052,25 +982,6 @@ def test_update_errors( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - # test_update_private_project_group_has_read_perms - osf_group = OSFGroupFactory(creator=user_two) - project_private.add_osf_group(osf_group, permissions.READ) - res = app.put_json_api(url_private, { - 'data': { - 'id': project_private._id, - 'type': 'nodes', - 'attributes': { - 'title': title_new, - 'description': description_new, - 'category': category_new, - 'public': False - } - } - }, auth=user_two.auth, expect_errors=True) - assert project_private.has_permission(user_two, permissions.READ) is True - assert res.status_code == 403 - assert 'detail' in res.json['errors'][0] - def test_update_public_project_logged_in( self, app, user, 
title_new, description_new, category_new, project_public, url_public): @@ -1096,32 +1007,6 @@ def test_update_public_project_logged_in( assert NodeLog.EDITED_DESCRIPTION in log_actions assert NodeLog.CATEGORY_UPDATED in log_actions - def test_update_public_project_osf_group_member( - self, app, user_two, title_new, description_new, - category_new, project_public, url_public): - osf_group = OSFGroupFactory(creator=user_two) - project_public.add_osf_group(osf_group, permissions.WRITE) - res = app.put_json_api(url_public, { - 'data': { - 'id': project_public._id, - 'type': 'nodes', - 'attributes': { - 'title': title_new, - 'description': description_new, - 'category': category_new, - } - } - }, auth=user_two.auth) - assert res.status_code == 200 - assert res.content_type == 'application/vnd.api+json' - assert res.json['data']['attributes']['title'] == title_new - assert res.json['data']['attributes']['description'] == description_new - assert res.json['data']['attributes']['category'] == category_new - log_actions = project_public.logs.values_list('action', flat=True) - assert NodeLog.CATEGORY_UPDATED in log_actions - assert NodeLog.EDITED_TITLE in log_actions - assert NodeLog.EDITED_DESCRIPTION in log_actions - def test_cannot_update_a_registration(self, app, user, project_public): registration = RegistrationFactory( project=project_public, creator=user) @@ -1500,15 +1385,6 @@ def test_deletes_node_errors( assert res.status_code == 404 assert 'detail' in res.json['errors'][0] - # test_delete_osf_group_improper_permissions - osf_group = OSFGroupFactory(creator=user_two) - project_private.add_osf_group(osf_group, permissions.READ) - res = app.delete(url_private, auth=user_two.auth, expect_errors=True) - project_private.reload() - assert res.status_code == 403 - assert project_private.is_deleted is False - assert 'detail' in res.json['errors'][0] - def test_deletes_private_node_logged_in_read_only_contributor( self, app, user_two, project_private, url_private): project_private.add_contributor( diff --git a/api_tests/nodes/views/test_node_draft_registration_list.py b/api_tests/nodes/views/test_node_draft_registration_list.py index 5e46b46b4c0..08099337dfd 100644 --- a/api_tests/nodes/views/test_node_draft_registration_list.py +++ b/api_tests/nodes/views/test_node_draft_registration_list.py @@ -11,7 +11,6 @@ RegistrationProviderFactory, AuthUserFactory, CollectionFactory, - OSFGroupFactory, DraftRegistrationFactory, ) from osf.utils import permissions @@ -51,15 +50,7 @@ def user_non_contrib(self): return AuthUserFactory() @pytest.fixture() - def group_mem(self): - return AuthUserFactory() - - @pytest.fixture() - def group(self, group_mem): - return OSFGroupFactory(creator=group_mem) - - @pytest.fixture() - def project_public(self, user, user_admin_contrib, user_write_contrib, user_read_contrib, group, group_mem): + def project_public(self, user, user_admin_contrib, user_write_contrib, user_read_contrib): project_public = ProjectFactory(is_public=True, creator=user) project_public.add_contributor( user_write_contrib, @@ -71,7 +62,6 @@ def project_public(self, user, user_admin_contrib, user_write_contrib, user_read user_admin_contrib, permissions=permissions.ADMIN) project_public.save() - project_public.add_osf_group(group, permissions.ADMIN) project_public.add_tag('hello', Auth(user), save=True) return project_public diff --git a/api_tests/nodes/views/test_node_files_list.py b/api_tests/nodes/views/test_node_files_list.py index ce01ef7e942..c5d6a475283 100644 --- 
a/api_tests/nodes/views/test_node_files_list.py +++ b/api_tests/nodes/views/test_node_files_list.py @@ -22,7 +22,6 @@ from osf_tests.factories import ( ProjectFactory, AuthUserFactory, - OSFGroupFactory, PrivateLinkFactory ) from osf.utils.permissions import READ @@ -217,16 +216,6 @@ def test_returns_private_files_logged_in_non_contributor(self): assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - def test_returns_private_files_logged_in_osf_group_member(self): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - self.project.add_osf_group(group, READ) - res = self.app.get( - self.private_url, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 200 - def test_returns_addon_folders(self): user_auth = Auth(self.user) res = self.app.get(self.private_url, auth=self.user.auth) @@ -541,18 +530,6 @@ def test_returns_private_files_logged_in_non_contributor(self): assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - @responses.activate - def test_returns_private_files_logged_in_osf_group_member(self): - self.configure_addon(self.project) - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - self.project.add_osf_group(group, READ) - with self.fake_gv.run_fake(): - res = self.app.get( - self.private_url, auth=group_mem.auth, expect_errors=True - ) - assert res.status_code == 200 - class TestNodeFilesListFiltering(ApiTestCase): diff --git a/api_tests/nodes/views/test_node_forks_list.py b/api_tests/nodes/views/test_node_forks_list.py index 24f5f50f924..fd5e4ac8c47 100644 --- a/api_tests/nodes/views/test_node_forks_list.py +++ b/api_tests/nodes/views/test_node_forks_list.py @@ -6,7 +6,6 @@ from osf_tests.factories import ( NodeFactory, ProjectFactory, - OSFGroupFactory, RegistrationFactory, AuthUserFactory, ForkFactory @@ -163,19 +162,6 @@ def test_authenticated_contributor_can_access_private_node_forks_list( forked_from = data['embeds']['forked_from']['data'] assert forked_from['id'] == private_project._id - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, permissions.READ) - private_fork.add_osf_group(group, permissions.READ) - res = app.get( - private_project_url, - auth=group_mem.auth) - assert res.status_code == 200 - assert len(res.json['data']) == 1 - data = res.json['data'][0] - assert data['attributes']['title'] == 'Fork of ' + \ - private_project.title - assert data['id'] == private_fork._id def test_node_forks_list_errors(self, app, private_project_url): @@ -345,15 +331,6 @@ def test_can_fork_private_node_logged_in_contributor( forked_from = data['embeds']['forked_from']['data'] assert forked_from['id'] == private_project._id - # test_group_member_read_can_create_fork_of_private_node - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, permissions.READ) - res = app.post_json_api( - private_project_url, - fork_data, auth=user.auth) - assert res.status_code == 201 - def test_fork_private_components_no_access( self, app, user_two, public_project, fork_data, public_project_url): diff --git a/api_tests/nodes/views/test_node_groups.py b/api_tests/nodes/views/test_node_groups.py deleted file mode 100644 index c1aa3a3e427..00000000000 --- a/api_tests/nodes/views/test_node_groups.py +++ /dev/null @@ -1,454 +0,0 @@ -import pytest -from guardian.shortcuts import get_perms -from waffle.testutils import override_flag - -from api.base.settings.defaults import API_BASE 
-from framework.auth.core import Auth -from osf.utils import permissions -from osf_tests.factories import ( - ProjectFactory, - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - - -@pytest.fixture() -def write_contrib(): - return AuthUserFactory() - -@pytest.fixture() -def read_contrib(): - return AuthUserFactory() - -@pytest.fixture() -def non_contrib(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(member, manager): - group = OSFGroupFactory(creator=manager, name='Platform Team') - group.make_member(member, auth=Auth(manager)) - return group - -@pytest.fixture() -def private_project(write_contrib, read_contrib): - project = ProjectFactory(is_public=False) - project.add_contributor(read_contrib, permissions=permissions.READ) - project.add_contributor(write_contrib, permissions=permissions.WRITE, save=True) - return project - -@pytest.fixture() -def public_project(write_contrib, read_contrib): - project = ProjectFactory(is_public=True) - project.add_contributor(read_contrib, permissions=permissions.READ) - project.add_contributor(write_contrib, permissions=permissions.WRITE, save=True) - return project - -@pytest.fixture() -def public_url(public_project): - return f'/{API_BASE}nodes/{public_project._id}/groups/' - -@pytest.fixture() -def private_url(private_project): - return f'/{API_BASE}nodes/{private_project._id}/groups/' - -@pytest.fixture() -def public_detail_url(public_url, osf_group): - return f'{public_url}{osf_group._id}/' - -@pytest.fixture() -def make_node_group_payload(): - def payload(attributes, relationships=None): - payload_data = { - 'data': { - 'type': 'node-groups', - 'attributes': attributes, - } - } - if relationships: - payload_data['data']['relationships'] = relationships - - return payload_data - return payload - - -@pytest.mark.django_db -class TestNodeGroupsList: - @pytest.fixture() - def make_group_id(self): - def contrib_id(node, group): - return f'{node._id}-{group._id}' - return contrib_id - - def test_return(self, app, non_contrib, osf_group, member, manager, public_project, private_project, public_url, private_url, make_group_id): - with override_flag(OSF_GROUPS, active=True): - public_project.add_osf_group(osf_group, permissions.WRITE) - - # public url logged out - res = app.get(public_url) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(public_project, osf_group) in ids - assert resp_json[0]['attributes']['permission'] == permissions.WRITE - - # private project logged in - private_project.add_osf_group(osf_group, permissions.READ) - res = app.get(private_url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert resp_json[0]['attributes']['permission'] == permissions.READ - - # private project logged out - res = app.get(private_url, expect_errors=True) - assert res.status_code == 401 - - # private project non_contrib - res = app.get(private_url, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # private project group_member - res = app.get(private_url, auth=member.auth, expect_errors=True) - assert res.status_code == 200 - - # private project group_manager - res = app.get(private_url, auth=member.auth, expect_errors=True) - assert res.status_code == 200 - - def test_filter_groups(self, app, 
osf_group, private_project, manager, private_url, make_group_id): - with override_flag(OSF_GROUPS, active=True): - read_group = OSFGroupFactory(creator=manager, name='house') - write_group = OSFGroupFactory(creator=manager, name='doghouse') - private_project.add_osf_group(read_group, permissions.READ) - private_project.add_osf_group(write_group, permissions.WRITE) - private_project.add_osf_group(osf_group, permissions.ADMIN) - - # test filter on permission - url = private_url + '?filter[permission]=admin' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert make_group_id(private_project, write_group) not in ids - assert make_group_id(private_project, read_group) not in ids - - url = private_url + '?filter[permission]=write' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert make_group_id(private_project, write_group) in ids - assert make_group_id(private_project, read_group) not in ids - - url = private_url + '?filter[permission]=read' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert make_group_id(private_project, write_group) in ids - assert make_group_id(private_project, read_group) in ids - - # test_filter_on_invalid_permission - url = private_url + '?filter[permission]=bad_perm' - res = app.get(url, auth=private_project.creator.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_perm is not a filterable permission.' 
- - url = private_url + '?filter[name]=Plat' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) in ids - assert make_group_id(private_project, write_group) not in ids - assert make_group_id(private_project, read_group) not in ids - - url = private_url + '?filter[name]=house' - res = app.get(url, auth=private_project.creator.auth) - resp_json = res.json['data'] - ids = [each['id'] for each in resp_json] - assert make_group_id(private_project, osf_group) not in ids - assert make_group_id(private_project, write_group) in ids - assert make_group_id(private_project, read_group) in ids - - -@pytest.mark.django_db -class TestNodeGroupCreate: - - def test_create_node_groups(self, app, osf_group, public_url, non_contrib, member, manager, - public_project, write_contrib, make_node_group_payload): - with override_flag(OSF_GROUPS, active=True): - attributes = {'permission': permissions.WRITE} - relationships = { - 'groups': { - 'data': { - 'type': 'groups', - 'id': osf_group._id, - } - } - } - payload = make_node_group_payload(attributes=attributes, relationships=relationships) - - # test add group noncontrib fails - res = app.post_json_api(public_url, payload, auth=non_contrib, expect_errors=True) - assert res.status_code == 401 - - # add group with write permissions fails - res = app.post_json_api(public_url, payload, auth=write_contrib, expect_errors=True) - assert res.status_code == 401 - - # add group with admin on node but not manager in group - res = app.post_json_api(public_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 403 - - # create group with admin permissions on node and manager permissions in group - public_project.add_contributor(manager, permissions=permissions.ADMIN, auth=Auth(public_project.creator), save=True) - - # test_perm_not_specified - given write by default - relationship_only = make_node_group_payload(attributes={}, relationships=relationships) - res = app.post_json_api(public_url, relationship_only, auth=manager.auth) - assert res.status_code == 201 - assert res.json['data']['attributes']['permission'] == permissions.WRITE - assert osf_group._id in res.json['data']['relationships']['groups']['links']['related']['href'] - - public_project.remove_osf_group(osf_group) - - # test_relationship_not_specified - attributes_only = make_node_group_payload(attributes=attributes) - res = app.post_json_api(public_url, attributes_only, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group relationship must be specified.' 
- - # test_group_is_invalid - relationships = { - 'groups': { - 'data': { - 'type': 'groups', - 'id': '12345', - } - } - } - invalid_group = make_node_group_payload(attributes=attributes, relationships=relationships) - res = app.post_json_api(public_url, invalid_group, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == 'Group {} is invalid.'.format('12345') - - # test_admin_perms - res = app.post_json_api(public_url, payload, auth=manager.auth) - assert public_project in osf_group.nodes - assert public_project.has_permission(member, permissions.WRITE) - assert res.json['data']['attributes']['permission'] == permissions.WRITE - assert osf_group._id in res.json['data']['relationships']['groups']['links']['related']['href'] - - # test creating group a second time fails - res = app.post_json_api(public_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'The group {} has already been added to the node {}'.format( - osf_group._id, public_project._id - ) - - # test incorrect permission string - public_project.remove_osf_group(osf_group) - payload['data']['attributes']['permission'] = 'not a real perm' - res = app.post_json_api(public_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'not a real perm is not a valid permission.' - - # test_incorrect_type - payload['data']['type'] = 'incorrect_type' - res = app.post_json_api(public_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # test not a real group - payload['data']['type'] = 'node-groups' - payload['data']['relationships']['groups']['data']['id'] = 'not_a_real_group_id' - res = app.post_json_api(public_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - - -@pytest.mark.django_db -class TestNodeGroupDetail: - - def test_node_group_detail(self, app, public_detail_url, osf_group, public_project): - with override_flag(OSF_GROUPS, active=True): - # res for group not attached to node raised permissions error - res = app.get(public_detail_url, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == f'Group {osf_group._id} does not have permissions to node {public_project._id}.' 
- - public_project.add_osf_group(osf_group, permissions.WRITE) - - # test attributes - res = app.get(public_detail_url) - attributes = res.json['data']['attributes'] - assert attributes['date_created'] == osf_group.created.replace(tzinfo=None).isoformat() - assert attributes['date_modified'] == osf_group.modified.replace(tzinfo=None).isoformat() - assert attributes['name'] == osf_group.name - assert attributes['permission'] == permissions.WRITE - - # test relationships - relationships = res.json['data']['relationships'] - assert list(relationships.keys()) == ['groups'] - assert osf_group._id in relationships['groups']['links']['related']['href'] - - # get group that does not exist - res = app.get(public_detail_url.replace(osf_group._id, 'hellonotarealroute'), expect_errors=True) - assert res.status_code == 404 - - def test_node_group_detail_perms(self, app, non_contrib, osf_group, member, public_project, private_project, public_detail_url, private_url): - with override_flag(OSF_GROUPS, active=True): - public_project.add_osf_group(osf_group, permissions.READ) - private_project.add_osf_group(osf_group, permissions.WRITE) - private_detail_url = private_url + osf_group._id + '/' - - # nonauth - res = app.get(private_detail_url, expect_errors=True) - assert res.status_code == 401 - - res = app.get(public_detail_url) - assert res.status_code == 200 - - # noncontrib - res = app.get(private_detail_url, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - res = app.get(public_detail_url, auth=non_contrib.auth) - assert res.status_code == 200 - - # member - res = app.get(private_detail_url, auth=member.auth) - assert res.status_code == 200 - - res = app.get(public_detail_url, auth=member.auth) - assert res.status_code == 200 - - -@pytest.mark.django_db -class TestNodeGroupUpdate: - - def test_update_permission(self, app, public_detail_url, osf_group, write_contrib, non_contrib, - public_project, make_node_group_payload): - with override_flag(OSF_GROUPS, active=True): - attributes = {'permission': permissions.WRITE} - payload = make_node_group_payload(attributes=attributes) - - # group has not been added to the node - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 404 - - public_project.add_osf_group(osf_group, permissions.READ) - - # test id not present in request - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 400 - - # test passing invalid group_id to update - payload['data']['id'] = 'nope' - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 409 - - payload['data']['id'] = public_project._id + '-' + osf_group._id - - # test update not logged in fails - res = app.patch_json_api(public_detail_url, payload, expect_errors=True) - assert res.status_code == 401 - - # test update noncontrib in fails - res = app.patch_json_api(public_detail_url, payload, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # test update as node write contrib fails - res = app.patch_json_api(public_detail_url, payload, auth=write_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # test update as node admin - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth) - res_json = res.json['data'] - assert res.status_code == 200 - assert not 
osf_group.is_member(public_project.creator.auth) - assert res_json['attributes']['permission'] == permissions.WRITE - assert permissions.WRITE_NODE in get_perms(osf_group.member_group, public_project) - - # test update invalid perm - payload['data']['attributes']['permission'] = 'bad_perm' - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_perm is not a valid permission.' - - # test update no perm specified, perms unchanged - payload['data']['attributes'] = {} - res = app.patch_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 200 - assert res_json['attributes']['permission'] == permissions.WRITE - - -@pytest.mark.django_db -class TestNodeGroupDelete: - - def test_delete_group(self, app, public_detail_url, public_project, osf_group, member, manager, non_contrib, write_contrib): - with override_flag(OSF_GROUPS, active=True): - public_project.add_contributor(manager, permissions=permissions.ADMIN) - payload = { - 'data': [ - {'type': 'node-groups', 'id': f'{public_project._id}-{osf_group._id}'} - ] - } - # group has not been added to the node - res = app.delete_json_api(public_detail_url, payload, auth=public_project.creator.auth, expect_errors=True) - assert res.status_code == 404 - - public_project.add_osf_group(osf_group, permissions.WRITE) - - # test member with write permission cannot remove group - res = app.delete_json_api(public_detail_url, payload, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # not logged in user cannot remove group - res = app.delete_json_api(public_detail_url, payload, expect_errors=True) - assert res.status_code == 401 - - # non contributor cannot remove group - res = app.delete_json_api(public_detail_url, payload, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # write contributor cannot remove group - res = app.delete_json_api(public_detail_url, payload, auth=write_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # test manager on group can remove group - res = app.delete_json_api(public_detail_url, payload, auth=manager.auth) - assert res.status_code == 204 - assert osf_group not in public_project.osf_groups - - # test member with admin permissions can remove group - public_project.add_osf_group(osf_group, permissions.ADMIN) - res = app.delete_json_api(public_detail_url, payload, auth=member.auth) - assert res.status_code == 204 - assert osf_group not in public_project.osf_groups - - second_group = OSFGroupFactory(creator=non_contrib) - second_group.make_member(member) - public_project.add_osf_group(second_group, permissions.WRITE) - - # test member with write cannot remove group - second_payload = { - 'data': [ - {'type': 'node-groups', 'id': f'{public_project._id}-{second_group._id}'} - ] - } - second_url = f'/{API_BASE}nodes/{public_project._id}/groups/{second_group._id}/' - res = app.delete_json_api(second_url, second_payload, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # test manager can remove the group (even though they are not an admin contributor) - res = app.delete_json_api(second_url, second_payload, auth=non_contrib.auth, expect_errors=True) - assert res.status_code == 204 - assert second_group not in public_project.osf_groups diff --git a/api_tests/nodes/views/test_node_implicit_contributors_list.py 
b/api_tests/nodes/views/test_node_implicit_contributors_list.py index c27591a2e44..53b72df0366 100644 --- a/api_tests/nodes/views/test_node_implicit_contributors_list.py +++ b/api_tests/nodes/views/test_node_implicit_contributors_list.py @@ -3,11 +3,9 @@ from api.base.settings.defaults import API_BASE from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, AuthUserFactory, NodeFactory ) -from osf.utils.permissions import READ @pytest.fixture() @@ -59,15 +57,3 @@ def test_list_and_filter_implicit_contributors(self, app, component, admin_contr assert res.status_code == 200 assert res.content_type == 'application/vnd.api+json' assert len(res.json['data']) == 0 - - def test_osf_group_members_can_view_implicit_contributors(self, app, component, admin_contributor, implicit_contributor): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - component.add_osf_group(group, READ) - - url = f'/{API_BASE}nodes/{component._id}/implicit_contributors/' - res = app.get(url, auth=group_mem.auth) - assert res.status_code == 200 - assert res.content_type == 'application/vnd.api+json' - assert len(res.json['data']) == 1 - assert res.json['data'][0]['id'] == implicit_contributor._id diff --git a/api_tests/nodes/views/test_node_institutions_list.py b/api_tests/nodes/views/test_node_institutions_list.py index 3ad46e2a6b3..0ddf17b0355 100644 --- a/api_tests/nodes/views/test_node_institutions_list.py +++ b/api_tests/nodes/views/test_node_institutions_list.py @@ -1,7 +1,6 @@ import pytest -from osf_tests.factories import InstitutionFactory, NodeFactory, AuthUserFactory, OSFGroupFactory -from osf.utils.permissions import READ +from osf_tests.factories import InstitutionFactory, NodeFactory, AuthUserFactory from api.base.settings.defaults import API_BASE @@ -59,13 +58,6 @@ def test_node_institution_detail( assert res.status_code == 200 assert len(res.json['data']) == 0 - # test_osf_group_member_can_view_node_institutions - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_one.add_osf_group(group, READ) - res = app.get(node_one_url) - assert res.status_code == 200 - # test_non_contrib node_one.is_public = False node_one.save() diff --git a/api_tests/nodes/views/test_node_linked_nodes.py b/api_tests/nodes/views/test_node_linked_nodes.py index a4b4662706a..a09a0c632c7 100644 --- a/api_tests/nodes/views/test_node_linked_nodes.py +++ b/api_tests/nodes/views/test_node_linked_nodes.py @@ -4,11 +4,9 @@ from framework.auth.core import Auth from osf_tests.factories import ( NodeFactory, - OSFGroupFactory, AuthUserFactory, NodeRelationFactory, ) -from osf.utils.permissions import WRITE, READ from website.project.signals import contributor_removed from api_tests.utils import disconnected_from_listeners @@ -112,13 +110,6 @@ def test_get_relationship_linked_nodes( res = app.get(url_private, expect_errors=True) assert res.status_code == 401 - # test_get_private_relationship_linked_nodes_read_group_mem - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_linking_private.add_osf_group(group, READ) - res = app.get(url_private, auth=group_mem.auth) - assert res.status_code == 200 - def test_post_contributing_node( self, app, user, node_contrib, node_private, make_payload, url_private): @@ -181,26 +172,6 @@ def test_post_private_node( assert node_other._id not in ids assert node_private._id in ids - # test_group_member_can_post_with_write - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - 
node_linking_private.add_osf_group(group, READ) - res = app.post_json_api( - url_private, - make_payload([node_other._id]), - auth=group_mem.auth, expect_errors=True - ) - assert res.status_code == 403 - - node_linking_private.update_osf_group(group, WRITE) - node_other.add_osf_group(group, WRITE) - res = app.post_json_api( - url_private, - make_payload([node_other._id]), - auth=group_mem.auth, expect_errors=True - ) - assert res.status_code == 201 - def test_post_mixed_nodes( self, app, user, node_private, node_other, node_contrib, make_payload, url_private): diff --git a/api_tests/nodes/views/test_node_linked_registrations.py b/api_tests/nodes/views/test_node_linked_registrations.py index ec5178009b1..46e334d4b77 100644 --- a/api_tests/nodes/views/test_node_linked_registrations.py +++ b/api_tests/nodes/views/test_node_linked_registrations.py @@ -5,7 +5,6 @@ from osf_tests.factories import ( AuthUserFactory, NodeFactory, - OSFGroupFactory, RegistrationFactory, NodeRelationFactory, ) @@ -116,16 +115,6 @@ def test_view_linked_registrations( assert res.status_code == 401 assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail - # test_osf_group_member_read_can_view_linked_reg - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_private.add_osf_group(group, READ) - res = make_request( - node_id=node_private._id, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 200 - @pytest.mark.django_db class TestNodeLinkedRegistrationsRelationshipRetrieve( @@ -194,16 +183,6 @@ def test_can_vew_linked_registrations_relationship( assert res.status_code == 401 assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail - # test_osf_group_member_can_view_linked_registration_relationship - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_private.add_osf_group(group, READ) - res = make_request( - node_id=node_private._id, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 200 - @pytest.mark.django_db class TestNodeLinkedRegistrationsRelationshipCreate( @@ -302,19 +281,6 @@ def test_cannot_create_linked_registrations_relationship( assert res.status_code == 403 assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail - # test_read_osf_group_mem_cannot_create_linked_registrations_relationship - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - node_private.add_osf_group(group, READ) - registration = RegistrationFactory(is_public=True) - res = make_request( - node_id=node_private._id, - reg_id=registration._id, - auth=group_mem.auth, - expect_errors=True - ) - assert res.status_code == 403 - # test_unauthenticated_user_cannot_create_linked_registrations_relationship registration = RegistrationFactory(is_public=True) res = make_request( diff --git a/api_tests/nodes/views/test_node_links_detail.py b/api_tests/nodes/views/test_node_links_detail.py index 80ad13cb755..24b0050b437 100644 --- a/api_tests/nodes/views/test_node_links_detail.py +++ b/api_tests/nodes/views/test_node_links_detail.py @@ -5,7 +5,6 @@ from osf.models import NodeLog from osf_tests.factories import ( ProjectFactory, - OSFGroupFactory, RegistrationFactory, AuthUserFactory, ) @@ -109,13 +108,6 @@ def test_node_link_detail( assert 'errors' in target_node assert target_node['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail - # test_returns_private_node_pointer_detail_logged_in_group_mem - group_mem = AuthUserFactory() - 
group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 200 - # test_self_link_points_to_node_link_detail_url res = app.get(public_url, auth=user.auth) assert res.status_code == 200 @@ -298,17 +290,6 @@ def test_deletes_private_node_pointer_logged_in_non_contrib( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - def test_deletes_private_node_pointer_logged_in_read_group_mem( - self, app, user_two, private_url, private_project): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.delete(private_url, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - private_project.update_osf_group(group, WRITE) - res = app.delete(private_url, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 204 - def test_return_deleted_public_node_pointer( self, app, user, public_project, public_url): with assert_latest_log(NodeLog.POINTER_REMOVED, public_project): diff --git a/api_tests/nodes/views/test_node_links_list.py b/api_tests/nodes/views/test_node_links_list.py index 136778d6895..cfd747cda98 100644 --- a/api_tests/nodes/views/test_node_links_list.py +++ b/api_tests/nodes/views/test_node_links_list.py @@ -6,7 +6,6 @@ from osf_tests.factories import ( ProjectFactory, RegistrationFactory, - OSFGroupFactory, AuthUserFactory ) from osf.utils.permissions import WRITE, READ @@ -103,16 +102,6 @@ def test_non_mutational_node_links_list_tests( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - # test_osf_group_member_read_can_view - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get( - private_url, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 200 - # test_node_links_bad_version url = f'{public_url}?version=2.1' res = app.get(url, auth=user.auth, expect_errors=True) @@ -396,14 +385,6 @@ def test_creates_public_node_pointer_logged_in( assert res.status_code == 403 assert 'detail' in res.json['errors'][0] - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - public_project.add_osf_group(group, READ) - res = app.post_json_api( - public_url, public_payload, - auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - res = app.post_json_api(public_url, public_payload, auth=user.auth) assert res.status_code == 201 assert res.content_type == 'application/vnd.api+json' @@ -420,16 +401,6 @@ def test_creates_private_node_pointer_logged_out( assert res.status_code == 401 assert 'detail' in res.json['errors'][0] - def test_creates_private_node_pointer_group_member( - self, app, private_project, private_pointer_project, private_url, make_payload): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, WRITE) - private_payload = make_payload(id=private_pointer_project._id) - res = app.post_json_api( - private_url, private_payload, auth=group_mem.auth) - assert res.status_code == 201 - def test_creates_private_node_pointer_logged_in_contributor( self, app, user, private_pointer_project, private_url, make_payload): private_payload = make_payload(id=private_pointer_project._id) diff --git a/api_tests/nodes/views/test_node_list.py b/api_tests/nodes/views/test_node_list.py index f71862ca8b0..15398613ea3 100644 --- 
a/api_tests/nodes/views/test_node_list.py +++ b/api_tests/nodes/views/test_node_list.py @@ -22,7 +22,6 @@ PreprintFactory, InstitutionFactory, RegionFactory, - OSFGroupFactory, DraftNodeFactory, ) from addons.osfstorage.settings import DEFAULT_REGION_ID @@ -128,14 +127,6 @@ def test_return( assert private_project._id not in ids assert draft_node._id not in ids - # test_returns_nodes_through_which_you_have_perms_through_osf_groups - group = OSFGroupFactory(creator=user) - another_project = ProjectFactory() - another_project.add_osf_group(group, permissions.READ) - res = app.get(url, auth=user.auth) - ids = [each['id'] for each in res.json['data']] - assert another_project._id in ids - def test_node_list_does_not_returns_registrations( self, app, user, public_project, url): registration = RegistrationFactory( @@ -220,13 +211,6 @@ def test_default_node_permission_queryset(self, app, url, private_project, user) ProjectFactory(is_public=True) assert default_node_permission_queryset(user_2, Node).count() == 2 - # Node read group member - project_3 = ProjectFactory(is_public=False) - assert default_node_permission_queryset(user_2, Node).count() == 2 - group = OSFGroupFactory(creator=user_2) - project_3.add_osf_group(group, permissions.READ) - assert default_node_permission_queryset(user_2, Node).count() == 3 - def test_current_user_permissions(self, app, user, url, public_project, non_contrib): # in most recent API version, read isn't implicit for public nodes url_public = url + '?version=2.11' @@ -275,53 +259,6 @@ def test_current_user_permissions(self, app, user, url, public_project, non_cont res = app.get(url_public, auth=superuser.auth) assert permissions.READ not in res.json['data'][0]['attributes']['current_user_permissions'] - def test_current_user_permissions_group_member(self, app, user, url, public_project): - # in most recent API version, read isn't implicit for public nodes - url_public = url + '?version=2.11' - - # Read group member has "read" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - public_project.add_osf_group(osf_group, permissions.READ) - res = app.get(url_public, auth=group_member.auth) - assert public_project.has_permission(group_member, permissions.READ) - assert permissions.READ in res.json['data'][0]['attributes']['current_user_permissions'] - assert res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is True - - # Write group member has "read" and "write" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - public_project.add_osf_group(osf_group, permissions.WRITE) - res = app.get(url_public, auth=group_member.auth) - assert res.json['data'][0]['attributes']['current_user_permissions'] == [permissions.WRITE, permissions.READ] - assert res.json['data'][0]['attributes']['current_user_is_contributor'] is False - assert res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is True - - # Admin group member has "read" and "write" and "admin" permissions - group_member = AuthUserFactory() - osf_group = OSFGroupFactory(creator=group_member) - public_project.add_osf_group(osf_group, permissions.ADMIN) - res = app.get(url_public, auth=group_member.auth) - assert res.json['data'][0]['attributes']['current_user_permissions'] == [permissions.ADMIN, permissions.WRITE, permissions.READ] - assert res.json['data'][0]['attributes']['current_user_is_contributor'] is False - assert 
res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is True - - # make sure 'read' is there for implicit read group members - NodeFactory(parent=public_project, is_public=True) - res = app.get(url_public, auth=group_member.auth) - assert public_project.has_permission(user, permissions.ADMIN) - assert permissions.READ in res.json['data'][0]['attributes']['current_user_permissions'] - assert res.json['data'][0]['attributes']['current_user_is_contributor'] is False - assert res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is False - - # ensure 'read' is still included with older versions - public_project.remove_osf_group(osf_group) - res = app.get(url, auth=group_member.auth) - assert not public_project.has_permission(group_member, permissions.READ) - assert permissions.READ in res.json['data'][0]['attributes']['current_user_permissions'] - assert res.json['data'][0]['attributes']['current_user_is_contributor'] is False - assert res.json['data'][0]['attributes']['current_user_is_contributor_or_group_member'] is False - @pytest.mark.django_db @pytest.mark.enable_bookmark_creation @@ -1637,35 +1574,6 @@ def test_create_component_inherit_contributors( new_component.contributors ) == len(parent_project.contributors) - def test_create_component_inherit_groups( - self, app, user_one, user_two, title, category): - parent_project = ProjectFactory(creator=user_one) - group = OSFGroupFactory(creator=user_one) - second_group = OSFGroupFactory() - third_group = OSFGroupFactory(creator=user_two) - third_group.make_member(user_one) - parent_project.add_osf_group(group, permissions.WRITE) - parent_project.add_osf_group(second_group, permissions.WRITE) - url = '/{}nodes/{}/children/?inherit_contributors=true'.format( - API_BASE, parent_project._id) - component_data = { - 'data': { - 'type': 'nodes', - 'attributes': { - 'title': title, - 'category': category, - } - } - } - res = app.post_json_api(url, component_data, auth=user_one.auth) - assert res.status_code == 201 - json_data = res.json['data'] - new_component_id = json_data['id'] - new_component = AbstractNode.load(new_component_id) - assert group in new_component.osf_groups - assert second_group not in new_component.osf_groups - assert third_group not in new_component.osf_groups - def test_create_component_with_tags(self, app, user_one, title, category): parent_project = ProjectFactory(creator=user_one) url = f'/{API_BASE}nodes/{parent_project._id}/children/' @@ -1691,42 +1599,6 @@ def test_create_component_with_tags(self, app, user_one, title, category): assert tag1.name == 'test tag 1' assert tag2.name == 'test tag 2' - def test_create_component_inherit_contributors_with_unregistered_contributor( - self, app, user_one, title, category): - parent_project = ProjectFactory(creator=user_one) - parent_project.add_unregistered_contributor( - fullname='far', email='foo@bar.baz', - permissions=permissions.READ, - auth=Auth(user=user_one), save=True) - osf_group = OSFGroupFactory(creator=user_one) - osf_group.add_unregistered_member(fullname='far', email='foo@bar.baz', auth=Auth(user_one)) - osf_group.save() - parent_project.add_osf_group(osf_group, permissions.ADMIN) - url = '/{}nodes/{}/children/?inherit_contributors=true'.format( - API_BASE, parent_project._id) - component_data = { - 'data': { - 'type': 'nodes', - 'attributes': { - 'title': title, - 'category': category, - } - } - } - res = app.post_json_api(url, component_data, auth=user_one.auth) - assert res.status_code == 201 - json_data = 
res.json['data'] - - new_component_id = json_data['id'] - new_component = AbstractNode.load(new_component_id) - assert len(new_component.contributors) == 2 - assert len( - new_component.contributors - ) == len(parent_project.contributors) - expected_perms = {permissions.READ, permissions.ADMIN} - actual_perms = {contributor.permission for contributor in new_component.contributor_set.all()} - assert actual_perms == expected_perms - def test_create_component_inherit_contributors_with_blocked_email( self, app, user_one, title, category): parent_project = ProjectFactory(creator=user_one) @@ -4002,36 +3874,6 @@ def test_skip_uneditable_has_admin_permission_for_one_node( assert public_project_one.is_deleted is True assert public_project_three.is_deleted is False - def test_skip_uneditable_has_admin_permission_for_one_node_group_members( - self, app, public_project_one, public_project_three, url): - group_member = AuthUserFactory() - group = OSFGroupFactory(creator=group_member) - public_project_one.add_osf_group(group, permissions.ADMIN) - public_project_one.save() - public_project_three.add_osf_group(group, permissions.WRITE) - public_project_three.save() - payload = { - 'data': [ - { - 'id': public_project_one._id, - 'type': 'nodes', - }, - { - 'id': public_project_three._id, - 'type': 'nodes', - } - ] - } - - res = app.delete_json_api(url, payload, auth=group_member.auth, bulk=True) - assert res.status_code == 200 - assert res.json['errors'][0]['id'] == public_project_three._id - public_project_one.reload() - public_project_three.reload() - - assert public_project_one.is_deleted is True - assert public_project_three.is_deleted is False - def test_skip_uneditable_does_not_have_admin_permission_for_any_nodes( self, app, user_one, public_project_three, public_project_four, url): payload = { diff --git a/api_tests/nodes/views/test_node_logs.py b/api_tests/nodes/views/test_node_logs.py index 23c5056891f..5b44d894917 100644 --- a/api_tests/nodes/views/test_node_logs.py +++ b/api_tests/nodes/views/test_node_logs.py @@ -7,7 +7,6 @@ from osf_tests.factories import ( AuthUserFactory, ProjectFactory, - OSFGroupFactory, RegistrationFactory, EmbargoFactory, ) @@ -77,13 +76,6 @@ def public_url(self, public_project): return '/{}nodes/{}/logs/?version=2.2'.format( API_BASE, public_project._id) - def test_can_view_osf_group_log(self, app, private_project, private_url): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, auth=group_mem.auth) - assert res.status_code == 200 - def test_add_tag(self, app, user, user_auth, public_project, public_url): public_project.add_tag('Rheisen', auth=user_auth) assert public_project.logs.latest().action == 'tag_added' diff --git a/api_tests/nodes/views/test_node_registrations_list.py b/api_tests/nodes/views/test_node_registrations_list.py index 45707f0b0d5..72b58ea58c6 100644 --- a/api_tests/nodes/views/test_node_registrations_list.py +++ b/api_tests/nodes/views/test_node_registrations_list.py @@ -5,7 +5,6 @@ from osf_tests.factories import ( ProjectFactory, RegistrationFactory, - OSFGroupFactory, AuthUserFactory, ) from osf.utils.permissions import READ @@ -85,13 +84,6 @@ def test_node_registration_list( assert res.status_code == 401 assert 'detail' in res.json['errors'][0] - # test_return_private_registration_group_mem_read - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, 
expect_errors=True, auth=group_mem.auth) - assert res.status_code == 200 - # test_return_private_registrations_logged_in_contributor res = app.get(private_url, auth=user.auth) assert res.status_code == 200 diff --git a/api_tests/nodes/views/test_node_reorder_components.py b/api_tests/nodes/views/test_node_reorder_components.py index a7019b17988..5bfa2ac374b 100644 --- a/api_tests/nodes/views/test_node_reorder_components.py +++ b/api_tests/nodes/views/test_node_reorder_components.py @@ -4,7 +4,6 @@ AuthUserFactory, ProjectFactory, NodeFactory, - OSFGroupFactory, ) from osf.models import NodeRelation from osf.utils import permissions @@ -22,15 +21,7 @@ def read_contrib(): return AuthUserFactory() @pytest.fixture() -def group_member(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(group_member): - return OSFGroupFactory(creator=group_member) - -@pytest.fixture() -def project(admin_contrib, write_contrib, read_contrib, osf_group): +def project(admin_contrib, write_contrib, read_contrib): project = ProjectFactory(creator=admin_contrib) project.add_contributor(write_contrib, permissions.WRITE) project.add_contributor(read_contrib, permissions.READ) diff --git a/api_tests/nodes/views/test_node_settings.py b/api_tests/nodes/views/test_node_settings.py index f95d4c61ad6..4861e4d3035 100644 --- a/api_tests/nodes/views/test_node_settings.py +++ b/api_tests/nodes/views/test_node_settings.py @@ -5,7 +5,6 @@ AuthUserFactory, ProjectFactory, PrivateLinkFactory, - OSFGroupFactory, ) from osf.models import NodeLog from osf.utils import permissions @@ -22,14 +21,6 @@ def write_contrib(): def read_contrib(): return AuthUserFactory() -@pytest.fixture() -def group_member(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(group_member): - return OSFGroupFactory(creator=group_member) - @pytest.fixture() def project(admin_contrib, write_contrib, read_contrib): project = ProjectFactory(creator=admin_contrib) @@ -50,7 +41,7 @@ class TestNodeSettingsGet: def non_contrib(self): return AuthUserFactory() - def test_node_settings_detail(self, app, admin_contrib, non_contrib, write_contrib, osf_group, group_member, url, project): + def test_node_settings_detail(self, app, admin_contrib, non_contrib, write_contrib, url, project): # non logged in uers can't access node settings res = app.get(url, expect_errors=True) @@ -68,11 +59,6 @@ def test_node_settings_detail(self, app, admin_contrib, non_contrib, write_contr res = app.get(url, auth=admin_contrib.auth) assert res.status_code == 200 - # group member can access node settings - project.add_osf_group(osf_group, permissions.READ) - res = app.get(url, auth=group_member.auth) - assert res.status_code == 200 - # allow_access_requests project.allow_access_requests = True project.save() @@ -146,7 +132,7 @@ def payload(self, project): } } - def test_put_permissions(self, app, project, payload, admin_contrib, write_contrib, read_contrib, url, osf_group, group_member): + def test_put_permissions(self, app, project, payload, admin_contrib, write_contrib, read_contrib, url): assert project.access_requests_enabled is True payload['data']['attributes']['access_requests_enabled'] = False # Logged out @@ -162,32 +148,14 @@ def test_put_permissions(self, app, project, payload, admin_contrib, write_contr res = app.put_json_api(url, payload, auth=read_contrib.auth, expect_errors=True) assert res.status_code == 403 - # group member read - project.add_osf_group(osf_group, permissions.READ) - project.save() - res = app.put_json_api(url, payload, 
auth=group_member.auth, expect_errors=True) - assert res.status_code == 403 - # Logged in write (Write contribs can only change some node settings) res = app.put_json_api(url, payload, auth=write_contrib.auth, expect_errors=True) assert res.status_code == 403 - # group member write - project.update_osf_group(osf_group, permissions.WRITE) - project.save() - res = app.put_json_api(url, payload, auth=group_member.auth, expect_errors=True) - assert res.status_code == 403 - # Logged in write (Write group mems can only change some node settings) res = app.put_json_api(url, payload, auth=admin_contrib.auth) assert res.status_code == 200 - # group member admin - project.update_osf_group(osf_group, permissions.ADMIN) - project.save() - res = app.put_json_api(url, payload, auth=group_member.auth, expect_errors=True) - assert res.status_code == 200 - @pytest.mark.django_db class TestNodeSettingsUpdate: @@ -203,7 +171,7 @@ def payload(self, project): } } - def test_patch_permissions(self, app, project, payload, admin_contrib, write_contrib, read_contrib, group_member, osf_group, url): + def test_patch_permissions(self, app, project, payload, admin_contrib, write_contrib, read_contrib, url): payload['data']['attributes']['redirect_link_enabled'] = True payload['data']['attributes']['redirect_link_url'] = 'https://cos.io' # Logged out @@ -227,21 +195,6 @@ def test_patch_permissions(self, app, project, payload, admin_contrib, write_con res = app.patch_json_api(url, payload, auth=admin_contrib.auth) assert res.status_code == 200 - # Logged in read group mem - project.add_osf_group(osf_group, permissions.READ) - res = app.patch_json_api(url, payload, auth=read_contrib.auth, expect_errors=True) - assert res.status_code == 403 - - # Logged in write group mem (Write group mems can only change some node settings) - project.add_osf_group(osf_group, permissions.WRITE) - res = app.patch_json_api(url, payload, auth=write_contrib.auth, expect_errors=True) - assert res.status_code == 200 - - # Logged in admin group mem - project.add_osf_group(osf_group, permissions.ADMIN) - res = app.patch_json_api(url, payload, auth=admin_contrib.auth) - assert res.status_code == 200 - def test_patch_invalid_type(self, app, project, payload, admin_contrib, url): payload['data']['type'] = 'Invalid Type' diff --git a/api_tests/nodes/views/test_node_wiki_list.py b/api_tests/nodes/views/test_node_wiki_list.py index 1972281f370..69ca6b2bb07 100644 --- a/api_tests/nodes/views/test_node_wiki_list.py +++ b/api_tests/nodes/views/test_node_wiki_list.py @@ -11,10 +11,8 @@ from osf_tests.factories import ( AuthUserFactory, ProjectFactory, - OSFGroupFactory, RegistrationFactory, ) -from osf.utils.permissions import WRITE, READ from tests.base import fake @@ -124,15 +122,6 @@ def test_return_wikis( assert res.status_code == 401 assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail - # test_return_private_node_wikis_logged_in_osf_group_member - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - private_project.add_osf_group(group, READ) - res = app.get(private_url, auth=group_mem.auth) - assert res.status_code == 200 - wiki_ids = [wiki['id'] for wiki in res.json['data']] - assert private_wiki._id in wiki_ids - # test_return_private_node_wikis_logged_in_non_contributor res = app.get(private_url, auth=non_contrib.auth, expect_errors=True) assert res.status_code == 403 @@ -347,13 +336,6 @@ def test_create_public_wiki_page_with_content(self, app, user_write_contributor, wiki_page = 
WikiPage.objects.get_for_node(project_public, page_name) assert wiki_page.get_version().content == 'my first wiki page' - # test_osf_group_member_write - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_public.add_osf_group(group, WRITE) - res = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=group_mem.auth, expect_errors=True) - assert res.status_code == 201 - def test_create_public_wiki_page_with_empty_content(self, app, user_write_contributor, url_node_public, project_public): page_name = fake.word() payload = create_wiki_payload(page_name) @@ -384,13 +366,6 @@ def test_do_not_create_public_wiki_page( res = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=user_non_contributor.auth, expect_errors=True) assert res.status_code == 403 - # test_do_not_create_public_wiki_page_as_read_osf_group_member - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_public.add_osf_group(group, READ) - res = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - # test_do_not_create_public_wiki_page_as_unauthenticated res = app.post_json_api(url_node_public, create_wiki_payload(fake.word()), expect_errors=True) assert res.status_code == 401 diff --git a/api_tests/osf_groups/__init__.py b/api_tests/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/api_tests/osf_groups/views/__init__.py b/api_tests/osf_groups/views/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/api_tests/osf_groups/views/test_osf_group_detail.py b/api_tests/osf_groups/views/test_osf_group_detail.py deleted file mode 100644 index aa5b7c63b9c..00000000000 --- a/api_tests/osf_groups/views/test_osf_group_detail.py +++ /dev/null @@ -1,209 +0,0 @@ -import pytest - -from waffle.testutils import override_flag -from django.contrib.auth.models import Group - -from api.base.settings.defaults import API_BASE -from osf.models import OSFGroup -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - - -def build_member_relationship_payload(user_ids): - return { - 'data': [{ - 'type': 'users', - 'id': user_id - } for user_id in user_ids] - } - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def old_name(): - return 'Platform Team' - -@pytest.fixture() -def new_name(): - return 'My New Lab' - -@pytest.fixture() -def osf_group(manager, member, old_name): - group = OSFGroupFactory(name=old_name, creator=manager) - group.make_member(member) - return group - -@pytest.fixture() -def url(osf_group): - return f'/{API_BASE}groups/{osf_group._id}/' - -@pytest.fixture() -def managers_url(url): - return url + 'managers/' - -@pytest.fixture() -def members_url(url): - return url + 'members/' - -@pytest.fixture() -def name_payload(osf_group, new_name): - return { - 'data': { - 'id': osf_group._id, - 'type': 'groups', - 'attributes': { - 'name': new_name - } - } - } - - -@pytest.mark.django_db -class TestGroupDetail: - - def test_return(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.get(url) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == osf_group._id - assert 
data['type'] == 'groups' - assert data['attributes']['name'] == osf_group.name - assert 'members' in data['relationships'] - - # test authenticated user - res = app.get(url, auth=user.auth) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == osf_group._id - assert data['type'] == 'groups' - assert data['attributes']['name'] == osf_group.name - assert 'members' in data['relationships'] - - # test authenticated member - res = app.get(url, auth=member.auth) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == osf_group._id - assert data['type'] == 'groups' - assert data['attributes']['name'] == osf_group.name - assert 'members' in data['relationships'] - - # test authenticated manager - res = app.get(url, auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == osf_group._id - assert data['type'] == 'groups' - assert data['attributes']['name'] == osf_group.name - assert 'members' in data['relationships'] - - # test invalid group - url = '/{}groups/{}/'.format(API_BASE, '12345_bad_id') - res = app.get(url, expect_errors=True) - assert res.status_code == 404 - - -@pytest.mark.django_db -class TestOSFGroupUpdate: - def test_patch_osf_group_perms(self, app, member, manager, user, osf_group, url, name_payload, new_name): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.patch_json_api(url, expect_errors=True) - assert res.status_code == 401 - - # test authenticated_user - res = app.patch_json_api(url, {}, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - - # test authenticated_member - res = app.patch_json_api(url, {}, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # test authenticated_manager - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 200 - assert res.json['data']['attributes']['name'] == new_name - - def test_patch_osf_group_attributes(self, app, manager, osf_group, url, name_payload, old_name, new_name): - with override_flag(OSF_GROUPS, active=True): - # test_blank_name - assert osf_group.name == old_name - name_payload['data']['attributes']['name'] = '' - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'This field may not be blank.' 
- osf_group.reload - assert osf_group.name == old_name - - # test_name_updated - name_payload['data']['attributes']['name'] = new_name - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 200 - assert res.json['data']['attributes']['name'] == new_name - osf_group.reload() - assert osf_group.name == new_name - - # test_invalid_type - name_payload['data']['type'] = 'bad_type' - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # test_id_mismatch - name_payload['data']['type'] = 'groups' - name_payload['data']['id'] = '12345_bad_id' - res = app.patch_json_api(url, name_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - -@pytest.mark.django_db -class TestOSFGroupDelete: - def test_delete_perms(self, app, osf_group, manager, member, user, url): - with override_flag(OSF_GROUPS, active=True): - res = app.delete_json_api(url, expect_errors=True) - assert res.status_code == 401 - - res = app.delete_json_api(url, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - - res = app.delete_json_api(url, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - res = app.delete_json_api(url, auth=manager.auth) - assert res.status_code == 204 - - def test_delete_specifics(self, app, osf_group, manager, member, user, url): - with override_flag(OSF_GROUPS, active=True): - osf_group_name = osf_group.name - manager_group_name = osf_group.manager_group.name - member_group_name = osf_group.member_group.name - - assert manager_group_name in manager.groups.values_list('name', flat=True) - assert member_group_name in member.groups.values_list('name', flat=True) - - res = app.delete_json_api(url, auth=manager.auth) - assert res.status_code == 204 - - assert not OSFGroup.objects.filter(name=osf_group_name).exists() - assert not Group.objects.filter(name=manager_group_name).exists() - assert not Group.objects.filter(name=member_group_name).exists() - - assert manager_group_name not in manager.groups.values_list('name', flat=True) - assert member_group_name not in member.groups.values_list('name', flat=True) - - res = app.get(url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 diff --git a/api_tests/osf_groups/views/test_osf_group_members_detail.py b/api_tests/osf_groups/views/test_osf_group_members_detail.py deleted file mode 100644 index 4b643058063..00000000000 --- a/api_tests/osf_groups/views/test_osf_group_members_detail.py +++ /dev/null @@ -1,259 +0,0 @@ -import pytest -from waffle.testutils import override_flag - -from framework.auth.core import Auth -from api.base.settings.defaults import API_BASE -from osf.utils.permissions import MEMBER, MANAGER -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def old_name(): - return 'Platform Team' - -@pytest.fixture() -def osf_group(manager, member, old_name): - group = OSFGroupFactory(name=old_name, creator=manager) - group.make_member(member) - return group - -@pytest.fixture() -def url(osf_group, member): - return f'/{API_BASE}groups/{osf_group._id}/members/{member._id}/' - -@pytest.fixture() -def bad_url(osf_group): - return '/{}groups/{}/members/{}/'.format(API_BASE, osf_group._id, 
'12345') - -@pytest.mark.django_db -class TestOSFGroupMembersDetail: - def test_return_perms(self, app, member, manager, user, osf_group, url, bad_url): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.get(url) - assert res.status_code == 200 - - # test user - res = app.get(url, auth=user.auth) - assert res.status_code == 200 - - # test member - res = app.get(url, auth=member.auth) - assert res.status_code == 200 - - # test manager - res = app.get(url, auth=manager.auth) - assert res.status_code == 200 - - # test invalid member - res = app.get(bad_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - - def test_return_member(self, app, member, manager, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - res = app.get(url) - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == f'{osf_group._id}-{member._id}' - assert data['type'] == 'group-members' - assert data['attributes']['role'] == MEMBER - assert data['attributes']['unregistered_member'] is None - assert data['attributes']['full_name'] == member.fullname - assert member._id in data['relationships']['users']['links']['related']['href'] - - user = osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER) - res = app.get(f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/') - assert res.status_code == 200 - data = res.json['data'] - assert data['id'] == f'{osf_group._id}-{user._id}' - assert data['type'] == 'group-members' - assert data['attributes']['role'] == MANAGER - assert data['attributes']['unregistered_member'] == 'Crazy 8s' - assert data['attributes']['full_name'] == 'Crazy 8s' - assert res.json['data']['attributes']['full_name'] == 'Crazy 8s' - - -def build_update_payload(group_id, user_id, role): - return { - 'data': { - 'id': f'{group_id}-{user_id}', - 'type': 'group-members', - 'attributes': { - 'role': role - } - } - } - -@pytest.mark.django_db -class TestOSFGroupMembersUpdate: - def test_update_role(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload = build_update_payload(osf_group._id, member._id, MANAGER) - - # test unauthenticated - res = app.patch_json_api(url, payload, expect_errors=True) - assert res.status_code == 401 - - # test user - res = app.patch_json_api(url, payload, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - - # test member - res = app.patch_json_api(url, payload, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # test manager - res = app.patch_json_api(url, payload, auth=manager.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['role'] == MANAGER - assert res.json['data']['attributes']['full_name'] == member.fullname - assert res.json['data']['id'] == f'{osf_group._id}-{member._id}' - - payload = build_update_payload(osf_group._id, member._id, MEMBER) - res = app.patch_json_api(url, payload, auth=manager.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['role'] == MEMBER - assert res.json['data']['attributes']['full_name'] == member.fullname - assert res.json['data']['id'] == f'{osf_group._id}-{member._id}' - - def test_update_errors(self, app, member, manager, user, osf_group, url, bad_url): - with override_flag(OSF_GROUPS, active=True): - # id not in payload - payload = { - 'data': { - 'type': 'group-members', - 'attributes': { - 'role': MEMBER - } - } - } - res = app.patch_json_api(url, payload, auth=manager.auth, 
expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'This field may not be null.' - - # test improperly formatted id - payload = build_update_payload(osf_group._id, member._id, MANAGER) - payload['data']['id'] = 'abcde' - res = app.patch_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # test improper type - payload = build_update_payload(osf_group._id, member._id, MANAGER) - payload['data']['type'] = 'bad_type' - res = app.patch_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # test invalid role - payload = build_update_payload(osf_group._id, member._id, 'bad_perm') - res = app.patch_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_perm is not a valid role; choose manager or member.' - - # test user is not a member - payload = build_update_payload(osf_group._id, user._id, MEMBER) - bad_url = f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/' - res = app.patch_json_api(bad_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == f'{user._id} cannot be found in this OSFGroup' - - # test cannot downgrade remaining manager - payload = build_update_payload(osf_group._id, manager._id, MEMBER) - manager_url = f'/{API_BASE}groups/{osf_group._id}/members/{manager._id}/' - res = app.patch_json_api(manager_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' - - # test cannot remove last confirmed manager - osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER) - assert len(osf_group.managers) == 2 - res = app.patch_json_api(manager_url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' 
- - -@pytest.mark.django_db -class TestOSFGroupMembersDelete: - def test_delete_perms(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.delete_json_api(url, expect_errors=True) - assert res.status_code == 401 - - # test user - res = app.delete_json_api(url, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - - # test member - osf_group.make_member(user) - user_url = f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/' - res = app.delete_json_api(user_url, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - # test manager - assert osf_group.is_member(member) is True - assert osf_group.is_manager(member) is False - - res = app.delete_json_api(url, auth=manager.auth) - assert res.status_code == 204 - assert osf_group.is_member(member) is False - assert osf_group.is_manager(member) is False - - # test delete manager (not last manager) - osf_group.make_manager(user) - assert osf_group.is_member(user) is True - assert osf_group.is_manager(user) is True - user_url = f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/' - res = app.delete_json_api(user_url, auth=user.auth) - assert res.status_code == 204 - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - def test_delete_yourself(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - assert osf_group.is_member(member) is True - assert osf_group.is_manager(member) is False - res = app.delete_json_api(url, auth=member.auth, expect_errors=True) - assert res.status_code == 204 - assert osf_group.is_member(member) is False - assert osf_group.is_manager(member) is False - - def test_delete_errors(self, app, member, manager, user, osf_group, url, bad_url): - with override_flag(OSF_GROUPS, active=True): - # test invalid user - res = app.delete_json_api(bad_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - - # test user does not belong to group - bad_url = f'/{API_BASE}groups/{osf_group._id}/members/{user._id}/' - res = app.delete_json_api(bad_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == f'{user._id} cannot be found in this OSFGroup' - - # test user is last manager - manager_url = f'/{API_BASE}groups/{osf_group._id}/members/{manager._id}/' - res = app.delete_json_api(manager_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' - - # test user is last registered manager - osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER) - assert len(osf_group.managers) == 2 - res = app.delete_json_api(manager_url, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' 
diff --git a/api_tests/osf_groups/views/test_osf_group_members_list.py b/api_tests/osf_groups/views/test_osf_group_members_list.py deleted file mode 100644 index 6018016fb0c..00000000000 --- a/api_tests/osf_groups/views/test_osf_group_members_list.py +++ /dev/null @@ -1,626 +0,0 @@ -import pytest -from waffle.testutils import override_flag - -from django.utils import timezone - -from framework.auth.core import Auth -from api.base.settings.defaults import API_BASE -from osf.models import OSFUser -from osf.utils.permissions import MEMBER, MANAGE, MANAGER -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def old_name(): - return 'Platform Team' - -@pytest.fixture() -def user3(osf_group): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(manager, member, old_name): - group = OSFGroupFactory(name=old_name, creator=manager) - group.make_member(member) - return group - -@pytest.fixture() -def url(osf_group): - return f'/{API_BASE}groups/{osf_group._id}/members/' - - -@pytest.mark.django_db -class TestGroupMembersList: - def test_return_perms(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test unauthenticated - res = app.get(url) - assert res.status_code == 200 - - # test user - res = app.get(url, auth=user.auth) - assert res.status_code == 200 - - # test member - res = app.get(url, auth=member.auth) - assert res.status_code == 200 - - # test manager - res = app.get(url, auth=manager.auth) - assert res.status_code == 200 - - # test invalid group - url = '/{}groups/{}/members/'.format(API_BASE, '12345_bad_id') - res = app.get(url, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - - def test_return_members(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - res = app.get(url) - data = res.json['data'] - assert len(data) == 2 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{manager._id}' in member_ids - assert f'{osf_group._id}-{member._id}' in member_ids - - -@pytest.mark.django_db -class TestOSFGroupMembersFilter: - def test_filtering(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test filter members - url_filter = url + '?filter[role]=member' - res = app.get(url_filter) - data = res.json['data'] - assert len(data) == 1 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{member._id}' in member_ids - - # test filter managers - url_filter = url + '?filter[role]=manager' - res = app.get(url_filter) - data = res.json['data'] - assert len(data) == 1 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{manager._id}' in member_ids - - # test invalid role - url_filter = url + '?filter[role]=bad_role' - res = app.get(url_filter, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == "Value \'bad_role\' is not valid." 
- - # test filter fullname - url_filter = url + f'?filter[full_name]={manager.fullname}' - res = app.get(url_filter) - data = res.json['data'] - assert len(data) == 1 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{manager._id}' in member_ids - - # test filter fullname - url_filter = url + f'?filter[full_name]={member.fullname}' - res = app.get(url_filter) - data = res.json['data'] - assert len(data) == 1 - member_ids = [mem['id'] for mem in data] - assert f'{osf_group._id}-{member._id}' in member_ids - - # test invalid filter - url_filter = url + '?filter[created]=2018-02-01' - res = app.get(url_filter, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == "\'created\' is not a valid field for this endpoint." - -def make_create_payload(role, user=None, full_name=None, email=None): - base_payload = { - 'data': { - 'type': 'group-members', - 'attributes': { - 'role': role - } - } - } - if user: - base_payload['data']['relationships'] = { - 'users': { - 'data': { - 'id': user._id, - 'type': 'users' - } - } - } - else: - if full_name: - base_payload['data']['attributes']['full_name'] = full_name - if email: - base_payload['data']['attributes']['email'] = email - - return base_payload - -@pytest.mark.django_db -class TestOSFGroupMembersCreate: - def test_create_manager(self, app, manager, user3, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload = make_create_payload(MANAGER, user3) - res = app.post_json_api(url, payload, auth=manager.auth) - assert res.status_code == 201 - data = res.json['data'] - assert data['attributes']['role'] == MANAGER - assert data['attributes']['full_name'] == user3.fullname - assert data['attributes']['unregistered_member'] is None - assert data['id'] == f'{osf_group._id}-{user3._id}' - assert user3._id in data['relationships']['users']['links']['related']['href'] - assert osf_group.has_permission(user3, MANAGE) is True - - def test_create_member(self, app, member, manager, user3, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload = make_create_payload(MEMBER, user3) - res = app.post_json_api(url, payload, auth=manager.auth) - assert res.status_code == 201 - data = res.json['data'] - assert data['attributes']['role'] == MEMBER - assert data['attributes']['full_name'] == user3.fullname - assert data['attributes']['unregistered_member'] is None - assert data['id'] == f'{osf_group._id}-{user3._id}' - assert data['id'] == f'{osf_group._id}-{user3._id}' - assert user3._id in data['relationships']['users']['links']['related']['href'] - assert osf_group.has_permission(user3, MANAGE) is False - assert osf_group.has_permission(user3, MEMBER) is True - - def test_add_unregistered_member(self, app, manager, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - full_name = 'Crazy 8s' - payload = make_create_payload(MEMBER, user=None, full_name=full_name, email='eight@cos.io') - res = app.post_json_api(url, payload, auth=manager.auth) - assert res.status_code == 201 - data = res.json['data'] - assert data['attributes']['role'] == MEMBER - user = OSFUser.load(data['id'].split('-')[1]) - assert user._id in data['relationships']['users']['links']['related']['href'] - assert osf_group.has_permission(user, MANAGE) is False - assert data['attributes']['full_name'] == full_name - assert data['attributes']['unregistered_member'] == full_name - assert osf_group.has_permission(user, MEMBER) is True - assert user in osf_group.members_only - assert user not in 
osf_group.managers - - # test unregistered user is already a member - res = app.post_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'User already exists.' - - # test unregistered user email is blocked - payload['data']['attributes']['email'] = 'eight@example.com' - res = app.post_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Email address domain is blocked.' - - def test_create_member_perms(self, app, manager, member, osf_group, user3, url): - with override_flag(OSF_GROUPS, active=True): - payload = make_create_payload(MEMBER, user3) - # Unauthenticated - res = app.post_json_api(url, payload, expect_errors=True) - assert res.status_code == 401 - - # Logged in, nonmember - res = app.post_json_api(url, payload, auth=user3.auth, expect_errors=True) - assert res.status_code == 403 - - # Logged in, nonmanager - res = app.post_json_api(url, payload, auth=member.auth, expect_errors=True) - assert res.status_code == 403 - - def test_create_members_errors(self, app, manager, member, user3, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # invalid user - bad_user_payload = make_create_payload(MEMBER, user=user3) - bad_user_payload['data']['relationships']['users']['data']['id'] = 'bad_user_id' - res = app.post_json_api(url, bad_user_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == 'User with id bad_user_id not found.' - - # invalid type - bad_type_payload = make_create_payload(MEMBER, user=user3) - bad_type_payload['data']['type'] = 'bad_type' - res = app.post_json_api(url, bad_type_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # invalid role - bad_perm_payload = make_create_payload('bad_role', user=user3) - res = app.post_json_api(url, bad_perm_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_role is not a valid role; choose manager or member.' - - # fullname not included - unregistered_payload = make_create_payload(MEMBER, user=None, full_name=None, email='eight@cos.io') - res = app.post_json_api(url, unregistered_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'You must provide a full_name/email combination to add an unconfirmed member.' - - # email not included - unregistered_payload = make_create_payload(MEMBER, user=None, full_name='Crazy 8s', email=None) - res = app.post_json_api(url, unregistered_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'You must provide a full_name/email combination to add an unconfirmed member.' - - # user is already a member - existing_member_payload = make_create_payload(MEMBER, user=member) - res = app.post_json_api(url, existing_member_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'User is already a member of this group.' - - # Disabled user - user3.date_disabled = timezone.now() - user3.save() - payload = make_create_payload(MEMBER, user=user3) - res = app.post_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Deactivated users cannot be added to OSF Groups.' 
- - # No role specified - given member by default - user3.date_disabled = None - user3.save() - payload = make_create_payload(MEMBER, user=user3) - payload['attributes'] = {} - res = app.post_json_api(url, payload, auth=manager.auth) - assert res.status_code == 201 - assert res.json['data']['attributes']['role'] == MEMBER - assert osf_group.has_permission(user3, 'member') - assert not osf_group.has_permission(user3, 'manager') - -def make_bulk_create_payload(role, user=None, full_name=None, email=None): - base_payload = { - 'type': 'group-members', - 'attributes': { - 'role': role - } - } - - if user: - base_payload['relationships'] = { - 'users': { - 'data': { - 'id': user._id, - 'type': 'users' - } - } - } - else: - if full_name: - base_payload['attributes']['full_name'] = full_name - if email: - base_payload['attributes']['email'] = email - - return base_payload - -@pytest.mark.django_db -class TestOSFGroupMembersBulkCreate: - def test_bulk_create_group_member_perms(self, app, url, manager, member, user, user3, osf_group): - with override_flag(OSF_GROUPS, active=True): - payload_user_three = make_bulk_create_payload(MANAGER, user3) - payload_user = make_bulk_create_payload(MEMBER, user) - bulk_payload = [payload_user_three, payload_user] - - # unauthenticated - res = app.post_json_api(url, {'data': bulk_payload}, expect_errors=True, bulk=True) - assert res.status_code == 401 - - # non member - res = app.post_json_api(url, {'data': bulk_payload}, auth=user.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # member - res = app.post_json_api(url, {'data': bulk_payload}, auth=member.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # manager - res = app.post_json_api(url, {'data': bulk_payload}, auth=manager.auth, bulk=True) - assert res.status_code == 201 - assert len(res.json['data']) == 2 - - assert osf_group.is_member(user) is True - assert osf_group.is_member(user3) is True - assert osf_group.is_manager(user) is False - assert osf_group.is_manager(user3) is True - - def test_bulk_create_unregistered(self, app, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload_user = make_bulk_create_payload(MEMBER, user) - payload_unregistered = make_bulk_create_payload(MEMBER, user=None, full_name='Crazy 8s', email='eight@cos.io') - res = app.post_json_api(url, {'data': [payload_user, payload_unregistered]}, auth=manager.auth, bulk=True) - unreg_user = OSFUser.objects.get(username='eight@cos.io') - assert res.status_code == 201 - ids = [user_data['id'] for user_data in res.json['data']] - roles = [user_data['attributes']['role'] for user_data in res.json['data']] - assert f'{osf_group._id}-{user._id}' in ids - assert f'{osf_group._id}-{unreg_user._id}' in ids - assert roles[0] == MEMBER - assert roles[1] == MEMBER - unregistered_names = [user_data['attributes']['unregistered_member'] for user_data in res.json['data']] - assert {'Crazy 8s', None} == set(unregistered_names) - - assert osf_group.has_permission(user, MANAGE) is False - assert osf_group.has_permission(user, MEMBER) is True - assert osf_group.has_permission(unreg_user, MANAGE) is False - assert osf_group.has_permission(unreg_user, MEMBER) is True - assert osf_group.is_member(unreg_user) is True - assert osf_group.is_manager(unreg_user) is False - - def test_bulk_create_group_member_errors(self, app, url, manager, member, user, user3, osf_group): - with override_flag(OSF_GROUPS, active=True): - payload_member = make_bulk_create_payload(MANAGER, member) - 
payload_user = make_bulk_create_payload(MANAGER, user) - - # User in bulk payload is an invalid user - bad_user_payload = make_bulk_create_payload(MEMBER, user=user3) - bad_user_payload['relationships']['users']['data']['id'] = 'bad_user_id' - bulk_payload = [payload_user, bad_user_payload] - res = app.post_json_api(url, {'data': bulk_payload}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == 'User with id bad_user_id not found.' - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - # User in bulk payload is invalid - bad_type_payload = make_bulk_create_payload(MEMBER, user=user3) - bad_type_payload['type'] = 'bad_type' - bulk_payload = [payload_user, bad_type_payload] - res = app.post_json_api(url, {'data': bulk_payload}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 409 - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - # User in bulk payload has invalid role specified - bad_role_payload = make_bulk_create_payload('bad_role', user=user3) - res = app.post_json_api(url, {'data': [payload_user, bad_role_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_role is not a valid role; choose manager or member.' - assert osf_group.is_member(user3) is False - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user3) is False - assert osf_group.is_manager(user) is False - - # fullname not included - unregistered_payload = make_bulk_create_payload(MEMBER, user=None, full_name=None, email='eight@cos.io') - res = app.post_json_api(url, {'data': [payload_user, unregistered_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'You must provide a full_name/email combination to add an unconfirmed member.' - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - # email not included - unregistered_payload = make_bulk_create_payload(MEMBER, user=None, full_name='Crazy 8s', email=None) - res = app.post_json_api(url, {'data': [payload_user, unregistered_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'You must provide a full_name/email combination to add an unconfirmed member.' - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - # Member of bulk payload is already a member - bulk_payload = [payload_member, payload_user] - res = app.post_json_api(url, {'data': bulk_payload}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'User is already a member of this group.' - assert osf_group.is_member(member) is True - assert osf_group.is_member(user) is False - assert osf_group.is_manager(member) is False - assert osf_group.is_manager(user) is False - - # Disabled user - user3.date_disabled = timezone.now() - user3.save() - payload = make_bulk_create_payload(MEMBER, user=user3) - res = app.post_json_api(url, {'data': [payload_user, payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Deactivated users cannot be added to OSF Groups.' 
- - # No role specified, given member by default - user3.date_disabled = None - user3.save() - payload = make_bulk_create_payload(MEMBER, user=user3) - payload['attributes'] = {} - res = app.post_json_api(url, {'data': [payload_user, payload]}, auth=manager.auth, bulk=True) - assert res.status_code == 201 - assert len(res.json['data']) == 2 - ids = [user_data['id'] for user_data in res.json['data']] - assert f'{osf_group._id}-{user._id}' in ids - assert f'{osf_group._id}-{user3._id}' in ids - assert osf_group.is_member(user3) is True - assert osf_group.is_member(user) is True - assert osf_group.is_manager(user3) is False - assert osf_group.is_manager(user) is True - -def build_bulk_update_payload(group_id, user_id, role): - return { - 'id': f'{group_id}-{user_id}', - 'type': 'group-members', - 'attributes': { - 'role': role - } - } - - -@pytest.mark.django_db -class TestOSFGroupMembersBulkUpdate: - def test_update_role(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - payload = build_bulk_update_payload(osf_group._id, member._id, MANAGER) - bulk_payload = {'data': [payload]} - - # test unauthenticated - res = app.patch_json_api(url, bulk_payload, expect_errors=True, bulk=True) - assert res.status_code == 401 - - # test user - res = app.patch_json_api(url, bulk_payload, auth=user.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # test member - res = app.patch_json_api(url, bulk_payload, auth=member.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # test manager - res = app.patch_json_api(url, bulk_payload, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 200 - assert res.json['data'][0]['attributes']['role'] == MANAGER - assert res.json['data'][0]['attributes']['full_name'] == member.fullname - assert res.json['data'][0]['id'] == f'{osf_group._id}-{member._id}' - - payload = build_bulk_update_payload(osf_group._id, member._id, MEMBER) - bulk_payload = {'data': [payload]} - res = app.patch_json_api(url, bulk_payload, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 200 - assert res.json['data'][0]['attributes']['role'] == MEMBER - assert res.json['data'][0]['attributes']['full_name'] == member.fullname - assert res.json['data'][0]['id'] == f'{osf_group._id}-{member._id}' - - def test_bulk_update_errors(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # id not in payload - payload = { - 'type': 'group-members', - 'attributes': { - 'role': MEMBER - } - } - bulk_payload = {'data': [payload]} - - res = app.patch_json_api(url, bulk_payload, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Member identifier not provided.' - - # test improperly formatted id - payload = build_bulk_update_payload(osf_group._id, member._id, MANAGER) - payload['id'] = 'abcde' - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Member identifier incorrectly formatted.' 
- - # test improper type - payload = build_bulk_update_payload(osf_group._id, member._id, MANAGER) - payload['type'] = 'bad_type' - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 409 - - # test invalid role - payload = build_bulk_update_payload(osf_group._id, member._id, 'bad_perm') - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'bad_perm is not a valid role; choose manager or member.' - - # test user is not a member - payload = build_bulk_update_payload(osf_group._id, user._id, MEMBER) - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Could not find all objects to update.' - - # test cannot downgrade remaining manager - payload = build_bulk_update_payload(osf_group._id, manager._id, MEMBER) - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' - - # test cannot remove last confirmed manager - osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER) - assert len(osf_group.managers) == 2 - res = app.patch_json_api(url, {'data': [payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' - -def create_bulk_delete_payload(group_id, user_id): - return { - 'id': f'{group_id}-{user_id}', - 'type': 'group-members' - } - -@pytest.mark.django_db -class TestOSFGroupMembersBulkDelete: - def test_delete_perms(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - member_payload = create_bulk_delete_payload(osf_group._id, member._id) - bulk_payload = {'data': [member_payload]} - # test unauthenticated - res = app.delete_json_api(url, bulk_payload, expect_errors=True, bulk=True) - assert res.status_code == 401 - - # test user - res = app.delete_json_api(url, bulk_payload, auth=user.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # test member - res = app.delete_json_api(url, bulk_payload, auth=member.auth, expect_errors=True, bulk=True) - assert res.status_code == 403 - - # test manager - assert osf_group.is_member(member) is True - assert osf_group.is_manager(member) is False - - res = app.delete_json_api(url, bulk_payload, auth=manager.auth, bulk=True) - assert res.status_code == 204 - assert osf_group.is_member(member) is False - assert osf_group.is_manager(member) is False - - # test user does not belong to OSF Group - osf_group.make_manager(user) - assert osf_group.is_member(user) is True - assert osf_group.is_manager(user) is True - user_payload = create_bulk_delete_payload(osf_group._id, user._id) - bulk_payload = {'data': [user_payload, member_payload]} - res = app.delete_json_api(url, bulk_payload, auth=user.auth, bulk=True, expect_errors=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == f'{member._id} cannot be found in this OSFGroup' - - # test bulk delete manager (not last one) - osf_group.make_manager(user) - assert osf_group.is_member(user) is True - assert osf_group.is_manager(user) is True - user_payload = 
create_bulk_delete_payload(osf_group._id, user._id) - bulk_payload = {'data': [user_payload]} - res = app.delete_json_api(url, bulk_payload, auth=user.auth, bulk=True) - assert res.status_code == 204 - assert osf_group.is_member(user) is False - assert osf_group.is_manager(user) is False - - def test_delete_errors(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test invalid user - invalid_payload = create_bulk_delete_payload(osf_group._id, '12345') - res = app.delete_json_api(url, {'data': [invalid_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Could not find all objects to delete.' - - # test user does not belong to group - invalid_payload = create_bulk_delete_payload(osf_group._id, user._id) - res = app.delete_json_api(url, {'data': [invalid_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 404 - assert res.json['errors'][0]['detail'] == f'{user._id} cannot be found in this OSFGroup' - - # test user is last manager - invalid_payload = create_bulk_delete_payload(osf_group._id, manager._id) - res = app.delete_json_api(url, {'data': [invalid_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' - - # test user is last registered manager - osf_group.add_unregistered_member('Crazy 8s', 'eight@cos.io', Auth(manager), MANAGER) - assert len(osf_group.managers) == 2 - res = app.delete_json_api(url, {'data': [invalid_payload]}, auth=manager.auth, expect_errors=True, bulk=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Group must have at least one manager.' 
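For reference, the bulk group-membership tests above all revolve around a few JSON:API 'group-members' payload shapes (built by make_bulk_create_payload, build_bulk_update_payload and create_bulk_delete_payload). The sketch below is illustrative only, not code from this patch: the sketch_* helper names are made up, and the exact relationship body for registered users is inferred from the tests (which set relationships.users.data.id), with role values 'manager' or 'member' as the error messages above require. Payloads are posted as {'data': [...]} to the group members list url fixture defined earlier in the module.

# Hedged sketch of the bulk 'group-members' payload shapes exercised above.
def sketch_bulk_create(role, user_id=None, full_name=None, email=None):
    # Registered users are referenced through a 'users' relationship;
    # unregistered members are described by full_name + email attributes.
    payload = {'type': 'group-members', 'attributes': {'role': role}}
    if user_id:
        payload['relationships'] = {'users': {'data': {'id': user_id, 'type': 'users'}}}
    else:
        payload['attributes'].update({'full_name': full_name, 'email': email})
    return payload

def sketch_bulk_update(group_id, user_id, role):
    # Updates and deletes address an existing membership by its compound
    # '<group id>-<user id>' identifier, as the formatting checks above show.
    return {'id': f'{group_id}-{user_id}', 'type': 'group-members',
            'attributes': {'role': role}}

def sketch_bulk_delete(group_id, user_id):
    return {'id': f'{group_id}-{user_id}', 'type': 'group-members'}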
diff --git a/api_tests/osf_groups/views/test_osf_groups_list.py b/api_tests/osf_groups/views/test_osf_groups_list.py deleted file mode 100644 index 89ef26ab96b..00000000000 --- a/api_tests/osf_groups/views/test_osf_groups_list.py +++ /dev/null @@ -1,151 +0,0 @@ -import pytest -from waffle.testutils import override_flag - -from api.base.settings.defaults import API_BASE -from osf.models import OSFGroup -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(manager, member): - group = OSFGroupFactory(name='Platform Team', creator=manager) - group.make_member(member) - return group - -@pytest.mark.django_db -class TestGroupList: - - @pytest.fixture() - def url(self): - return f'/{API_BASE}groups/' - - def test_return(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - # test nonauthenticated - res = app.get(url) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - # test authenticated user - res = app.get(url, auth=user.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - # test authenticated member - res = app.get(url, auth=member.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - assert data[0]['type'] == 'groups' - assert data[0]['attributes']['name'] == osf_group.name - - # test authenticated manager - res = app.get(url, auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - assert data[0]['type'] == 'groups' - assert data[0]['attributes']['name'] == osf_group.name - - def test_groups_filter(self, app, member, manager, user, osf_group, url): - with override_flag(OSF_GROUPS, active=True): - second_group = OSFGroupFactory(name='Apples', creator=manager) - res = app.get(url + '?filter[name]=Platform', auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - - res = app.get(url + '?filter[name]=Apple', auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == second_group._id - - res = app.get(url + '?filter[bad_field]=Apple', auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - - res = app.get(url + '?filter[name]=Platform') - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - res = app.get(url + '?filter[name]=Apple') - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - -@pytest.mark.django_db -class TestOSFGroupCreate: - @pytest.fixture() - def url(self): - return f'/{API_BASE}groups/' - - @pytest.fixture() - def simple_payload(self): - return { - 'data': { - 'type': 'groups', - 'attributes': { - 'name': 'My New Lab' - }, - } - } - - def test_create_osf_group(self, app, url, manager, simple_payload): - # Nonauthenticated - with override_flag(OSF_GROUPS, active=True): - res = app.post_json_api(url, simple_payload, expect_errors=True) - assert res.status_code == 401 - - # Authenticated - res = app.post_json_api(url, simple_payload, auth=manager.auth) - assert res.status_code == 201 - assert 
res.json['data']['type'] == 'groups' - assert res.json['data']['attributes']['name'] == 'My New Lab' - group = OSFGroup.objects.get(_id=res.json['data']['id']) - assert group.creator_id == manager.id - assert group.has_permission(manager, 'manage') is True - assert group.has_permission(manager, 'member') is True - - def test_create_osf_group_validation_errors(self, app, url, manager, simple_payload): - # Need data key - with override_flag(OSF_GROUPS, active=True): - res = app.post_json_api(url, simple_payload['data'], auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Request must include /data.' - - # Incorrect type - simple_payload['data']['type'] = 'incorrect_type' - res = app.post_json_api(url, simple_payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 409 - - # Required name field - payload = { - 'data': { - 'type': 'groups' - } - } - res = app.post_json_api(url, payload, auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'This field is required.' diff --git a/api_tests/registrations/views/test_registration_detail.py b/api_tests/registrations/views/test_registration_detail.py index 808ddc6b98d..5a08bdc378f 100644 --- a/api_tests/registrations/views/test_registration_detail.py +++ b/api_tests/registrations/views/test_registration_detail.py @@ -26,7 +26,6 @@ AuthUserFactory, UnregUserFactory, WithdrawnRegistrationFactory, - OSFGroupFactory, CommentFactory, InstitutionFactory, ) @@ -394,27 +393,6 @@ def test_update_registration( assert res.status_code == 403 assert res.json['errors'][0]['detail'] == 'You do not have permission to perform this action.' - # test_osf_group_member_write_cannot_update_registration - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - public_project.add_osf_group(group, permissions.WRITE) - res = app.put_json_api( - public_url, - public_to_private_payload, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 403 - - # test_osf_group_member_admin_cannot_update_registration - public_project.remove_osf_group(group) - public_project.add_osf_group(group, permissions.ADMIN) - res = app.put_json_api( - public_url, - public_to_private_payload, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 403 - def test_fields( self, app, user, public_registration, private_registration, public_url, institution_one, diff --git a/api_tests/registrations/views/test_registration_list.py b/api_tests/registrations/views/test_registration_list.py index 894c448b5b4..3629d0fe6b7 100644 --- a/api_tests/registrations/views/test_registration_list.py +++ b/api_tests/registrations/views/test_registration_list.py @@ -20,7 +20,6 @@ AuthUserFactory, CollectionFactory, DraftRegistrationFactory, - OSFGroupFactory, NodeLicenseRecordFactory, TagFactory, SubjectFactory, @@ -838,13 +837,6 @@ def test_cannot_create_registration( res = app.post_json_api(url_registrations, payload, expect_errors=True) assert res.status_code == 401 - # admin via a group cannot create registration - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_public.add_osf_group(group, permissions.ADMIN) - res = app.post_json_api(url_registrations, payload, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - @mock.patch('framework.celery_tasks.handlers.enqueue_task') def test_registration_draft_must_be_specified( self, mock_enqueue, app, user, payload, 
url_registrations): @@ -1564,7 +1556,6 @@ def test_registration_draft_must_be_draft_of_current_node( def test_need_admin_perms_on_draft( self, mock_enqueue, app, user, schema, payload_ver, url_registrations_ver): user_two = AuthUserFactory() - group = OSFGroupFactory(creator=user) # User is an admin contributor on draft registration but not on node draft_registration = DraftRegistrationFactory(creator=user_two, registration_schema=schema) @@ -1590,17 +1581,6 @@ def test_need_admin_perms_on_draft( res = app.post_json_api(url_registrations_ver, payload_ver, auth=user.auth) assert res.status_code == 201 - # User is an admin group contributor on the node but not on draft registration - draft_registration = DraftRegistrationFactory(creator=user_two, registration_schema=schema) - draft_registration.branched_from.add_osf_group(group, permissions.ADMIN) - payload_ver['data']['attributes']['draft_registration_id'] = draft_registration._id - assert draft_registration.branched_from.is_admin_contributor(user) is False - assert draft_registration.branched_from.has_permission(user, permissions.ADMIN) is True - assert draft_registration.has_permission(user, permissions.ADMIN) is False - res = app.post_json_api(url_registrations_ver, payload_ver, auth=user.auth, expect_errors=True) - assert res.status_code == 403 - assert res.json['errors'][0]['detail'] == 'You must be an admin contributor on the draft registration to create a registration.' - # User is an admin contributor on node but not on draft registration draft_registration = DraftRegistrationFactory(creator=user_two, registration_schema=schema) draft_registration.add_contributor(user, permissions.WRITE) diff --git a/api_tests/sparse/test_sparse_node_list.py b/api_tests/sparse/test_sparse_node_list.py index 8df12d60e4c..8673f074d74 100644 --- a/api_tests/sparse/test_sparse_node_list.py +++ b/api_tests/sparse/test_sparse_node_list.py @@ -3,14 +3,12 @@ from api.base.settings.defaults import API_BASE from framework.auth.core import Auth from osf.models import AbstractNode -from osf.utils import permissions from osf_tests.factories import ( CollectionFactory, ProjectFactory, AuthUserFactory, PreprintFactory, InstitutionFactory, - OSFGroupFactory, DraftNodeFactory, ) from website.views import find_bookmark_collection @@ -111,14 +109,6 @@ def test_return( assert private_project._id not in ids assert draft_node._id not in ids - # test_returns_nodes_through_which_you_have_perms_through_osf_groups - group = OSFGroupFactory(creator=user) - another_project = ProjectFactory() - another_project.add_osf_group(group, permissions.READ) - res = app.get(sparse_url, auth=user.auth) - ids = [each['id'] for each in res.json['data']] - assert another_project._id in ids - def test_node_list_has_proper_root(self, app, user, sparse_url): project_one = ProjectFactory(title='Project One', is_public=True) ProjectFactory(parent=project_one, is_public=True) diff --git a/api_tests/users/serializers/test_serializers.py b/api_tests/users/serializers/test_serializers.py index f9e2dbb9cff..f0967890b06 100644 --- a/api_tests/users/serializers/test_serializers.py +++ b/api_tests/users/serializers/test_serializers.py @@ -9,7 +9,6 @@ PreprintFactory, ProjectFactory, InstitutionFactory, - OSFGroupFactory, ) from tests.utils import make_drf_request_with_version from django.utils import timezone @@ -92,16 +91,6 @@ def public_project(user): def deleted_project(user): return ProjectFactory(creator=user, is_deleted=True) -@pytest.fixture() -def group(user): - return 
OSFGroupFactory(creator=user, name='Platform') - -@pytest.fixture() -def group_project(group): - project = ProjectFactory() - project.add_osf_group(group) - return project - def pytest_generate_tests(metafunc): # called once per each test function @@ -210,9 +199,7 @@ def test_related_counts_equal_related_views(self, private_preprint, withdrawn_preprint, unpublished_preprint, # not in the view/related counts by default - deleted_preprint, - group, - group_project): + deleted_preprint): view_count = self.get_view_count(user, field_name, auth=user) related_count = self.get_related_count(user, field_name, auth=user) diff --git a/api_tests/users/views/test_user_list.py b/api_tests/users/views/test_user_list.py index 5649411c551..715ce328b42 100644 --- a/api_tests/users/views/test_user_list.py +++ b/api_tests/users/views/test_user_list.py @@ -14,7 +14,6 @@ from osf_tests.factories import ( AuthUserFactory, UserFactory, - OSFGroupFactory, ProjectFactory, ApiOAuth2ScopeFactory, RegistrationFactory, @@ -100,47 +99,6 @@ def test_find_no_user_in_users(self, app, user_one, user_two): assert user_one._id not in ids assert user_two._id not in ids - def test_more_than_one_projects_in_common(self, app, user_one, user_two): - group = OSFGroupFactory(creator=user_one) - group.make_member(user_two) - - project1 = ProjectFactory(creator=user_one) - project1.add_contributor( - contributor=user_two, - permissions=CREATOR_PERMISSIONS, - auth=Auth(user=user_one) - ) - project1.save() - project2 = ProjectFactory(creator=user_one) - project2.add_contributor( - contributor=user_two, - permissions=CREATOR_PERMISSIONS, - auth=Auth(user=user_one) - ) - project2.save() - - project3 = ProjectFactory() - project4 = ProjectFactory() - project3.add_osf_group(group) - project4.add_osf_group(group) - project4.is_deleted = True - project3.save() - project4.save() - - RegistrationFactory( - project=project1, - creator=user_one, - is_public=True) - - url = f'/{API_BASE}users/?show_projects_in_common=true' - res = app.get(url, auth=user_two.auth) - user_json = res.json['data'] - for user in user_json: - if user['id'] == user_two._id: - meta = user['relationships']['nodes']['links']['related']['meta'] - assert 'projects_in_common' in meta - assert meta['projects_in_common'] == 4 - def test_users_projects_in_common(self, app, user_one, user_two): user_one.fullname = 'hello' user_one.save() diff --git a/api_tests/users/views/test_user_nodes_list.py b/api_tests/users/views/test_user_nodes_list.py index 8d39119e387..99deaf40a1e 100644 --- a/api_tests/users/views/test_user_nodes_list.py +++ b/api_tests/users/views/test_user_nodes_list.py @@ -7,7 +7,6 @@ from osf_tests.factories import ( AuthUserFactory, CollectionFactory, - OSFGroupFactory, PreprintFactory, ProjectFactory, RegistrationFactory, @@ -175,24 +174,6 @@ def test_user_nodes( assert public_project_user_one._id == ids[1] assert private_project_user_one._id == ids[0] - # test_osf_group_member_node_shows_up_in_user_nodes - group_mem = AuthUserFactory() - url = f'/{API_BASE}users/{group_mem._id}/nodes/' - res = app.get(url, auth=group_mem.auth) - assert len(res.json['data']) == 0 - - group = OSFGroupFactory(creator=group_mem) - private_project_user_one.add_osf_group(group, permissions.READ) - res = app.get(url, auth=group_mem.auth) - assert len(res.json['data']) == 1 - - res = app.get(url, auth=user_one.auth) - assert len(res.json['data']) == 1 - - private_project_user_one.delete() - res = app.get(url, auth=user_one.auth) - assert len(res.json['data']) == 0 - 
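The hunk above drops coverage that depended on add_osf_group granting implicit node access. Elsewhere in this patch (osf_tests/test_draft_registration.py) the same situation is rewritten in terms of direct contributorship; the sketch below shows that replacement pattern for a user-nodes assertion. It is a hedged sketch, not part of the patch: the test name is invented and the app/user_one/private_project_user_one fixtures are assumed to exist as in the surrounding module.

# Sketch: asserting node visibility via direct contributorship instead of an OSF group.
from api.base.settings.defaults import API_BASE
from framework.auth.core import Auth
from osf.utils import permissions
from osf_tests.factories import AuthUserFactory

def test_contributor_node_shows_up_in_user_nodes(app, user_one, private_project_user_one):
    contrib = AuthUserFactory()
    url = f'/{API_BASE}users/{contrib._id}/nodes/'

    # No access yet: the listing is empty.
    assert len(app.get(url, auth=contrib.auth).json['data']) == 0

    # Direct read contributorship replaces the removed add_osf_group(..., READ) call.
    private_project_user_one.add_contributor(contrib, permissions.READ, auth=Auth(user_one))
    private_project_user_one.save()
    assert len(app.get(url, auth=contrib.auth).json['data']) == 1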
@pytest.mark.django_db class TestUserNodesPreprintsFiltering: @@ -320,27 +301,6 @@ def test_current_user_permissions_filter(self, app, url, contrib, no_perm_node, res = app.get(f'{url}null', auth=contrib.auth, expect_errors=True) assert res.status_code == 400 - user2 = AuthUserFactory() - osf_group = OSFGroupFactory(creator=user2) - read_node.add_osf_group(osf_group, permissions.READ) - write_node.add_osf_group(osf_group, permissions.WRITE) - admin_node.add_osf_group(osf_group, permissions.ADMIN) - - # test filter group member read - res = app.get(f'{url}read', auth=user2.auth) - assert len(res.json['data']) == 3 - assert {read_node._id, write_node._id, admin_node._id} == {node['id'] for node in res.json['data']} - - # test filter group member write - res = app.get(f'{url}write', auth=user2.auth) - assert len(res.json['data']) == 2 - assert {admin_node._id, write_node._id} == {node['id'] for node in res.json['data']} - - # test filter group member admin - res = app.get(f'{url}admin', auth=user2.auth) - assert len(res.json['data']) == 1 - assert [admin_node._id] == [node['id'] for node in res.json['data']] - def test_filter_my_current_user_permissions_to_other_users_nodes(self, app, contrib, no_perm_node, read_node, write_node, admin_node): url = f'/{API_BASE}users/{contrib._id}/nodes/?filter[current_user_permissions]=' diff --git a/api_tests/users/views/test_user_osf_groups_list.py b/api_tests/users/views/test_user_osf_groups_list.py deleted file mode 100644 index 47c92e726d2..00000000000 --- a/api_tests/users/views/test_user_osf_groups_list.py +++ /dev/null @@ -1,119 +0,0 @@ -import pytest -from waffle.testutils import override_flag - -from api.base.settings.defaults import API_BASE -from osf_tests.factories import ( - AuthUserFactory, - OSFGroupFactory, -) -from osf.features import OSF_GROUPS - - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def osf_group(manager, member): - group = OSFGroupFactory(name='Platform Team', creator=manager) - group.make_member(member) - return group - -@pytest.fixture() -def second_osf_group(manager, member): - group = OSFGroupFactory(name='Interfaces Team', creator=manager) - return group - - -@pytest.mark.django_db -class TestUserGroupList: - - @pytest.fixture() - def manager_url(self, manager): - return f'/{API_BASE}users/{manager._id}/groups/' - - @pytest.fixture() - def member_url(self, member): - return f'/{API_BASE}users/{member._id}/groups/' - - def test_return_manager_groups(self, app, member, manager, user, osf_group, second_osf_group, manager_url): - with override_flag(OSF_GROUPS, active=True): - # test nonauthenticated - res = app.get(manager_url) - assert res.status_code == 200 - ids = [group['id'] for group in res.json['data']] - assert len(ids) == 0 - - # test authenticated user - res = app.get(manager_url, auth=user.auth) - assert res.status_code == 200 - ids = [group['id'] for group in res.json['data']] - assert len(ids) == 0 - - # test authenticated member - res = app.get(manager_url, auth=member.auth) - assert res.status_code == 200 - ids = [group['id'] for group in res.json['data']] - assert len(ids) == 1 - - # test authenticated manager - res = app.get(manager_url, auth=manager.auth) - assert res.status_code == 200 - ids = [group['id'] for group in res.json['data']] - assert len(ids) == 2 - assert osf_group._id in ids - assert second_osf_group._id in ids - - def 
test_groups_filter(self, app, member, manager, user, osf_group, second_osf_group, manager_url): - with override_flag(OSF_GROUPS, active=True): - res = app.get(manager_url + '?filter[name]=Platform', auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - - res = app.get(manager_url + '?filter[name]=Apple', auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - res = app.get(manager_url + '?filter[bad_field]=Apple', auth=manager.auth, expect_errors=True) - assert res.status_code == 400 - - def test_return_member_groups(self, app, member, manager, user, osf_group, second_osf_group, member_url): - with override_flag(OSF_GROUPS, active=True): - # test nonauthenticated - res = app.get(member_url) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - # test authenticated user - res = app.get(member_url, auth=user.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 0 - - # test authenticated member - res = app.get(member_url, auth=member.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - - # test authenticated manager - res = app.get(member_url, auth=manager.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert data[0]['id'] == osf_group._id - assert data[0]['type'] == 'groups' - assert data[0]['attributes']['name'] == osf_group.name diff --git a/api_tests/users/views/test_user_registrations_list.py b/api_tests/users/views/test_user_registrations_list.py index 740d0263038..fd92a9dc057 100644 --- a/api_tests/users/views/test_user_registrations_list.py +++ b/api_tests/users/views/test_user_registrations_list.py @@ -9,9 +9,7 @@ CollectionFactory, ProjectFactory, RegistrationFactory, - OSFGroupFactory ) -from osf.utils import permissions from tests.base import ApiTestCase from website.views import find_bookmark_collection @@ -30,14 +28,6 @@ def user_one(self): def user_two(self): return AuthUserFactory() - @pytest.fixture() - def group_member(self): - return AuthUserFactory() - - @pytest.fixture() - def osf_group(self, group_member): - return OSFGroupFactory(creator=group_member) - @pytest.fixture() def project_public_user_one(self, user_one): return ProjectFactory( @@ -66,16 +56,6 @@ def project_private_user_two(self, user_two): is_public=False, creator=user_two) - @pytest.fixture() - def project_private_group_member(self, user_one, osf_group): - project = ProjectFactory( - title='Private Project Group Member', - is_public=False, - creator=user_one - ) - project.add_osf_group(osf_group, permissions.ADMIN) - return project - @pytest.fixture() def project_deleted_user_one(self, user_one): return CollectionFactory( diff --git a/framework/auth/oauth_scopes.py b/framework/auth/oauth_scopes.py index 796023228df..87be917e4de 100644 --- a/framework/auth/oauth_scopes.py +++ b/framework/auth/oauth_scopes.py @@ -57,9 +57,6 @@ class CoreScopes: NODE_CONTRIBUTORS_READ = 'nodes.contributors_read' NODE_CONTRIBUTORS_WRITE = 'nodes.contributors_write' - OSF_GROUPS_READ = 'osf_groups.groups_read' - OSF_GROUPS_WRITE = 'osf_groups.groups_write' - PREPRINT_CONTRIBUTORS_READ = 'preprints.contributors_read' PREPRINT_CONTRIBUTORS_WRITE = 'preprints.contributors_write' @@ -84,9 +81,6 @@ class CoreScopes: NODE_PREPRINTS_READ = 'node.preprints_read' NODE_PREPRINTS_WRITE = 'node.preprints_write' - 
NODE_OSF_GROUPS_READ = 'node.osf_groups_read' - NODE_OSF_GROUPS_WRITE = 'node.osf_groups_write' - PREPRINTS_READ = 'preprint.preprints_read' PREPRINTS_WRITE = 'preprint.preprints_write' @@ -239,10 +233,6 @@ class ComposedScopes: DRAFT_READ = (CoreScopes.NODE_DRAFT_REGISTRATIONS_READ, CoreScopes.DRAFT_REGISTRATIONS_READ, CoreScopes.DRAFT_CONTRIBUTORS_READ) DRAFT_WRITE = (CoreScopes.NODE_DRAFT_REGISTRATIONS_WRITE, CoreScopes.DRAFT_REGISTRATIONS_WRITE, CoreScopes.DRAFT_CONTRIBUTORS_WRITE) - # OSF Groups - GROUP_READ = (CoreScopes.OSF_GROUPS_READ, ) - GROUP_WRITE = (CoreScopes.OSF_GROUPS_WRITE, ) - # Identifier views IDENTIFIERS_READ = (CoreScopes.IDENTIFIERS_READ, ) IDENTIFIERS_WRITE = (CoreScopes.IDENTIFIERS_WRITE, ) @@ -287,11 +277,11 @@ class ComposedScopes: # Privileges relating to who can access a node (via contributors or registrations) NODE_ACCESS_READ = (CoreScopes.NODE_CONTRIBUTORS_READ, CoreScopes.NODE_REGISTRATIONS_READ, CoreScopes.NODE_VIEW_ONLY_LINKS_READ, CoreScopes.REGISTRATION_VIEW_ONLY_LINKS_READ, - CoreScopes.NODE_REQUESTS_READ, CoreScopes.NODE_SETTINGS_READ, CoreScopes.NODE_OSF_GROUPS_READ) + CoreScopes.NODE_REQUESTS_READ, CoreScopes.NODE_SETTINGS_READ) NODE_ACCESS_WRITE = NODE_ACCESS_READ + \ (CoreScopes.NODE_CONTRIBUTORS_WRITE, CoreScopes.NODE_REGISTRATIONS_WRITE, CoreScopes.NODE_VIEW_ONLY_LINKS_WRITE, CoreScopes.REGISTRATION_VIEW_ONLY_LINKS_WRITE, - CoreScopes.NODE_REQUESTS_WRITE, CoreScopes.NODE_SETTINGS_WRITE, CoreScopes.NODE_OSF_GROUPS_WRITE) + CoreScopes.NODE_REQUESTS_WRITE, CoreScopes.NODE_SETTINGS_WRITE) # Privileges relating to who can access a preprint via contributors PREPRINT_ACCESS_READ = (CoreScopes.PREPRINT_CONTRIBUTORS_READ,) @@ -319,7 +309,6 @@ class ComposedScopes: + DRAFT_READ\ + REVIEWS_READ\ + PREPRINT_ALL_READ\ - + GROUP_READ\ + ( CoreScopes.CEDAR_METADATA_RECORD_READ, CoreScopes.MEETINGS_READ, @@ -341,7 +330,6 @@ class ComposedScopes: + DRAFT_WRITE\ + REVIEWS_WRITE\ + PREPRINT_ALL_WRITE\ - + GROUP_WRITE\ + TOKENS_WRITE\ + ( CoreScopes.CEDAR_METADATA_RECORD_WRITE, diff --git a/osf/admin.py b/osf/admin.py index 71c0ae8172b..b94c168c5be 100644 --- a/osf/admin.py +++ b/osf/admin.py @@ -24,18 +24,18 @@ class OSFUserAdmin(admin.ModelAdmin): def formfield_for_manytomany(self, db_field, request, **kwargs): """ - Restricts preprint/node/osfgroup django groups from showing up in the user's groups list in the admin app + Restricts preprint/node django groups from showing up in the user's groups list in the admin app """ if db_field.name == 'groups': - kwargs['queryset'] = Group.objects.exclude(Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='osfgroup_') | Q(name__startswith='collections_')) + kwargs['queryset'] = Group.objects.exclude(Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='collections_')) return super().formfield_for_manytomany(db_field, request, **kwargs) def save_related(self, request, form, formsets, change): """ - Since m2m fields overridden with new form data in admin app, preprint groups/node/osfgroup groups (which are now excluded from being selections) + Since m2m fields overridden with new form data in admin app, preprint groups/node groups (which are now excluded from being selections) are removed. Manually re-adds preprint/node groups after adding new groups in form. 
""" - groups_to_preserve = list(form.instance.groups.filter(Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='osfgroup_') | Q(name__startswith='collections_'))) + groups_to_preserve = list(form.instance.groups.filter(Q(name__startswith='preprint_') | Q(name__startswith='node_') | Q(name__startswith='collections_'))) super().save_related(request, form, formsets, change) if 'groups' in form.cleaned_data: for group in groups_to_preserve: diff --git a/osf/migrations/0030_alter_osfgroupgroupobjectpermission_unique_together_and_more.py b/osf/migrations/0030_alter_osfgroupgroupobjectpermission_unique_together_and_more.py new file mode 100644 index 00000000000..d628d04b912 --- /dev/null +++ b/osf/migrations/0030_alter_osfgroupgroupobjectpermission_unique_together_and_more.py @@ -0,0 +1,65 @@ +# Generated by Django 4.2.15 on 2025-04-25 12:54 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0029_remove_abstractnode_keenio_read_key'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='osfgroupgroupobjectpermission', + unique_together=None, + ), + migrations.RemoveField( + model_name='osfgroupgroupobjectpermission', + name='content_object', + ), + migrations.RemoveField( + model_name='osfgroupgroupobjectpermission', + name='group', + ), + migrations.RemoveField( + model_name='osfgroupgroupobjectpermission', + name='permission', + ), + migrations.RemoveField( + model_name='osfgrouplog', + name='group', + ), + migrations.RemoveField( + model_name='osfgrouplog', + name='user', + ), + migrations.AlterUniqueTogether( + name='osfgroupuserobjectpermission', + unique_together=None, + ), + migrations.RemoveField( + model_name='osfgroupuserobjectpermission', + name='content_object', + ), + migrations.RemoveField( + model_name='osfgroupuserobjectpermission', + name='permission', + ), + migrations.RemoveField( + model_name='osfgroupuserobjectpermission', + name='user', + ), + migrations.DeleteModel( + name='OSFGroup', + ), + migrations.DeleteModel( + name='OSFGroupGroupObjectPermission', + ), + migrations.DeleteModel( + name='OSFGroupLog', + ), + migrations.DeleteModel( + name='OSFGroupUserObjectPermission', + ), + ] diff --git a/osf/models/__init__.py b/osf/models/__init__.py index 275fd148b6c..0c65b67ee1a 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -68,8 +68,6 @@ ApiOAuth2PersonalToken, ApiOAuth2Scope, ) -from .osf_group import OSFGroup -from .osf_grouplog import OSFGroupLog from .outcome_artifacts import OutcomeArtifact from .outcomes import Outcome from .preprint import Preprint @@ -111,4 +109,3 @@ OSFUser, ) from .user_message import UserMessage - diff --git a/osf/models/node.py b/osf/models/node.py index 83d646cc717..7d196f238dc 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -26,7 +26,7 @@ GroupObjectPermissionBase, UserObjectPermissionBase, ) -from guardian.shortcuts import get_objects_for_user, get_groups_with_perms +from guardian.shortcuts import get_objects_for_user from framework import status from framework.auth import oauth_scopes @@ -800,50 +800,6 @@ def can_edit(self, auth=None, user=None): is_api_node = False return (user and self.has_permission(user, WRITE)) or is_api_node - def add_osf_group(self, group, permission=WRITE, auth=None): - if auth and not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Must be an admin to add an OSF Group.') - group.add_group_to_node(self, permission, auth) - - def update_osf_group(self, group, 
permission=WRITE, auth=None): - if auth and not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Must be an admin to add an OSF Group.') - group.update_group_permissions_to_node(self, permission, auth) - - def remove_osf_group(self, group, auth=None): - if auth and not (self.has_permission(auth.user, ADMIN) or group.has_permission(auth.user, 'manage')): - raise PermissionsError('Must be an admin or an OSF Group manager to remove an OSF Group.') - group.remove_group_from_node(self, auth) - - @property - def osf_groups(self): - """Returns a queryset of OSF Groups whose members have some permission to the node - """ - from .osf_group import OSFGroupGroupObjectPermission, OSFGroup - - member_groups = get_groups_with_perms(self).filter(name__icontains='osfgroup') - return OSFGroup.objects.filter( - id__in=OSFGroupGroupObjectPermission.objects.filter(group_id__in=member_groups).values_list( - 'content_object_id')) - - def get_osf_groups_with_perms(self, permission): - """Returns a queryset of OSF Groups whose members have the specified permission to the node - """ - from .osf_group import OSFGroup - from .node import NodeGroupObjectPermission - try: - perm_id = Permission.objects.get(codename=permission + '_node').id - except Permission.DoesNotExist: - raise ValueError('Specified permission does not exist.') - member_groups = NodeGroupObjectPermission.objects.filter( - permission_id=perm_id, content_object_id=self.id - ).filter( - group__name__icontains='osfgroup' - ).values_list( - 'group_id', flat=True - ) - return OSFGroup.objects.filter(osfgroupgroupobjectpermission__group_id__in=member_groups) - def get_logs_queryset(self, auth): return NodeLog.objects.filter( node_id=self.id, diff --git a/osf/models/osf_group.py b/osf/models/osf_group.py deleted file mode 100644 index a9a6b3b6f56..00000000000 --- a/osf/models/osf_group.py +++ /dev/null @@ -1,576 +0,0 @@ -import logging -import functools -from django.apps import apps -from django.core.exceptions import ValidationError -from django.db import models -from django.db.models.signals import post_save -from django.dispatch import receiver -from guardian.shortcuts import assign_perm, remove_perm, get_perms, get_objects_for_group, get_group_perms -from guardian.models import GroupObjectPermissionBase, UserObjectPermissionBase - -from framework.exceptions import PermissionsError -from framework.auth.core import get_user, Auth -from framework.sentry import log_exception -from osf.exceptions import BlockedEmailError -from .base import BaseModel, ObjectIDMixin -from .mixins import GuardianMixin, Loggable -from .node import Node -from .nodelog import NodeLog -from .user import OSFUser -from .osf_grouplog import OSFGroupLog -from .validators import validate_email -from osf.utils.permissions import ADMIN, READ_NODE, WRITE, MANAGER, MEMBER, MANAGE, reduce_permissions -from osf.utils import sanitize -from website.project import signals as project_signals -from website.osf_groups import signals as group_signals -from website.util import api_v2_url - -logger = logging.getLogger(__name__) - - -class OSFGroup(GuardianMixin, Loggable, ObjectIDMixin, BaseModel): - """ - OSFGroup model. When an OSFGroup is created, a manager and member Django group are created. - Managers belong to both manager and member groups. Members belong to the member group only. - - The OSFGroup's Django member group is given permissions to nodes, so all OSFGroup members - get the same permission to the node. 
- """ - - name = models.TextField(blank=False) - creator = models.ForeignKey(OSFUser, - db_index=True, - related_name='osfgroups_created', - on_delete=models.SET_NULL, - null=True, blank=True) - - groups = { - 'member': ('member_group',), - 'manager': ('manage_group',), - } - group_format = 'osfgroup_{self.id}_{group}' - - def __unicode__(self): - return f'OSFGroup_{self.id}_{self.name}' - - class Meta: - permissions = ( - ('view_group', 'Can view group details'), - ('member_group', 'Has group membership'), - ('manage_group', 'Can manage group membership'), - ) - - @property - def _primary_key(self): - return self._id - - @property - def manager_group(self): - """ - OSFGroup's Django manager group object - """ - return self.get_group(MANAGER) - - @property - def member_group(self): - """ - OSFGroup's Django member group object - """ - return self.get_group(MEMBER) - - @property - def managers(self): - # All users that belong to the OSF Group's manager group - return self.manager_group.user_set.all() - - @property - def members(self): - # All members/managers belonging to this OSFGroup - - # the member group has both members and managers - return self.member_group.user_set.all() - - @property - def members_only(self): - # Users that are truly members-only and not managers - return self.members.exclude(id__in=self.managers) - - @property - def nodes(self): - """ - Returns nodes that the OSF group has permission to - """ - return get_objects_for_group(self.member_group, READ_NODE, Node) - - @property - def absolute_api_v2_url(self): - path = f'/groups/{self._id}/' - return api_v2_url(path) - - @property - def url(self): - # TODO - front end hasn't been set up - return f'/{self._primary_key}/' - - def get_absolute_url(self): - return self.absolute_api_v2_url - - def is_member(self, user): - # Checking group membership instead of permissions, because unregistered - # members have no perms - return user in self.members - - def is_manager(self, user): - # Checking group membership instead of permissions, because unregistered - # members have no perms - return user in self.managers - - def _require_manager_permission(self, auth=None): - if auth and not self.has_permission(auth.user, MANAGE): - raise PermissionsError('Must be a group manager to modify group membership.') - - def _disabled_user_check(self, user): - if user.is_disabled: - raise ValueError('Deactivated users cannot be added to OSF Groups.') - - def _enforce_one_manager(self, user): - # Group must have at least one registered manager - if (len(self.managers) == 1 and self.managers[0] == user) or not self.managers.filter(is_registered=True).exclude(id=user.id): - raise ValueError('Group must have at least one manager.') - - def _get_node_group_perms(self, node, permission): - """ - Gets expanded permissions for a node. The expanded permissions can be used - to add to the member group. - - Raises error if permission is invalid. 
- """ - permissions = node.groups.get(permission) - if not permissions: - raise ValueError(f'{permission} is not a valid permission.') - return permissions - - def send_member_email(self, user, permission, auth=None): - group_signals.member_added.send(self, user=user, permission=permission, auth=auth) - - def make_member(self, user, auth=None): - """Add member or downgrade manager to member - - :param user: OSFUser object, intended member - :param auth: Auth object - """ - self._require_manager_permission(auth) - self._disabled_user_check(user) - adding_member = not self.is_member(user) - if user in self.members_only: - return False - - self.member_group.user_set.add(user) - if self.is_manager(user): - self._enforce_one_manager(user) - self.manager_group.user_set.remove(user) - self.add_role_updated_log(user, MEMBER, auth) - else: - self.add_log( - OSFGroupLog.MEMBER_ADDED, - params={ - 'group': self._id, - 'user': user._id, - }, - auth=auth) - self.update_search() - - if adding_member: - self.send_member_email(user, MEMBER, auth) - - def make_manager(self, user, auth=None): - """Add manager or upgrade member to manager - - :param user: OSFUser object, intended manager - :param auth: Auth object - """ - self._require_manager_permission(auth) - self._disabled_user_check(user) - adding_member = not self.is_member(user) - if self.is_manager(user): - return False - - if not self.is_member(user): - self.add_log( - OSFGroupLog.MANAGER_ADDED, - params={ - 'group': self._id, - 'user': user._id, - }, - auth=auth) - - else: - self.add_role_updated_log(user, MANAGER, auth) - self.manager_group.user_set.add(user) - self.member_group.user_set.add(user) - self.update_search() - - if adding_member: - self.send_member_email(user, MANAGER, auth) - - def add_unregistered_member(self, fullname, email, auth, role=MEMBER): - """Add unregistered member or manager to OSFGroup - - :param fullname: string, user fullname - :param email: email, user email - :param auth: Auth object - :param role: string, "member" or "manager", default is member - """ - OSFUser = apps.get_model('osf.OSFUser') - - try: - validate_email(email) - except BlockedEmailError: - raise ValidationError('Email address domain is blocked.') - - user = get_user(email=email) - if user: - if user.is_registered or self.is_member(user): - raise ValueError('User already exists.') - else: - user = OSFUser.create_unregistered(fullname=fullname, email=email) - user.add_unclaimed_record( - self, - referrer=auth.user, - given_name=fullname, - email=email, - ) - - if role == MANAGER: - self.make_manager(user, auth=auth) - else: - self.make_member(user, auth=auth) - - return user - - def replace_contributor(self, old, new): - """ - Replacing unregistered member with a verified user - - Using "replace_contributor" language to mimic Node model, so this can be called in - the same views using to claim accounts on nodes. - """ - if not self.is_member(old): - return False - - # Remove unclaimed record for the group - if self._id in old.unclaimed_records: - del old.unclaimed_records[self._id] - old.save() - - # For the manager and member Django group attached to the OSFGroup, - # add the new user to the group, and remove the old. 
This - # will give the new user the appropriate permissions to the OSFGroup - for group_name in self.groups.keys(): - if self.get_group(group_name).user_set.filter(id=old.id).exists(): - self.get_group(group_name).user_set.remove(old) - self.get_group(group_name).user_set.add(new) - - self.update_search() - return True - - def remove_member(self, user, auth=None): - """Remove member or manager - - :param user: OSFUser object, member/manager to remove - :param auth: Auth object - """ - if not (auth and user == auth.user): - self._require_manager_permission(auth) - - if not self.is_member(user): - return False - self._enforce_one_manager(user) - self.manager_group.user_set.remove(user) - self.member_group.user_set.remove(user) - - self.add_log( - OSFGroupLog.MEMBER_REMOVED, - params={ - 'group': self._id, - 'user': user._id, - }, - auth=auth) - - self.update_search() - - for node in self.nodes: - project_signals.contributor_removed.send(node, user=user) - node.disconnect_addons(user, auth) - - def set_group_name(self, name, auth=None): - """Set the name of the group. - - :param str new Name: The new osf group name - :param auth: Auth object - """ - self._require_manager_permission(auth) - new_name = sanitize.strip_html(name) - # Title hasn't changed after sanitzation, bail out - if self.name == new_name: - return False - old_name = self.name - self.name = new_name - - self.add_log( - OSFGroupLog.EDITED_NAME, - params={ - 'group': self._id, - 'name_original': old_name - }, - auth=auth) - self.update_search() - for node in self.nodes: - node.update_search() - - def add_group_to_node(self, node, permission=WRITE, auth=None): - """Gives the OSF Group permissions to the node. Called from node model. - - :param obj Node - :param str Highest permission to grant, 'read', 'write', or 'admin' - :param auth: Auth object - """ - self._require_manager_permission(auth) - - current_perm = self.get_permission_to_node(node) - if current_perm: - if current_perm == permission: - return False - # If group already has perms to node, update permissions instead - return self.update_group_permissions_to_node(node, permission, auth) - - permissions = self._get_node_group_perms(node, permission) - for perm in permissions: - assign_perm(perm, self.member_group, node) - - params = { - 'group': self._id, - 'node': node._id, - 'permission': permission - } - - self.add_log( - OSFGroupLog.NODE_CONNECTED, - params=params, - auth=auth) - - self.add_corresponding_node_log(node, NodeLog.GROUP_ADDED, params, auth) - node.update_search() - - for user in self.members: - group_signals.group_added_to_node.send(self, node=node, user=user, permission=permission, auth=auth) - - def update_group_permissions_to_node(self, node, permission=WRITE, auth=None): - """Updates the OSF Group permissions to the node. Called from node model. 
- - :param obj Node - :param str Highest permission to grant, 'read', 'write', or 'admin' - :param auth: Auth object - """ - if self.get_permission_to_node(node) == permission: - return False - permissions = self._get_node_group_perms(node, permission) - to_remove = set(get_perms(self.member_group, node)).difference(permissions) - for perm in to_remove: - remove_perm(perm, self.member_group, node) - for perm in permissions: - assign_perm(perm, self.member_group, node) - params = { - 'group': self._id, - 'node': node._id, - 'permission': permission - } - self.add_log( - OSFGroupLog.NODE_PERMS_UPDATED, - params=params, - auth=auth - ) - - self.add_corresponding_node_log(node, NodeLog.GROUP_UPDATED, params, auth) - - def remove_group_from_node(self, node, auth): - """Removes the OSFGroup from the node. Called from node model. - - :param obj Node - """ - if not self.get_permission_to_node(node): - return False - for perm in node.groups[ADMIN]: - remove_perm(perm, self.member_group, node) - params = { - 'group': self._id, - 'node': node._id, - } - self.add_log( - OSFGroupLog.NODE_DISCONNECTED, - params=params, - auth=auth) - - self.add_corresponding_node_log(node, NodeLog.GROUP_REMOVED, params, auth) - node.update_search() - - for user in self.members: - node.disconnect_addons(user, auth) - project_signals.contributor_removed.send(node, user=user) - - def get_permission_to_node(self, node): - """ - Returns the permission this OSF group has to the given node - - :param node: Node object - """ - perms = get_group_perms(self.member_group, node) - return reduce_permissions(perms) if perms else None - - def has_permission(self, user, permission): - """Returns whether the user has the given permission to the OSFGroup - :param user: Auth object - :param role: member/manange permission - :return Boolean - """ - if not user or user.is_anonymous: - return False - - # Using get_group_perms to get permissions that are inferred through - # group membership - not inherited from superuser status - return '{}_{}'.format(permission, 'group') in get_group_perms(user, self) - - def remove_group(self, auth=None): - """Removes the OSFGroup and associated manager and member django groups - :param auth: Auth object - """ - self._require_manager_permission(auth) - group_id = self._id - members = list(self.members.values_list('id', flat=True)) - nodes = self.nodes - - self.member_group.delete() - self.manager_group.delete() - self.delete() - self.update_search(deleted_id=group_id) - - for user in OSFUser.objects.filter(id__in=members): - for node in nodes: - node.disconnect_addons(user, auth) - params = { - 'group': group_id, - 'node': node._id, - } - self.add_corresponding_node_log(node, NodeLog.GROUP_REMOVED, params, auth) - project_signals.contributor_removed.send(node, user=user) - node.update_search() - - def save(self, *args, **kwargs): - first_save = not bool(self.pk) - ret = super().save(*args, **kwargs) - if first_save: - self.update_group_permissions() - self.make_manager(self.creator) - - return ret - - def add_role_updated_log(self, user, role, auth=None): - """Creates a log when role changes - :param auth: Auth object - """ - self.add_log( - OSFGroupLog.ROLE_UPDATED, - params={ - 'group': self._id, - 'new_role': role, - 'user': user._id, - }, - auth=auth) - - def add_corresponding_node_log(self, node, action, params, auth): - """ Used for logging OSFGroup-related action to nodes - for example, - adding a group to a node. 
- - :param node: Node object - :param action: string, Node log action - :param params: dict, log params - """ - node.add_log( - action=action, - params=params, - auth=auth, - save=True - ) - - def add_log(self, action, params, auth, log_date=None, save=True): - """Create OSFGroupLog - :param action: string, OSFGroup log action - :param params: dict, log params - """ - user = None - if auth: - user = auth.user - - log = OSFGroupLog( - action=action, user=user, - params=params, group=self - ) - - log.save() - - self._complete_add_log(log, action, user, save) - return log - - def update_search(self, deleted_id=None): - from website import search - - try: - search.search.update_group(self, bulk=False, async_update=True, deleted_id=deleted_id) - except search.exceptions.SearchUnavailableError as e: - logger.exception(e) - log_exception(e) - - @classmethod - def bulk_update_search(cls, groups, index=None): - from website import search - try: - serialize = functools.partial(search.search.update_group, index=index, bulk=True, async_update=False) - search.search.bulk_update_nodes(serialize, groups, index=index) - except search.exceptions.SearchUnavailableError as e: - logger.exception(e) - log_exception(e) - - -@receiver(post_save, sender=OSFGroup) -def add_project_created_log(sender, instance, created, **kwargs): - if created: - log_action = OSFGroupLog.GROUP_CREATED - log_params = { - 'group': instance._id, - } - - instance.add_log( - log_action, - params=log_params, - auth=Auth(user=instance.creator), - log_date=instance.created, - save=True, - ) - - -class OSFGroupUserObjectPermission(UserObjectPermissionBase): - """ - Direct Foreign Key Table for guardian - User models - we typically add object - perms directly to Django groups instead of users, so this will be used infrequently - """ - content_object = models.ForeignKey(OSFGroup, on_delete=models.CASCADE) - - -class OSFGroupGroupObjectPermission(GroupObjectPermissionBase): - """ - Direct Foreign Key Table for guardian - Group models. Makes permission checks faster. - - This table gives a Django group a particular permission to an OSF Group. - (Every time an OSFGroup is created, a Django member group, and Django manager group are created. - The member group is given member perms, manager group has manager perms.) 
- """ - content_object = models.ForeignKey(OSFGroup, on_delete=models.CASCADE) diff --git a/osf/models/osf_grouplog.py b/osf/models/osf_grouplog.py deleted file mode 100644 index 38ef2ee3551..00000000000 --- a/osf/models/osf_grouplog.py +++ /dev/null @@ -1,52 +0,0 @@ -from django.db import models -from .base import BaseModel, ObjectIDMixin -from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField -from website.util import api_v2_url - - -class OSFGroupLog(ObjectIDMixin, BaseModel): - DATE_FORMAT = '%m/%d/%Y %H:%M UTC' - - GROUP_CREATED = 'group_created' - - MEMBER_ADDED = 'member_added' - MANAGER_ADDED = 'manager_added' - MEMBER_REMOVED = 'member_removed' - ROLE_UPDATED = 'role_updated' - EDITED_NAME = 'edit_name' - NODE_CONNECTED = 'node_connected' - NODE_PERMS_UPDATED = 'node_permissions_updated' - NODE_DISCONNECTED = 'node_disconnected' - - actions = ([GROUP_CREATED, MEMBER_ADDED, MANAGER_ADDED, MEMBER_REMOVED, ROLE_UPDATED, - EDITED_NAME, NODE_CONNECTED, NODE_PERMS_UPDATED, NODE_DISCONNECTED]) - - action_choices = [(action, action.upper()) for action in actions] - - action = models.CharField(max_length=255, db_index=True) - params = DateTimeAwareJSONField(default=dict) - should_hide = models.BooleanField(default=False) - user = models.ForeignKey('OSFUser', related_name='group_logs', db_index=True, - null=True, blank=True, on_delete=models.CASCADE) - group = models.ForeignKey('OSFGroup', related_name='logs', - db_index=True, null=True, blank=True, on_delete=models.CASCADE) - - def __unicode__(self): - return ('({self.action!r}, user={self.user!r}, group={self.group!r}, params={self.params!r}) ' - 'with id {self.id!r}').format(self=self) - - class Meta: - ordering = ['-created'] - get_latest_by = 'created' - - @property - def absolute_api_v2_url(self): - path = f'/logs/{self._id}/' - return api_v2_url(path) - - def get_absolute_url(self): - return self.absolute_api_v2_url - - @property - def absolute_url(self): - return self.absolute_api_v2_url diff --git a/osf/models/user.py b/osf/models/user.py index 97d444698fd..82c0a0d4790 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -14,7 +14,6 @@ import pytz from dirtyfields import DirtyFieldsMixin -from django.apps import apps from django.conf import settings from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager from django.contrib.auth.hashers import check_password @@ -24,7 +23,6 @@ from django.db.models import Count, Exists, OuterRef from django.db.models.signals import post_save from django.utils import timezone -from guardian.shortcuts import get_objects_for_user from framework import sentry from framework.auth import Auth, signals, utils @@ -57,7 +55,7 @@ from osf.utils.fields import NonNaiveDateTimeField, LowercaseEmailField, ensure_str from osf.utils.names import impute_names from osf.utils.requests import check_select_for_update -from osf.utils.permissions import API_CONTRIBUTOR_PERMISSIONS, MANAGER, MEMBER, MANAGE, ADMIN +from osf.utils.permissions import API_CONTRIBUTOR_PERMISSIONS, MANAGER, MEMBER, ADMIN from website import settings as website_settings from website import filters, mails from website.project import new_bookmark_collection @@ -644,14 +642,6 @@ def is_authenticated(self): # Needed for django compat def is_anonymous(self): return False - @property - def osf_groups(self): - """ - OSFGroups that the user belongs to - """ - OSFGroup = apps.get_model('osf.OSFGroup') - return get_objects_for_user(self, 'member_group', OSFGroup, with_superuser=False) - def 
is_institutional_admin_at(self, institution): """ Checks if user is admin of a specific institution. @@ -861,17 +851,7 @@ def merge_user(self, user): # Transfer user's draft registrations self._merge_user_draft_registrations(user) - # transfer group membership - for group in user.osf_groups: - if not group.is_manager(self): - if group.has_permission(user, MANAGE): - group.make_manager(self) - else: - group.make_member(self) - group.remove_member(user) - # finalize the merge - remove_sessions_for_user(user) # - username is set to the GUID so the merging user can set it primary @@ -1528,9 +1508,6 @@ def update_search_nodes(self): for node in self.contributor_to: node.update_search() - for group in self.osf_groups: - group.update_search() - def update_date_last_login(self, login_time=None): self.date_last_login = login_time or timezone.now() @@ -1677,7 +1654,6 @@ def add_unclaimed_record(self, claim_origin, referrer, given_name, email=None, s """ from .provider import AbstractProvider - from .osf_group import OSFGroup if not skip_referrer_permissions: if isinstance(claim_origin, AbstractProvider): @@ -1685,12 +1661,6 @@ def add_unclaimed_record(self, claim_origin, referrer, given_name, email=None, s raise PermissionsError( f'Referrer does not have permission to add a moderator to provider {claim_origin._id}' ) - - elif isinstance(claim_origin, OSFGroup): - if not claim_origin.has_permission(referrer, MANAGE): - raise PermissionsError( - f'Referrer does not have permission to add a member to {claim_origin._id}' - ) else: if not claim_origin.has_permission(referrer, ADMIN): raise PermissionsError( @@ -2029,9 +1999,6 @@ def gdpr_delete(self): hard_delete=True ) - # A Potentially out of date check that user isn't a member of a OSFGroup - self._validate_osf_groups() - # Finally delete the user's info. self._clear_identifying_information() @@ -2088,20 +2055,6 @@ def _validate_and_remove_resource_for_gdpr_delete(self, resources, hard_delete): logger.info(f'Soft-deleting {entity.__class__.__name__} (pk: {entity.pk})...') entity.remove_node(auth=Auth(self)) - def _validate_osf_groups(self): - """ - This method ensures a user isn't in an OSFGroup before deleting them.. 
- """ - for group in self.osf_groups: - if not group.managers.exclude(id=self.id).filter(is_registered=True).exists() and group.members.exclude( - id=self.id).exists(): - raise UserStateError( - f'You cannot delete this user because they are the only registered manager of OSFGroup {group._id} that contains other members.') - elif len(group.managers) == 1 and group.managers[0] == self: - group.remove_group() - else: - group.remove_member(self) - def _clear_identifying_information(self): ''' This method ensures a user's info is deleted during a GDPR delete @@ -2156,10 +2109,9 @@ def has_resources(self): from osf.models import Preprint nodes = self.nodes.filter(deleted__isnull=True).exists() - groups = self.osf_groups.exists() preprints = Preprint.objects.filter(_contributors=self, ever_public=True, deleted__isnull=True).exists() - return groups or nodes or preprints + return nodes or preprints class Meta: # custom permissions for use in the OSF Admin App diff --git a/osf_tests/factories.py b/osf_tests/factories.py index 7ad8885e1ad..1310c9aed63 100644 --- a/osf_tests/factories.py +++ b/osf_tests/factories.py @@ -374,15 +374,6 @@ def _create(cls, *args, **kwargs): return obj -class OSFGroupFactory(DjangoModelFactory): - name = factory.Faker('company') - created = factory.LazyFunction(timezone.now) - creator = factory.SubFactory(AuthUserFactory) - - class Meta: - model = models.OSFGroup - - class RegistrationFactory(BaseNodeFactory): creator = None diff --git a/osf_tests/test_analytics.py b/osf_tests/test_analytics.py index 14b6c05df51..2bcd1fd1d10 100644 --- a/osf_tests/test_analytics.py +++ b/osf_tests/test_analytics.py @@ -11,7 +11,7 @@ from addons.osfstorage.models import OsfStorageFile from framework import analytics -from osf.models import PageCounter, OSFGroup +from osf.models import PageCounter from tests.base import OsfTestCase from osf_tests.factories import UserFactory, ProjectFactory @@ -127,26 +127,6 @@ def test_download_update_counter_contributor(self, user, project, file_node): assert page_counter.total == 0 assert page_counter.unique == 0 - platform_group = OSFGroup.objects.create(creator=user, name='Platform') - group_member = UserFactory() - project.add_osf_group(platform_group) - - session['auth_user_id'] = group_member._id - session.save() - PageCounter.update_counter(resource, file_node, version=None, action='download', node_info={ - 'contributors': project.contributors_and_group_members}, session_key=session.session_key - ) - page_counter.refresh_from_db() - assert page_counter.total == 1 - assert page_counter.unique == 1 - - platform_group.make_member(group_member) - PageCounter.update_counter(resource, file_node, version=None, action='download', node_info={ - 'contributors': project.contributors_and_group_members}, session_key=session.session_key - ) - assert page_counter.total == 1 - assert page_counter.unique == 1 - def test_get_all_downloads_on_date(self, page_counter, page_counter2): """ This method tests that multiple pagecounter objects have their download totals summed properly. 
diff --git a/osf_tests/test_comment.py b/osf_tests/test_comment.py index 878b12962d9..4b2c4a91acf 100644 --- a/osf_tests/test_comment.py +++ b/osf_tests/test_comment.py @@ -27,7 +27,6 @@ UserFactory, UnregUserFactory, AuthUserFactory, - OSFGroupFactory, ) # All tests will require a databse @@ -353,21 +352,6 @@ def test_edit(self): assert comment.node.logs.count() == 2 assert comment.node.logs.latest().action == NodeLog.COMMENT_UPDATED - def test_create_sends_mention_added_signal_if_group_member_mentions(self, node, user, auth): - manager = AuthUserFactory() - group = OSFGroupFactory(creator=manager) - node.add_osf_group(group) - assert node.is_contributor_or_group_member(manager) is True - with capture_signals() as mock_signals: - Comment.create( - auth=auth, - user=user, - node=node, - target=node.guids.all()[0], - content='This is a comment with a group member mention [@Group Member](http://localhost:5000/' + manager._id + '/).' - ) - assert mock_signals.signals_sent() == ({comment_added, mention_added}) - def test_delete(self, node): comment = CommentFactory(node=node) auth = Auth(comment.user) diff --git a/osf_tests/test_draft_registration.py b/osf_tests/test_draft_registration.py index f7beb3ceae8..4551d4d998b 100644 --- a/osf_tests/test_draft_registration.py +++ b/osf_tests/test_draft_registration.py @@ -68,17 +68,6 @@ def test_register(self): draft.register(auth) assert draft.registered_node - # group member with admin access cannot register - member = factories.AuthUserFactory() - osf_group = factories.OSFGroupFactory(creator=user) - osf_group.make_member(member, auth=auth) - project.add_osf_group(osf_group, ADMIN) - draft_2 = factories.DraftRegistrationFactory(branched_from=project) - assert project.has_permission(member, ADMIN) - with pytest.raises(PermissionsError): - draft_2.register(Auth(member)) - assert not draft_2.registered_node - @mock.patch('website.settings.ENABLE_ARCHIVER', False) def test_register_no_title_fails(self): user = factories.UserFactory() @@ -198,9 +187,7 @@ def test_create_from_node_existing(self, user): node = factories.ProjectFactory(creator=user) member = factories.AuthUserFactory() - osf_group = factories.OSFGroupFactory(creator=user) - osf_group.make_member(member, auth=Auth(user)) - node.add_osf_group(osf_group, ADMIN) + node.add_contributor(member, permissions=ADMIN) write_contrib = factories.AuthUserFactory() subject = factories.SubjectFactory() diff --git a/osf_tests/test_elastic_search.py b/osf_tests/test_elastic_search.py index 56c42391095..396e0d6b2aa 100644 --- a/osf_tests/test_elastic_search.py +++ b/osf_tests/test_elastic_search.py @@ -15,7 +15,6 @@ from osf.models import ( Retraction, NodeLicense, - OSFGroup, Tag, Preprint, ) @@ -443,77 +442,6 @@ def test_make_public(self): assert len(docs) == 1 -@pytest.mark.enable_search -@pytest.mark.enable_enqueue_task -class TestOSFGroup(OsfTestCase): - - def setUp(self): - with run_celery_tasks(): - super().setUp() - search.delete_index(elastic_search.INDEX) - search.create_index(elastic_search.INDEX) - self.user = factories.UserFactory(fullname='John Deacon') - self.user_two = factories.UserFactory(fullname='Grapes McGee') - self.group = OSFGroup( - name='Cornbread', - creator=self.user, - ) - self.group.save() - self.project = factories.ProjectFactory(is_public=True, creator=self.user, title='Biscuits') - self.project.save() - - def test_create_osf_group(self): - title = 'Butter' - group = OSFGroup(name=title, creator=self.user) - group.save() - docs = query(title)['results'] - assert len(docs) == 1 
- - def test_set_group_name(self): - title = 'Eggs' - self.group.set_group_name(title) - self.group.save() - docs = query(title)['results'] - assert len(docs) == 1 - - docs = query('Cornbread')['results'] - assert len(docs) == 0 - - def test_add_member(self): - self.group.make_member(self.user_two) - docs = query(f'category:group AND "{self.user_two.fullname}"')['results'] - assert len(docs) == 1 - - self.group.make_manager(self.user_two) - docs = query(f'category:group AND "{self.user_two.fullname}"')['results'] - assert len(docs) == 1 - - self.group.remove_member(self.user_two) - docs = query(f'category:group AND "{self.user_two.fullname}"')['results'] - assert len(docs) == 0 - - def test_connect_to_node(self): - self.project.add_osf_group(self.group) - docs = query(f'category:project AND "{self.group.name}"')['results'] - assert len(docs) == 1 - - self.project.remove_osf_group(self.group) - docs = query(f'category:project AND "{self.group.name}"')['results'] - assert len(docs) == 0 - - def test_remove_group(self): - group_name = self.group.name - self.project.add_osf_group(self.group) - docs = query(f'category:project AND "{group_name}"')['results'] - assert len(docs) == 1 - - self.group.remove_group() - docs = query(f'category:project AND "{group_name}"')['results'] - assert len(docs) == 0 - docs = query(group_name)['results'] - assert len(docs) == 0 - - @pytest.mark.enable_search @pytest.mark.enable_enqueue_task class TestPreprint(OsfTestCase): diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index 4fcd6e542cf..a8c2245b9a7 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -60,7 +60,6 @@ InstitutionFactory, SubjectFactory, TagFactory, - OSFGroupFactory, CollectionFactory, CollectionProviderFactory, ) @@ -891,23 +890,6 @@ def test_add_contributor(self, node, user, auth): assert user2 in user.recently_added.all() - def test_add_contributor_already_group_member(self, node, user, auth): - group = OSFGroupFactory(creator=user) - user2 = UserFactory() - group.make_member(user2) - node.add_osf_group(group, permissions.ADMIN) - - assert node.is_contributor_or_group_member(user2) is True - assert node.is_contributor(user2) is False - assert node.has_permission(user2, permissions.ADMIN) - - node.add_contributor(contributor=user2, auth=auth) - node.save() - assert node.is_contributor(user2) is True - assert node.has_permission(user2, permissions.ADMIN) - # Even though user2 has admin perms, they don't have it through admin contributorship - assert node.is_admin_contributor(user2) is False - def test_add_contributors(self, node, auth): user1 = UserFactory() user2 = UserFactory() @@ -981,11 +963,6 @@ def test_is_contributor(self, node): assert node.is_contributor(noncontrib) is False assert node.is_contributor(None) is False - group = OSFGroupFactory(creator=noncontrib) - node.add_osf_group(group, permissions.READ) - assert node.is_contributor(noncontrib) is False - assert node.is_contributor_or_group_member(noncontrib) is True - superuser = AuthUserFactory() superuser.is_superuser = True superuser.save() @@ -1003,11 +980,6 @@ def test_is_admin_contributor(self, node): node.set_permissions(contrib, WRITE) - group = OSFGroupFactory(creator=contrib) - node.add_osf_group(group, permissions.ADMIN) - assert node.has_permission(contrib, permissions.ADMIN) is True - assert node.is_admin_contributor(contrib) is False - def test_visible_contributor_ids(self, node, user): visible_contrib = UserFactory() invisible_contrib = UserFactory() @@ -1065,14 +1037,6 @@ def 
test_set_visible_missing(self, node): with pytest.raises(ValueError): node.set_visible(UserFactory(), True) - def test_set_visible_group_member(self, node, user): - user2 = AuthUserFactory() - group = OSFGroupFactory(creator=user2) - node.add_osf_group(group, permissions.ADMIN) - - with pytest.raises(ValueError): - node.set_visible(user2, True) - def test_copy_contributors_from_adds_contributors(self, node): contrib, contrib2 = UserFactory(), UserFactory() node.add_contributor(contrib, visible=True) @@ -1100,15 +1064,12 @@ def test_copy_contributors_from_preserves_visibility(self, node): def test_copy_contributors_from_preserves_permissions(self, node): read, admin = UserFactory(), UserFactory() - group = OSFGroupFactory(creator=read) node.add_contributor(read, permissions.READ, visible=True) node.add_contributor(admin, permissions.ADMIN, visible=False) - node.add_osf_group(group, permissions.WRITE) node2 = NodeFactory() node2.copy_contributors_from(node) assert node2.has_permission(read, permissions.READ) is True - assert node2.has_permission(read, permissions.WRITE) is False assert node2.has_permission(admin, permissions.ADMIN) is True def test_remove_contributor(self, node, auth): @@ -1126,19 +1087,6 @@ def test_remove_contributor(self, node, auth): assert node.logs.latest().action == 'contributor_removed' assert node.logs.latest().params['contributors'] == [user2._id] - def test_remove_contributor_admin_group_members(self, node, user, auth): - user2 = UserFactory() - group = OSFGroupFactory(creator=user2) - node.add_osf_group(group, permissions.ADMIN) - assert node.has_permission(user2, permissions.ADMIN) is True - - removed = node.remove_contributor(contributor=user, auth=auth) - assert removed is False - # Contributor could not be removed even though there was another - # user with admin perms - group membership insufficient - assert node.has_permission(user, permissions.ADMIN) is True - assert node.is_contributor(user) is True - def test_remove_contributors(self, node, auth): user1 = UserFactory() user2 = UserFactory() @@ -1342,12 +1290,6 @@ def test_parent_admin_contributors(self, user): child_two = ProjectFactory(parent=project, creator=user_two) assert child_two.parent_admin_contributors.count() == 1 - user_three = UserFactory() - group = OSFGroupFactory(name='Platform', creator=user_three) - project.add_osf_group(group, permissions.ADMIN) - assert child_two.parent_admin_contributors.count() == 1 - assert child_two.parent_admin_users.count() == 2 - def test_admin_contributor_or_group_member_ids(self, user): project = ProjectFactory(creator=user) assert project.admin_contributor_or_group_member_ids == {user._id} @@ -1362,21 +1304,6 @@ def test_admin_contributor_or_group_member_ids(self, user): assert child1.admin_contributor_or_group_member_ids == {child1.creator._id, admin._id} assert child2.admin_contributor_or_group_member_ids == {child2.creator._id, child1.creator._id, admin._id} - # OSFGroup added with write perms - group_member = UserFactory() - group = OSFGroupFactory(creator=group_member) - project.add_osf_group(group, permissions.WRITE) - project.save() - assert child1.admin_contributor_or_group_member_ids == {child1.creator._id, admin._id} - assert child2.admin_contributor_or_group_member_ids == {child2.creator._id, child1.creator._id, admin._id} - - # OSFGroup updated to admin perms - project.update_osf_group(group, permissions.ADMIN) - project.save() - assert child1.admin_contributor_or_group_member_ids == {child1.creator._id, admin._id, group_member._id} - assert 
child2.admin_contributor_or_group_member_ids == {child2.creator._id, child1.creator._id, admin._id, group_member._id} - - class TestContributorAddedSignal: # Override disconnected signals from conftest @@ -1546,11 +1473,6 @@ def test_set_permissions(self, node, user): ) node.save() - with pytest.raises(NodeStateError): - node.set_permissions(user, WRITE) - - group = OSFGroupFactory(creator=user) - node.add_osf_group(group, ADMIN) with pytest.raises(NodeStateError): node.set_permissions(user, WRITE) @@ -1559,20 +1481,6 @@ def test_set_permissions(self, node, user): assert node.has_permission(high, permissions.WRITE) is True assert node.has_permission(high, permissions.ADMIN) is True - def test_set_permissions_raises_error_if_only_admins_permissions_are_reduced(self, node): - # creator is the only admin - with pytest.raises(NodeStateError) as excinfo: - node.set_permissions(node.creator, permissions=WRITE) - assert excinfo.value.args[0] == 'Must have at least one registered admin contributor' - - new_user = AuthUserFactory() - osf_group = OSFGroupFactory(creator=new_user) - node.add_osf_group(osf_group, permissions.ADMIN) - # A group member being added as a contributor doesn't throw any errors, even if that - # group member is being downgraded to write. Group members don't count towards - # the one registered admin contributor tally - node.set_permissions(new_user, permissions.WRITE) - def test_add_permission_with_admin_also_grants_read_and_write(self, node): user = UserFactory() Contributor.objects.create( @@ -1936,24 +1844,6 @@ def test_register_node_copies_contributors_from_draft_registration(self, mock_si assert registration.has_permission(draft_reg_user, permissions.WRITE) is True assert registration.has_permission(node_user, permissions.WRITE) is False - @mock.patch('website.project.signals.after_create_registration') - def test_register_node_does_not_copy_group_members(self, mock_signal): - user = UserFactory() - node = NodeFactory(creator=user) - - group_mem = UserFactory() - group = OSFGroupFactory(creator=group_mem) - node.add_osf_group(group, permissions.READ) - node.save() - - assert node.has_permission(group_mem, permissions.READ) is True - - draft_reg = DraftRegistrationFactory(branched_from=node) - registration = node.register_node(get_default_metaschema(), Auth(user), draft_reg, None) - - assert registration.has_permission(user, permissions.ADMIN) is True - assert registration.has_permission(group_mem, permissions.READ) is False - @mock.patch('website.project.signals.after_create_registration') def test_register_node_makes_private_registration(self, mock_signal): user = UserFactory() @@ -2152,25 +2042,6 @@ def test_add_unregistered_raises_error_if_user_is_registered(self, node, auth): auth=auth ) - def test_add_unregistered_contributor_already_group_member(self, node, user, auth): - given_name = 'Grapes McGee' - username = 'fake@cos.io' - group = OSFGroupFactory(creator=user) - unreg_user = group.add_unregistered_member(given_name, username, auth=Auth(user)) - assert unreg_user.get_unclaimed_record(group._id)['email'] == username - - node.add_osf_group(group, permissions.ADMIN) - - node.add_unregistered_contributor( - email=username, - fullname=given_name, - auth=auth - ) - node.save - unreg_user.reload() - unclaimed_data = unreg_user.get_unclaimed_record(node._primary_key) - assert unclaimed_data['email'] == username - def test_find_by_institutions(): inst1, inst2 = InstitutionFactory(), InstitutionFactory() project = ProjectFactory(is_public=True) @@ -2203,11 +2074,6 @@ def 
test_can_comment(): noncontrib = UserFactory() assert private_node.can_comment(Auth(noncontrib)) is False - group_mem = UserFactory() - group = OSFGroupFactory(creator=group_mem) - private_node.add_osf_group(group, permissions.READ) - assert private_node.can_comment(Auth(group_mem)) is True - def test_parent_kwarg(): parent = NodeFactory() child = NodeFactory(parent=parent) @@ -2685,46 +2551,22 @@ def test_manage_contributors_no_admins(self, node, auth): users, auth=auth, save=True, ) - def test_manage_contributors_no_registered_admins(self, node, auth): - unregistered = UnregUserFactory() - node.add_unregistered_contributor( - unregistered.fullname, - unregistered.email, - auth=Auth(node.creator), - permissions=ADMIN, - existing_user=unregistered - ) - users = [ - {'id': node.creator._id, 'permission': READ, 'visible': True}, - {'id': unregistered._id, 'permission': ADMIN, 'visible': True}, - ] - - group = OSFGroupFactory(creator=node.creator) - node.add_osf_group(group, permissions.ADMIN) - with pytest.raises(NodeStateError): - node.manage_contributors( - users, auth=auth, save=True, - ) - def test_get_admin_contributors(self, user, auth): read, write, admin = UserFactory(), UserFactory(), UserFactory() nonactive_admin = UserFactory() noncontrib = UserFactory() - group_member = UserFactory() - group = OSFGroupFactory(creator=group_member) project = ProjectFactory(creator=user) project.add_contributor(read, auth=auth, permissions=READ) project.add_contributor(write, auth=auth, permissions=WRITE) project.add_contributor(admin, auth=auth, permissions=ADMIN) project.add_contributor(nonactive_admin, auth=auth, permissions=ADMIN) - project.add_osf_group(group, permissions.ADMIN) project.save() nonactive_admin.is_disabled = True nonactive_admin.save() result = list(project.get_admin_contributors([ - read, write, admin, noncontrib, nonactive_admin, group_member + read, write, admin, noncontrib, nonactive_admin ])) assert admin in result @@ -2732,7 +2574,6 @@ def test_get_admin_contributors(self, user, auth): assert write not in result assert noncontrib not in result assert nonactive_admin not in result - assert group_member not in result # copied from tests/test_models.py class TestNodeTraversals: diff --git a/osf_tests/test_osfgroup.py b/osf_tests/test_osfgroup.py deleted file mode 100644 index 722cffd7bc7..00000000000 --- a/osf_tests/test_osfgroup.py +++ /dev/null @@ -1,1124 +0,0 @@ -from unittest import mock -import pytest -import time -from django.contrib.auth.models import Group -from django.core.exceptions import ValidationError - -from addons.github.tests import factories -from addons.osfstorage.models import OsfStorageFile -from framework.auth import Auth -from django.contrib.auth.models import AnonymousUser -from django.contrib.contenttypes.models import ContentType -from framework.exceptions import PermissionsError -from osf.models import OSFGroup, Node, OSFUser, OSFGroupLog, NodeLog -from osf.utils.permissions import MANAGER, MEMBER, MANAGE, READ, WRITE, ADMIN -from website.notifications.utils import get_all_node_subscriptions -from website.osf_groups import signals as group_signals -from .factories import ( - NodeFactory, - ProjectFactory, - AuthUserFactory, - OSFGroupFactory -) - -pytestmark = pytest.mark.django_db - -@pytest.fixture() -def manager(): - return AuthUserFactory() - -@pytest.fixture() -def member(): - return AuthUserFactory() - -@pytest.fixture() -def user(): - return AuthUserFactory() - -@pytest.fixture() -def user_two(): - return AuthUserFactory() - 
-@pytest.fixture() -def user_three(): - return AuthUserFactory() - -@pytest.fixture() -def auth(manager): - return Auth(manager) - -@pytest.fixture() -def project(manager): - return ProjectFactory(creator=manager) - -@pytest.fixture() -def osf_group(manager, member): - osf_group = OSFGroupFactory(creator=manager) - osf_group.make_member(member) - return osf_group - -class TestOSFGroup: - - def test_osf_group_creation(self, manager, member, user_two, fake): - osf_group = OSFGroup.objects.create(name=fake.bs(), creator=manager) - # OSFGroup creator given manage permissions - assert osf_group.has_permission(manager, MANAGE) is True - assert osf_group.has_permission(user_two, MANAGE) is False - - assert manager in osf_group.managers - assert manager in osf_group.members - assert manager not in osf_group.members_only - - user_two.is_superuser = True - user_two.save() - - # Superusers don't have permission to group - assert osf_group.has_permission(user_two, MEMBER) is False - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_make_manager(self, mock_send_mail, manager, member, user_two, user_three, osf_group): - # no permissions - with pytest.raises(PermissionsError): - osf_group.make_manager(user_two, Auth(user_three)) - - # member only - with pytest.raises(PermissionsError): - osf_group.make_manager(user_two, Auth(member)) - - # manage permissions - osf_group.make_manager(user_two, Auth(manager)) - assert osf_group.has_permission(user_two, MANAGE) is True - assert user_two in osf_group.managers - assert user_two in osf_group.members - assert mock_send_mail.call_count == 1 - - # upgrade to manager - osf_group.make_manager(member, Auth(manager)) - assert osf_group.has_permission(member, MANAGE) is True - assert member in osf_group.managers - assert member in osf_group.members - # upgrading an existing member does not re-send an email - assert mock_send_mail.call_count == 1 - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_make_member(self, mock_send_mail, manager, member, user_two, user_three, osf_group): - # no permissions - with pytest.raises(PermissionsError): - osf_group.make_member(user_two, Auth(user_three)) - - # member only - with pytest.raises(PermissionsError): - osf_group.make_member(user_two, Auth(member)) - - # manage permissions - osf_group.make_member(user_two, Auth(manager)) - assert osf_group.has_permission(user_two, MANAGE) is False - assert user_two not in osf_group.managers - assert user_two in osf_group.members - assert mock_send_mail.call_count == 1 - - # downgrade to member, sole manager - with pytest.raises(ValueError): - osf_group.make_member(manager, Auth(manager)) - - # downgrade to member - osf_group.make_manager(user_two, Auth(manager)) - assert user_two in osf_group.managers - assert user_two in osf_group.members - osf_group.make_member(user_two, Auth(manager)) - assert user_two not in osf_group.managers - assert user_two in osf_group.members - assert mock_send_mail.call_count == 1 - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_add_unregistered_member(self, mock_send_mail, manager, member, osf_group, user_two): - test_fullname = 'Test User' - test_email = 'test_member@cos.io' - test_manager_email = 'test_manager@cos.io' - - # Email already exists - with pytest.raises(ValueError): - osf_group.add_unregistered_member(test_fullname, user_two.username, auth=Auth(manager)) - - # Test need manager perms to add - with pytest.raises(PermissionsError): - osf_group.add_unregistered_member(test_fullname, 
test_email, auth=Auth(member)) - - # Add member - osf_group.add_unregistered_member(test_fullname, test_email, auth=Auth(manager)) - assert mock_send_mail.call_count == 1 - unreg_user = OSFUser.objects.get(username=test_email) - assert unreg_user in osf_group.members - assert unreg_user not in osf_group.managers - assert osf_group.has_permission(unreg_user, MEMBER) is True - assert osf_group._id in unreg_user.unclaimed_records - - # Attempt to add unreg user as a member - with pytest.raises(ValueError): - osf_group.add_unregistered_member(test_fullname, test_email, auth=Auth(manager)) - - # Add unregistered manager - osf_group.add_unregistered_member(test_fullname, test_manager_email, auth=Auth(manager), role=MANAGER) - assert mock_send_mail.call_count == 2 - unreg_manager = OSFUser.objects.get(username=test_manager_email) - assert unreg_manager in osf_group.members - assert unreg_manager in osf_group.managers - assert osf_group.has_permission(unreg_manager, MEMBER) is True - assert osf_group._id in unreg_manager.unclaimed_records - - # Add unregistered member with blocked email - with pytest.raises(ValidationError): - osf_group.add_unregistered_member(test_fullname, 'test@example.com', auth=Auth(manager), role=MANAGER) - - def test_remove_member(self, manager, member, user_three, osf_group): - new_member = AuthUserFactory() - osf_group.make_member(new_member) - assert new_member not in osf_group.managers - assert new_member in osf_group.members - - # no permissions - with pytest.raises(PermissionsError): - osf_group.remove_member(new_member, Auth(user_three)) - - # member only - with pytest.raises(PermissionsError): - osf_group.remove_member(new_member, Auth(member)) - - # manage permissions - osf_group.remove_member(new_member, Auth(manager)) - assert new_member not in osf_group.managers - assert new_member not in osf_group.members - - # Remove self - member can remove themselves - osf_group.remove_member(member, Auth(member)) - assert member not in osf_group.managers - assert member not in osf_group.members - - def test_remove_manager(self, manager, member, user_three, osf_group): - new_manager = AuthUserFactory() - osf_group.make_manager(new_manager) - # no permissions - with pytest.raises(PermissionsError): - osf_group.remove_member(new_manager, Auth(user_three)) - - # member only - with pytest.raises(PermissionsError): - osf_group.remove_member(new_manager, Auth(member)) - - # manage permissions - osf_group.remove_member(new_manager, Auth(manager)) - assert new_manager not in osf_group.managers - assert new_manager not in osf_group.members - - # can't remove last manager - with pytest.raises(ValueError): - osf_group.remove_member(manager, Auth(manager)) - assert manager in osf_group.managers - assert manager in osf_group.members - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_notify_group_member_email_does_not_send_before_throttle_expires(self, mock_send_mail, manager, osf_group): - member = AuthUserFactory() - assert member.member_added_email_records == {} - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager)) - assert mock_send_mail.call_count == 1 - - record = member.member_added_email_records[osf_group._id] - assert record is not None - # 2nd call does not send email because throttle period has not expired - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager)) - assert member.member_added_email_records[osf_group._id] == record - assert mock_send_mail.call_count == 1 - - 
@mock.patch('website.osf_groups.views.mails.send_mail') - def test_notify_group_member_email_sends_after_throttle_expires(self, mock_send_mail, osf_group, member, manager): - throttle = 0.5 - - member = AuthUserFactory() - assert member.member_added_email_records == {} - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager), throttle=throttle) - assert mock_send_mail.call_count == 1 - - time.sleep(1) # throttle period expires - # 2nd call does not send email because throttle period has not expired - assert member.member_added_email_records[osf_group._id] is not None - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager), throttle=throttle) - assert mock_send_mail.call_count == 2 - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_notify_group_unregistered_member_throttle(self, mock_send_mail, osf_group, member, manager): - throttle = 0.5 - - member = AuthUserFactory() - member.is_registered = False - member.add_unclaimed_record(osf_group, referrer=manager, given_name='grapes mcgee', email='grapes@cos.io') - member.save() - assert member.member_added_email_records == {} - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager), throttle=throttle) - assert mock_send_mail.call_count == 1 - - assert member.member_added_email_records[osf_group._id] is not None - # 2nd call does not send email because throttle period has not expired - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager)) - assert mock_send_mail.call_count == 1 - - time.sleep(1) # throttle period expires - # 2nd call does not send email because throttle period has not expired - assert member.member_added_email_records[osf_group._id] is not None - group_signals.member_added.send(osf_group, user=member, permission=WRITE, auth=Auth(manager), throttle=throttle) - assert mock_send_mail.call_count == 2 - - def test_rename_osf_group(self, manager, member, user_two, osf_group): - new_name = 'Platform Team' - # no permissions - with pytest.raises(PermissionsError): - osf_group.set_group_name(new_name, Auth(user_two)) - - # member only - with pytest.raises(PermissionsError): - osf_group.set_group_name(new_name, Auth(member)) - - # manage permissions - osf_group.set_group_name(new_name, Auth(manager)) - osf_group.save() - - assert osf_group.name == new_name - - def test_remove_group(self, manager, member, osf_group): - osf_group_name = osf_group.name - manager_group_name = osf_group.manager_group.name - member_group_name = osf_group.member_group.name - - osf_group.remove_group(Auth(manager)) - assert not OSFGroup.objects.filter(name=osf_group_name).exists() - assert not Group.objects.filter(name=manager_group_name).exists() - assert not Group.objects.filter(name=member_group_name).exists() - - assert manager_group_name not in manager.groups.values_list('name', flat=True) - - def test_remove_group_node_perms(self, manager, member, osf_group, project): - project.add_osf_group(osf_group, ADMIN) - assert project.has_permission(member, ADMIN) is True - - osf_group.remove_group(Auth(manager)) - - assert project.has_permission(member, ADMIN) is False - - def test_user_groups_property(self, manager, member, osf_group): - assert osf_group in manager.osf_groups - assert osf_group in member.osf_groups - - other_group = OSFGroupFactory() - - assert other_group not in manager.osf_groups - assert other_group not in member.osf_groups - - def test_user_group_roles(self, manager, 
member, user_three, osf_group): - assert manager.group_role(osf_group) == MANAGER - assert member.group_role(osf_group) == MEMBER - assert user_three.group_role(osf_group) is None - - def test_replace_contributor(self, manager, member, osf_group): - user = osf_group.add_unregistered_member('test_user', 'test@cos.io', auth=Auth(manager)) - assert user in osf_group.members - assert user not in osf_group.managers - assert ( - osf_group._id in - user.unclaimed_records.keys() - ) - osf_group.replace_contributor(user, member) - assert user not in osf_group.members - assert user not in osf_group.managers - assert osf_group.has_permission(member, MEMBER) is True - assert osf_group.has_permission(user, MEMBER) is False - - # test unclaimed_records is removed - assert ( - osf_group._id not in - user.unclaimed_records.keys() - ) - - def test_get_users_with_perm_osf_groups(self, project, manager, member, osf_group): - # Explicitly added as a contributor - read_users = project.get_users_with_perm(READ) - write_users = project.get_users_with_perm(WRITE) - admin_users = project.get_users_with_perm(ADMIN) - assert len(project.get_users_with_perm(READ)) == 1 - assert len(project.get_users_with_perm(WRITE)) == 1 - assert len(project.get_users_with_perm(ADMIN)) == 1 - assert manager in read_users - assert manager in write_users - assert manager in admin_users - - # Added through osf groups - project.add_osf_group(osf_group, WRITE) - read_users = project.get_users_with_perm(READ) - write_users = project.get_users_with_perm(WRITE) - admin_users = project.get_users_with_perm(ADMIN) - assert len(project.get_users_with_perm(READ)) == 2 - assert len(project.get_users_with_perm(WRITE)) == 2 - assert len(project.get_users_with_perm(ADMIN)) == 1 - assert member in read_users - assert member in write_users - assert member not in admin_users - - def test_merge_users_transfers_group_membership(self, member, manager, osf_group): - # merge member - other_user = AuthUserFactory() - other_user.merge_user(member) - other_user.save() - assert osf_group.is_member(other_user) - - # merge manager - other_other_user = AuthUserFactory() - other_other_user.merge_user(manager) - other_other_user.save() - assert osf_group.is_member(other_other_user) - assert osf_group.has_permission(other_other_user, MANAGE) - - def test_merge_users_already_group_manager(self, member, manager, osf_group): - # merge users - both users have group membership - different roles - manager.merge_user(member) - manager.save() - assert osf_group.has_permission(manager, MANAGE) - assert osf_group.is_member(member) is False - - def test_osf_group_is_admin_parent(self, project, manager, member, osf_group, user_two, user_three): - child = NodeFactory(parent=project, creator=manager) - assert project.is_admin_parent(manager) is True - assert project.is_admin_parent(member) is False - - project.add_contributor(user_two, WRITE, save=True) - assert project.is_admin_parent(user_two) is False - - assert child.is_admin_parent(manager) is True - child.add_contributor(user_two, ADMIN, save=True) - assert child.is_admin_parent(user_two) is True - - assert child.is_admin_parent(user_three) is False - osf_group.make_member(user_three) - project.add_osf_group(osf_group, WRITE) - assert child.is_admin_parent(user_three) is False - - project.update_osf_group(osf_group, ADMIN) - assert child.is_admin_parent(user_three) is True - assert child.is_admin_parent(user_three, include_group_admin=False) is False - project.remove_osf_group(osf_group) - - child.add_osf_group(osf_group, 
WRITE) - assert child.is_admin_parent(user_three) is False - child.update_osf_group(osf_group, ADMIN) - assert child.is_admin_parent(user_three) is True - assert child.is_admin_parent(user_three, include_group_admin=False) is False - - -class TestNodeGroups: - def test_node_contributors_and_group_members(self, manager, member, osf_group, project, user, user_two): - assert project.contributors_and_group_members.count() == 1 - project.add_osf_group(osf_group, ADMIN) - assert project.contributors_and_group_members.count() == 2 - project.add_contributor(user, WRITE) - project.add_contributor(user_two, READ) - project.save() - assert project.contributors_and_group_members.count() == 4 - - def test_add_osf_group_to_node_already_connected(self, manager, member, osf_group, project): - project.add_osf_group(osf_group, ADMIN) - assert project.has_permission(member, ADMIN) is True - - project.add_osf_group(osf_group, WRITE) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is True - - def test_osf_group_nodes(self, manager, member, project, osf_group): - nodes = osf_group.nodes - assert len(nodes) == 0 - project.add_osf_group(osf_group, READ) - assert project in osf_group.nodes - - project_two = ProjectFactory(creator=manager) - project_two.add_osf_group(osf_group, WRITE) - assert len(osf_group.nodes) == 2 - assert project_two in osf_group.nodes - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_add_osf_group_to_node(self, mock_send_mail, manager, member, user_two, osf_group, project): - # noncontributor - with pytest.raises(PermissionsError): - project.add_osf_group(osf_group, WRITE, auth=Auth(member)) - - # Non-admin on project - project.add_contributor(user_two, WRITE) - project.save() - with pytest.raises(PermissionsError): - project.add_osf_group(osf_group, WRITE, auth=Auth(user_two)) - - project.add_osf_group(osf_group, READ, auth=Auth(manager)) - assert mock_send_mail.call_count == 1 - # Manager was already a node admin - assert project.has_permission(manager, ADMIN) is True - assert project.has_permission(manager, WRITE) is True - assert project.has_permission(manager, READ) is True - - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, WRITE, auth=Auth(manager)) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert project.has_permission(member, ADMIN) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - # project admin cannot add a group they are not a manager of - other_group = OSFGroupFactory() - with pytest.raises(PermissionsError): - project.add_osf_group(other_group, ADMIN, auth=Auth(project.creator)) - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_add_osf_group_to_node_emails_and_subscriptions(self, mock_send_mail, manager, member, user_two, osf_group, project): - osf_group.make_member(user_two) - - # Manager is already a node contributor - already has subscriptions - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 0 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - assert mock_send_mail.call_count 
== 1 - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - # Three members of group, but user adding group to node doesn't get email - assert mock_send_mail.call_count == 3 - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 0 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - # Member is a contributor - project.add_contributor(member, WRITE, save=True) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - # Don't unsubscribe member because they belong to a group that has perms - project.remove_contributor(member, Auth(manager)) - assert len(get_all_node_subscriptions(manager, project)) == 2 - assert len(get_all_node_subscriptions(member, project)) == 2 - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - @mock.patch('website.osf_groups.views.mails.send_mail') - def test_add_group_to_node_throttle(self, mock_send_mail, osf_group, manager, member, project): - throttle = 100 - assert manager.group_connected_email_records == {} - group_signals.group_added_to_node.send(osf_group, node=project, user=manager, permission=WRITE, auth=Auth(member), throttle=throttle) - assert mock_send_mail.call_count == 1 - - assert manager.group_connected_email_records[osf_group._id] is not None - # 2nd call does not send email because throttle period has not expired - group_signals.group_added_to_node.send(osf_group, node=project, user=manager, permission=WRITE, auth=Auth(member), throttle=throttle) - assert mock_send_mail.call_count == 1 - - throttle = 0.5 - - time.sleep(1) # throttle period expires - # 2nd call does not send email because throttle period has not expired - assert manager.group_connected_email_records[osf_group._id] is not None - group_signals.group_added_to_node.send(osf_group, node=project, user=manager, permission=WRITE, auth=Auth(member), throttle=throttle) - assert mock_send_mail.call_count == 2 - - def test_add_osf_group_to_node_default_permission(self, manager, member, osf_group, project): - project.add_osf_group(osf_group, auth=Auth(manager)) - - assert project.has_permission(manager, ADMIN) is True - assert project.has_permission(manager, WRITE) is True - assert project.has_permission(manager, READ) is True - - # osf_group given write permissions by default - assert project.has_permission(member, ADMIN) is False - assert 
project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - def test_update_osf_group_node(self, manager, member, user_two, user_three, osf_group, project): - project.add_osf_group(osf_group, ADMIN) - - assert project.has_permission(member, ADMIN) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, READ) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, WRITE) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - project.update_osf_group(osf_group, ADMIN) - assert project.has_permission(member, ADMIN) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - # Project admin who does not belong to the manager group can update group permissions - project.add_contributor(user_two, ADMIN, save=True) - project.update_osf_group(osf_group, READ, auth=Auth(user_two)) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is True - - # Project write contributor cannot update group permissions - project.add_contributor(user_three, WRITE, save=True) - with pytest.raises(PermissionsError): - project.update_osf_group(osf_group, ADMIN, auth=Auth(user_three)) - assert project.has_permission(member, ADMIN) is False - - def test_remove_osf_group_from_node(self, manager, member, user_two, osf_group, project): - # noncontributor - with pytest.raises(PermissionsError): - project.remove_osf_group(osf_group, auth=Auth(member)) - - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - assert project.has_permission(member, ADMIN) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, READ) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is False - - # Project admin who does not belong to the manager group can remove the group - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - project.add_contributor(user_two, ADMIN) - project.save() - project.remove_osf_group(osf_group, auth=Auth(user_two)) - assert project.has_permission(member, ADMIN) is False - assert project.has_permission(member, WRITE) is False - assert project.has_permission(member, READ) is False - - # Manager who is not an admin can remove the group - user_three = AuthUserFactory() - osf_group.make_manager(user_three) - project.add_osf_group(osf_group, WRITE) - assert project.has_permission(user_three, ADMIN) is False - assert project.has_permission(user_three, WRITE) is True - assert project.has_permission(user_three, READ) is True - project.remove_osf_group(osf_group, auth=Auth(user_three)) - assert project.has_permission(user_three, ADMIN) is False - assert project.has_permission(user_three, WRITE) is False - assert project.has_permission(user_three, READ) is False - - def test_node_groups_property(self, manager, member, osf_group, project): - project.add_osf_group(osf_group, ADMIN, auth=Auth(manager)) - project.save() - assert 
osf_group in project.osf_groups - assert len(project.osf_groups) == 1 - - group_two = OSFGroupFactory(creator=manager) - project.add_osf_group(group_two, ADMIN, auth=Auth(manager)) - project.save() - assert group_two in project.osf_groups - assert len(project.osf_groups) == 2 - - def test_get_osf_groups_with_perms_property(self, manager, member, osf_group, project): - second_group = OSFGroupFactory(creator=manager) - third_group = OSFGroupFactory(creator=manager) - fourth_group = OSFGroupFactory(creator=manager) - OSFGroupFactory(creator=manager) - - project.add_osf_group(osf_group, ADMIN) - project.add_osf_group(second_group, WRITE) - project.add_osf_group(third_group, WRITE) - project.add_osf_group(fourth_group, READ) - - read_groups = project.get_osf_groups_with_perms(READ) - assert len(read_groups) == 4 - - write_groups = project.get_osf_groups_with_perms(WRITE) - assert len(write_groups) == 3 - - admin_groups = project.get_osf_groups_with_perms(ADMIN) - assert len(admin_groups) == 1 - - with pytest.raises(ValueError): - project.get_osf_groups_with_perms('crazy') - - def test_osf_group_node_can_view(self, project, manager, member, osf_group): - assert project.can_view(Auth(member)) is False - project.add_osf_group(osf_group, READ) - assert project.can_view(Auth(member)) is True - assert project.can_edit(Auth(member)) is False - - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, WRITE) - assert project.can_view(Auth(member)) is True - assert project.can_edit(Auth(member)) is True - - child = ProjectFactory(parent=project) - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, ADMIN) - # implicit OSF Group admin - assert child.can_view(Auth(member)) is True - assert child.can_edit(Auth(member)) is False - - grandchild = ProjectFactory(parent=child) - assert grandchild.can_view(Auth(member)) is True - assert grandchild.can_edit(Auth(member)) is False - - def test_node_has_permission(self, project, manager, member, osf_group): - assert project.can_view(Auth(member)) is False - project.add_osf_group(osf_group, READ) - assert project.has_permission(member, READ) is True - assert project.has_permission(member, WRITE) is False - assert osf_group.get_permission_to_node(project) == READ - - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, WRITE) - assert project.has_permission(member, READ) is True - assert project.has_permission(member, WRITE) is True - assert project.has_permission(member, ADMIN) is False - assert osf_group.get_permission_to_node(project) == WRITE - - child = ProjectFactory(parent=project) - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, ADMIN) - assert osf_group.get_permission_to_node(project) == ADMIN - # implicit OSF Group admin - assert child.has_permission(member, ADMIN) is False - assert child.has_permission(member, READ) is True - assert osf_group.get_permission_to_node(child) is None - - grandchild = ProjectFactory(parent=child) - assert grandchild.has_permission(member, WRITE) is False - assert grandchild.has_permission(member, READ) is True - - def test_node_get_permissions_override(self, project, manager, member, osf_group): - project.add_osf_group(osf_group, WRITE) - assert set(project.get_permissions(member)) == {READ, WRITE} - - project.remove_osf_group(osf_group) - project.add_osf_group(osf_group, READ) - assert set(project.get_permissions(member)) == {READ} - - anon = AnonymousUser() - assert project.get_permissions(anon) == [] - - def test_is_contributor(self, project, 
manager, member, osf_group): - assert project.is_contributor(manager) is True - assert project.is_contributor(member) is False - project.add_osf_group(osf_group, READ, auth=Auth(project.creator)) - assert project.is_contributor(member) is False - assert project.is_contributor_or_group_member(member) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert project.is_contributor_or_group_member(member) is False - project.add_contributor(member, READ) - assert project.is_contributor(member) is True - assert project.is_contributor_or_group_member(member) is True - - def test_is_contributor_or_group_member(self, project, manager, member, osf_group): - project.add_osf_group(osf_group, ADMIN, auth=Auth(project.creator)) - assert project.is_contributor_or_group_member(member) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert project.is_contributor_or_group_member(member) is False - project.add_osf_group(osf_group, WRITE, auth=Auth(project.creator)) - assert project.is_contributor_or_group_member(member) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - assert project.is_contributor_or_group_member(member) is False - project.add_osf_group(osf_group, READ, auth=Auth(project.creator)) - assert project.is_contributor_or_group_member(member) is True - - project.remove_osf_group(osf_group, auth=Auth(manager)) - osf_group.add_unregistered_member('jane', 'janedoe@cos.io', Auth(manager)) - unreg = osf_group.members.get(username='janedoe@cos.io') - assert unreg.is_registered is False - assert project.is_contributor_or_group_member(unreg) is False - project.add_osf_group(osf_group, READ, auth=Auth(project.creator)) - assert project.is_contributor_or_group_member(unreg) is True - - child = ProjectFactory(parent=project) - assert child.is_contributor_or_group_member(manager) is False - - def test_node_object_can_view_osfgroups(self, manager, member, project, osf_group): - project.add_contributor(member, ADMIN, save=True) # Member is explicit admin contributor on project - child = NodeFactory(parent=project, creator=manager) # Member is implicit admin on child - grandchild = NodeFactory(parent=child, creator=manager) # Member is implicit admin on grandchild - - project_two = ProjectFactory(creator=manager) - project_two.add_osf_group(osf_group, ADMIN) # Member has admin permissions to project_two through osf_group - child_two = NodeFactory(parent=project_two, creator=manager) # Member has implicit admin on child_two through osf_group - grandchild_two = NodeFactory(parent=child_two, creator=manager) # Member has implicit admin perms on grandchild_two through osf_group - can_view = Node.objects.can_view(member) - assert len(can_view) == 6 - assert set(list(can_view.values_list('id', flat=True))) == {project.id, - child.id, - grandchild.id, - project_two.id, - child_two.id, - grandchild_two.id} - - grandchild_two.is_deleted = True - grandchild_two.save() - can_view = Node.objects.can_view(member) - assert len(can_view) == 5 - assert grandchild_two not in can_view - - def test_parent_admin_users_osf_groups(self, manager, member, user_two, project, osf_group): - child = NodeFactory(parent=project, creator=manager) - project.add_osf_group(osf_group, ADMIN) - # Manager has explict admin to child, member has implicit admin. 
- # Manager should be in admin_users, member should be in parent_admin_users - admin_users = child.get_users_with_perm(ADMIN) - assert manager in admin_users - assert member not in admin_users - - assert manager not in child.parent_admin_users - assert member in child.parent_admin_users - - user_two.is_superuser = True - user_two.save() - - assert user_two not in admin_users - assert user_two not in child.parent_admin_users - - -class TestOSFGroupLogging: - def test_logging(self, project, manager, member): - # Calling actions 2x in this test to assert we're not getting double logs - group = OSFGroup.objects.create(name='My Lab', creator_id=manager.id) - assert group.logs.count() == 2 - log = group.logs.last() - assert log.action == OSFGroupLog.GROUP_CREATED - assert log.user == manager - assert log.user == manager - assert log.params['group'] == group._id - - log = group.logs.first() - assert log.action == OSFGroupLog.MANAGER_ADDED - assert log.params['group'] == group._id - - group.make_member(member, Auth(manager)) - group.make_member(member, Auth(manager)) - assert group.logs.count() == 3 - log = group.logs.first() - assert log.action == OSFGroupLog.MEMBER_ADDED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['user'] == member._id - - group.make_manager(member, Auth(manager)) - group.make_manager(member, Auth(manager)) - assert group.logs.count() == 4 - log = group.logs.first() - assert log.action == OSFGroupLog.ROLE_UPDATED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['user'] == member._id - assert log.params['new_role'] == MANAGER - - group.make_member(member, Auth(manager)) - group.make_member(member, Auth(manager)) - log = group.logs.first() - assert group.logs.count() == 5 - assert log.action == OSFGroupLog.ROLE_UPDATED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['user'] == member._id - assert log.params['new_role'] == MEMBER - - group.remove_member(member, Auth(manager)) - group.remove_member(member, Auth(manager)) - assert group.logs.count() == 6 - log = group.logs.first() - assert log.action == OSFGroupLog.MEMBER_REMOVED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['user'] == member._id - - group.set_group_name('New Name', Auth(manager)) - group.set_group_name('New Name', Auth(manager)) - assert group.logs.count() == 7 - log = group.logs.first() - assert log.action == OSFGroupLog.EDITED_NAME - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['name_original'] == 'My Lab' - - project.add_osf_group(group, WRITE, Auth(manager)) - project.add_osf_group(group, WRITE, Auth(manager)) - assert group.logs.count() == 8 - log = group.logs.first() - assert log.action == OSFGroupLog.NODE_CONNECTED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['node'] == project._id - assert log.params['permission'] == WRITE - node_log = project.logs.first() - - assert node_log.action == NodeLog.GROUP_ADDED - assert node_log.user == manager - assert node_log.params['group'] == group._id - assert node_log.params['node'] == project._id - assert node_log.params['permission'] == WRITE - - project.update_osf_group(group, READ, Auth(manager)) - project.update_osf_group(group, READ, Auth(manager)) - log = group.logs.first() - assert group.logs.count() == 9 - assert log.action == OSFGroupLog.NODE_PERMS_UPDATED - assert log.user == manager - assert 
log.params['group'] == group._id - assert log.params['node'] == project._id - assert log.params['permission'] == READ - node_log = project.logs.first() - - assert node_log.action == NodeLog.GROUP_UPDATED - assert node_log.user == manager - assert node_log.params['group'] == group._id - assert node_log.params['node'] == project._id - assert node_log.params['permission'] == READ - - project.remove_osf_group(group, Auth(manager)) - project.remove_osf_group(group, Auth(manager)) - assert group.logs.count() == 10 - log = group.logs.first() - assert log.action == OSFGroupLog.NODE_DISCONNECTED - assert log.user == manager - assert log.params['group'] == group._id - assert log.params['node'] == project._id - node_log = project.logs.first() - - assert node_log.action == NodeLog.GROUP_REMOVED - assert node_log.user == manager - assert node_log.params['group'] == group._id - assert node_log.params['node'] == project._id - - project.add_osf_group(group, WRITE, Auth(manager)) - project.add_osf_group(group, WRITE, Auth(manager)) - group.remove_group(auth=Auth(manager)) - - node_log = project.logs.first() - assert node_log.action == NodeLog.GROUP_REMOVED - assert node_log.user == manager - assert node_log.params['group'] == group._id - assert node_log.params['node'] == project._id - - -class TestRemovingContributorOrGroupMembers: - """ - Post OSF-Groups, the same kinds of checks you run when removing a contributor, - need to be run when a group is removed from a node (or a user is removed from a group, - or the group is deleted altogether). - - The actions are only executed if the user has no perms at all: no contributorship, - and no group membership - """ - - @pytest.fixture() - def project(self, user_two, user_three, external_account): - project = ProjectFactory(creator=user_two) - project.add_contributor(user_three, ADMIN) - project.add_addon('github', auth=Auth(user_two)) - project.creator.add_addon('github') - project.creator.external_accounts.add(external_account) - project.creator.save() - return project - - @pytest.fixture() - def file(self, project, user_two): - filename = 'my_file.txt' - project_file = OsfStorageFile.create( - target_object_id=project.id, - target_content_type=ContentType.objects.get_for_model(project), - path=f'/{filename}', - name=filename, - materialized_path=f'/{filename}') - - project_file.save() - from addons.osfstorage import settings as osfstorage_settings - - project_file.create_version(user_two, { - 'object': '06d80e', - 'service': 'cloud', - osfstorage_settings.WATERBUTLER_RESOURCE: 'osf', - }, { - 'size': 1337, - 'contentType': 'img/png' - }).save - project_file.checkout = user_two - project_file.save() - return project_file - - @pytest.fixture() - def external_account(self): - return factories.GitHubAccountFactory() - - @pytest.fixture() - def node_settings(self, project, external_account): - node_settings = project.get_addon('github') - user_settings = project.creator.get_addon('github') - user_settings.oauth_grants[project._id] = {external_account._id: []} - user_settings.save() - node_settings.user_settings = user_settings - node_settings.user = 'Queen' - node_settings.repo = 'Sheer-Heart-Attack' - node_settings.external_account = external_account - node_settings.save() - node_settings.set_auth - return node_settings - - def test_remove_contributor_no_member_perms(self, project, node_settings, user_two, user_three, request_context, file): - assert project.get_addon('github').user_settings is not None - assert file.checkout is not None - assert 
len(get_all_node_subscriptions(user_two, project)) == 2 - project.remove_contributor(user_two, Auth(user_three)) - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_remove_group_from_node_no_contributor_perms(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - # Manually removing contributor - contrib_obj = project.contributor_set.get(user=user_two) - contrib_obj.delete() - project.clear_permissions(user_two) - - assert project.is_contributor(user_two) is False - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - project.remove_osf_group(group) - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_remove_member_no_contributor_perms(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - group.make_manager(user_three) - # Manually removing contributor - contrib_obj = project.contributor_set.get(user=user_two) - contrib_obj.delete() - project.clear_permissions(user_two) - - assert project.is_contributor(user_two) is False - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_member(user_two) - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_delete_group_no_contributor_perms(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - group.make_manager(user_three) - # Manually removing contributor - contrib_obj = project.contributor_set.get(user=user_two) - contrib_obj.delete() - project.clear_permissions(user_two) - - assert project.is_contributor(user_two) is False - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_group() - project.reload() - - assert project.get_addon('github').user_settings is None - file.reload() - assert file.checkout is None - assert len(get_all_node_subscriptions(user_two, project)) == 0 - - def test_remove_contributor_also_member(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - project.remove_osf_group(group) - project.reload() - - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - def test_remove_osf_group_from_node_also_member(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is 
True - assert node_settings.user_settings is not None - project.remove_osf_group(group) - project.reload() - - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - def test_remove_member_also_contributor(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - group.make_manager(user_three) - project.add_osf_group(group, ADMIN) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_member(user_two) - project.reload() - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 - - def test_delete_group_also_contributor(self, project, node_settings, user_two, user_three, request_context, file): - group = OSFGroupFactory(creator=user_two) - project.add_osf_group(group, ADMIN) - group.make_manager(user_three) - - assert project.is_contributor(user_two) is True - assert project.is_contributor_or_group_member(user_two) is True - assert node_settings.user_settings is not None - group.remove_group() - project.reload() - assert project.get_addon('github').user_settings is not None - file.reload() - assert file.checkout is not None - assert len(get_all_node_subscriptions(user_two, project)) == 2 diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index c031fcc344a..ba9a144ec4a 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -6,7 +6,6 @@ from urllib.parse import urlparse, urljoin, parse_qs from django.db import connection, transaction -from django.contrib.auth.models import Group from django.test.utils import CaptureQueriesContext from django.utils import timezone from django.conf import settings as django_conf_settings @@ -26,7 +25,6 @@ from osf.models import ( AbstractNode, OSFUser, - OSFGroup, Tag, Contributor, NotableDomain, @@ -55,7 +53,6 @@ ExternalAccountFactory, InstitutionFactory, NodeFactory, - OSFGroupFactory, PreprintProviderFactory, ProjectFactory, TagFactory, @@ -235,30 +232,6 @@ def test_merged_user_with_two_account_on_same_project_with_different_visibility_ assert project.get_visible(user) is True assert project.is_contributor(user2) is False - def test_merged_user_group_member_permissions_are_ignored(self, user): - user2 = UserFactory.build() - user2.save() - group = OSFGroupFactory(creator=user2) - - project = ProjectFactory(is_public=True) - project.add_osf_group(group, permissions.ADMIN) - assert project.has_permission(user2, permissions.ADMIN) - # Both the master and dupe are contributors - project.add_contributor(user2, log=False) - project.add_contributor(user, log=False) - project.set_permissions(user=user, permissions=permissions.READ) - project.set_permissions(user=user2, permissions=permissions.WRITE) - project.save() - user.merge_user(user2) - user.save() - project.reload() - - assert project.has_permission(user, permissions.ADMIN) is True - assert project.is_admin_contributor(user) is False - assert project.is_contributor(user2) is False - assert group.is_member(user) is True - assert group.is_member(user2) is False - def test_merge_projects(self): user = AuthUserFactory() user2 = AuthUserFactory() @@ -837,25 +810,6 @@ def test_has_osfstorage_usersettings(self, user): class TestProjectsInCommon: - def 
test_get_projects_in_common(self, user, auth): - user2 = UserFactory() - project = NodeFactory(creator=user) - project.add_contributor(contributor=user2, auth=auth) - project.save() - - group = OSFGroupFactory(creator=user, name='Platform') - group.make_member(user2) - group_project = ProjectFactory() - group_project.add_osf_group(group) - group_project.save() - - project_keys = {node._id for node in user.all_nodes} - projects = set(user.all_nodes) - user2_project_keys = {node._id for node in user2.all_nodes} - - assert {n._id for n in user.get_projects_in_common(user2)} == project_keys.intersection(user2_project_keys) - assert user.get_projects_in_common(user2) == projects.intersection(user2.all_nodes) - def test_n_projects_in_common(self, user, auth): user2 = UserFactory() user3 = UserFactory() @@ -864,9 +818,7 @@ def test_n_projects_in_common(self, user, auth): project.add_contributor(contributor=user2, auth=auth) project.save() - group = OSFGroupFactory(name='Platform', creator=user) - group.make_member(user3) - project.add_osf_group(group) + project.add_contributor(contributor=user, auth=auth) project.save() assert user.n_projects_in_common(user2) == 1 @@ -1795,9 +1747,6 @@ def test_contributor_to_property(self): project_to_be_invisible_on = ProjectFactory() project_to_be_invisible_on.add_contributor(self.user, visible=False) project_to_be_invisible_on.save() - group = OSFGroupFactory(creator=self.user, name='Platform') - group_project = ProjectFactory() - group_project.add_osf_group(group, permissions.READ) contributor_to_nodes = [node._id for node in self.user.contributor_to] @@ -1807,7 +1756,6 @@ def test_contributor_to_property(self): assert deleted_node._id not in contributor_to_nodes assert bookmark_collection_node._id not in contributor_to_nodes assert collection_node._id not in contributor_to_nodes - assert group_project._id not in contributor_to_nodes def test_contributor_or_group_member_to_property(self): normal_node = ProjectFactory(creator=self.user) @@ -1820,9 +1768,6 @@ def test_contributor_or_group_member_to_property(self): project_to_be_invisible_on = ProjectFactory() project_to_be_invisible_on.add_contributor(self.user, visible=False) project_to_be_invisible_on.save() - group = OSFGroupFactory(creator=self.user, name='Platform') - group_project = ProjectFactory() - group_project.add_osf_group(group, permissions.READ) registration = RegistrationFactory(creator=self.user) contributor_to_or_group_member_nodes = [node._id for node in self.user.contributor_or_group_member_to] @@ -1833,16 +1778,10 @@ def test_contributor_or_group_member_to_property(self): assert deleted_node._id not in contributor_to_or_group_member_nodes assert bookmark_collection_node._id not in contributor_to_or_group_member_nodes assert collection_node._id not in contributor_to_or_group_member_nodes - assert group_project._id in contributor_to_or_group_member_nodes assert registration._id in contributor_to_or_group_member_nodes def test_all_nodes_property(self): project = ProjectFactory(creator=self.user) - project_two = ProjectFactory() - - group = OSFGroupFactory(creator=self.user) - project_two.add_osf_group(group) - project_two.save() project_three = ProjectFactory() project_three.save() @@ -1850,7 +1789,6 @@ def test_all_nodes_property(self): user_nodes = self.user.all_nodes assert user_nodes.count() == 2 assert project in user_nodes - assert project_two in user_nodes assert project_three not in user_nodes def test_visible_contributor_to_property(self): @@ -2238,47 +2176,6 @@ def 
test_cant_gdpr_delete_shared_node_if_only_admin(self, user, project_user_is_ assert exc_info.value.args[0] == 'You cannot delete Node {} because it would' \ ' be a Node with contributors, but with no admin.'.format(project_user_is_only_admin._id) - def test_cant_gdpr_delete_osf_group_if_only_manager(self, user): - group = OSFGroupFactory(name='My Group', creator=user) - osf_group_name = group.name - manager_group_name = group.manager_group.name - member_group_name = group.member_group.name - member = AuthUserFactory() - group.make_member(member) - - with pytest.raises(UserStateError) as exc_info: - user.gdpr_delete() - - assert exc_info.value.args[0] == 'You cannot delete this user because ' \ - 'they are the only registered manager of OSFGroup ' \ - '{} that contains other members.'.format(group._id) - - unregistered = group.add_unregistered_member('fake_user', 'fake_email@cos.io', Auth(user), 'manager') - assert len(group.managers) == 2 - - with pytest.raises(UserStateError) as exc_info: - user.gdpr_delete() - - assert exc_info.value.args[0] == 'You cannot delete this user because ' \ - 'they are the only registered manager of OSFGroup ' \ - '{} that contains other members.'.format(group._id) - - group.remove_member(member) - member.gdpr_delete() - # User is not the last member in the group, so they are just removed - assert OSFGroup.objects.filter(name=osf_group_name).exists() - assert Group.objects.filter(name=manager_group_name).exists() - assert Group.objects.filter(name=member_group_name).exists() - assert group.is_member(member) is False - assert group.is_manager(member) is False - - group.remove_member(unregistered) - user.gdpr_delete() - # Group was deleted because user was the only member - assert not OSFGroup.objects.filter(name=osf_group_name).exists() - assert not Group.objects.filter(name=manager_group_name).exists() - assert not Group.objects.filter(name=member_group_name).exists() - def test_cant_gdpr_delete_with_addon_credentials(self, user, project_with_two_admins_and_addon_credentials): with pytest.raises(UserStateError) as exc_info: diff --git a/tasks/__init__.py b/tasks/__init__.py index 180d7838126..1490638ac7f 100755 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -355,7 +355,6 @@ def test_module(ctx, module=None, numprocesses=None, nocapture=False, params=Non 'api_tests/meetings', 'api_tests/metrics', 'api_tests/nodes', - 'api_tests/osf_groups', 'api_tests/requests', 'api_tests/resources', 'api_tests/schema_responses', diff --git a/tests/test_misc_views.py b/tests/test_misc_views.py index 35bccc88119..fb501b80233 100644 --- a/tests/test_misc_views.py +++ b/tests/test_misc_views.py @@ -29,7 +29,6 @@ AuthUserFactory, CommentFactory, NodeFactory, - OSFGroupFactory, PreprintFactory, PreprintProviderFactory, PrivateLinkFactory, @@ -193,13 +192,6 @@ def test_check_can_access_valid(self): self.project.save() assert check_can_access(self.project, contributor) - def test_check_can_access_osf_group_member_valid(self): - user = AuthUserFactory() - group = OSFGroupFactory(creator=user) - self.project.add_osf_group(group, permissions.READ) - self.project.save() - assert check_can_access(self.project, user) - def test_check_user_access_invalid(self): noncontrib = AuthUserFactory() with pytest.raises(HTTPError): @@ -632,17 +624,6 @@ def test_show_wiki_is_false_for_read_contributors_when_no_wiki_or_content(self): def test_show_wiki_is_false_for_noncontributors_when_no_wiki_or_content(self): assert not _should_show_wiki_widget(self.project, None) - def 
test_show_wiki_for_osf_group_members(self): - group = OSFGroupFactory(creator=self.noncontributor) - self.project.add_osf_group(group, permissions.READ) - assert not _should_show_wiki_widget(self.project, self.noncontributor) - assert not _should_show_wiki_widget(self.project2, self.noncontributor) - - self.project.remove_osf_group(group) - self.project.add_osf_group(group, permissions.WRITE) - assert _should_show_wiki_widget(self.project, self.noncontributor) - assert not _should_show_wiki_widget(self.project2, self.noncontributor) - class TestUnconfirmedUserViews(OsfTestCase): diff --git a/tests/test_project_creation_view.py b/tests/test_project_creation_view.py index b2b0aeae788..da6fa8ac76a 100644 --- a/tests/test_project_creation_view.py +++ b/tests/test_project_creation_view.py @@ -7,7 +7,6 @@ from osf.utils import permissions from osf_tests.factories import ( AuthUserFactory, - OSFGroupFactory, ProjectFactory, ProjectWithAddonFactory, ) @@ -114,10 +113,8 @@ def test_create_component_with_contributors_read_write(self): url = web_url_for('project_new_node', pid=self.project._id) non_admin = AuthUserFactory() read_user = AuthUserFactory() - group = OSFGroupFactory(creator=read_user) self.project.add_contributor(non_admin, permissions=permissions.WRITE) self.project.add_contributor(read_user, permissions=permissions.READ) - self.project.add_osf_group(group, permissions.ADMIN) self.project.save() post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True} res = self.app.post(url, data=post_data, auth=non_admin.auth) @@ -136,8 +133,6 @@ def test_create_component_with_contributors_read_write(self): assert child.has_permission(read_user, permissions.ADMIN) is False assert child.has_permission(read_user, permissions.WRITE) is False assert child.has_permission(read_user, permissions.READ) is True - # User creating the component was not a manager on the group - assert group not in child.osf_groups # check redirect url assert '/contributors/' in res.location @@ -145,10 +140,8 @@ def test_group_copied_over_to_component_if_manager(self): url = web_url_for('project_new_node', pid=self.project._id) non_admin = AuthUserFactory() write_user = AuthUserFactory() - group = OSFGroupFactory(creator=write_user) self.project.add_contributor(non_admin, permissions=permissions.WRITE) self.project.add_contributor(write_user, permissions=permissions.WRITE) - self.project.add_osf_group(group, permissions.ADMIN) self.project.save() post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True} res = self.app.post(url, data=post_data, auth=write_user.auth) @@ -166,8 +159,6 @@ def test_group_copied_over_to_component_if_manager(self): assert child.has_permission(write_user, permissions.ADMIN) is True assert child.has_permission(write_user, permissions.WRITE) is True assert child.has_permission(write_user, permissions.READ) is True - # User creating the component was a manager of the group, so group copied - assert group in child.osf_groups # check redirect url assert '/contributors/' in res.location @@ -260,4 +251,3 @@ def test_project_new_from_template_contributor(self): url = api_url_for('project_new_from_template', nid=project._id) res = self.app.post(url, auth=contributor.auth) assert res.status_code == 201 - diff --git a/tests/test_registrations/base.py b/tests/test_registrations/base.py index 61e86cc767d..60d8b855808 100644 --- a/tests/test_registrations/base.py +++ b/tests/test_registrations/base.py @@ -9,7 +9,7 @@ 
from osf.models import RegistrationSchema from tests.base import OsfTestCase -from osf_tests.factories import AuthUserFactory, ProjectFactory, DraftRegistrationFactory, OSFGroupFactory +from osf_tests.factories import AuthUserFactory, ProjectFactory, DraftRegistrationFactory class RegistrationsTestBase(OsfTestCase): def setUp(self): @@ -26,9 +26,6 @@ def setUp(self): save=True ) self.non_contrib = AuthUserFactory() - self.group_mem = AuthUserFactory() - self.group = OSFGroupFactory(creator=self.group_mem) - self.node.add_osf_group(self.group, permissions.ADMIN) self.meta_schema = RegistrationSchema.objects.get(name='Open-Ended Registration', schema_version=2) diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index 67f0b0fb497..61ec894640b 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -15,7 +15,7 @@ from osf_tests.factories import ( AuthUserFactory, NodeFactory, ProjectFactory, RegistrationFactory, UserFactory, UnconfirmedUserFactory, - UnregUserFactory, OSFGroupFactory + UnregUserFactory ) from osf.utils import tokens from osf.exceptions import ( @@ -195,15 +195,6 @@ def test_non_admin_approval_token_raises_PermissionsError(self): assert self.registration.is_pending_retraction assert not self.registration.is_retracted - # group admin on node cannot retract registration - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - self.registration.registered_from.add_osf_group(group, permissions.ADMIN) - with pytest.raises(PermissionsError): - self.registration.retraction.approve_retraction(group_mem, approval_token) - assert self.registration.is_pending_retraction - assert not self.registration.is_retracted - def test_one_approval_with_one_admin_retracts(self): self.registration.retract_registration(self.user) self.registration.save() @@ -769,10 +760,6 @@ def setUp(self): self.retraction_get_url = self.registration.web_url_for('node_registration_retraction_get') self.justification = fake.sentence() - self.group_mem = AuthUserFactory() - self.group = OSFGroupFactory(creator=self.group_mem) - self.registration.registered_from.add_osf_group(self.group, permissions.ADMIN) - def test_GET_retraction_page_when_pending_retraction_returns_HTTPError_BAD_REQUEST(self): self.registration.retract_registration(self.user) self.registration.save() diff --git a/tests/test_serializers.py b/tests/test_serializers.py index f86e983cbd3..08f92f7d232 100644 --- a/tests/test_serializers.py +++ b/tests/test_serializers.py @@ -7,7 +7,6 @@ UserFactory, RegistrationFactory, NodeFactory, - OSFGroupFactory, CollectionFactory, ) from osf.models import NodeRelation @@ -187,23 +186,6 @@ def test_serialize_node_summary_child_exists(self): result = _view_project(parent_node, Auth(user)) assert result['node']['child_exists'] == True - def test_serialize_node_summary_is_contributor_osf_group(self): - project = ProjectFactory() - user = UserFactory() - group = OSFGroupFactory(creator=user) - project.add_osf_group(group, permissions.WRITE) - - res = _view_project( - project, auth=Auth(user), - ) - assert not res['user']['is_contributor'] - assert res['user']['is_contributor_or_group_member'] - assert not res['user']['is_admin'] - assert res['user']['can_edit'] - assert res['user']['has_read_permissions'] - assert set(res['user']['permissions']) == {permissions.READ, permissions.WRITE} - assert res['user']['can_comment'] - def 
test_serialize_node_search_returns_only_visible_contributors(self): node = NodeFactory() non_visible_contributor = UserFactory() diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index 3e1c455c078..5a30ca79d78 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -57,7 +57,6 @@ CollectionFactory, CommentFactory, NodeFactory, - OSFGroupFactory, PreprintFactory, PreprintProviderFactory, PrivateLinkFactory, diff --git a/website/osf_groups/__init__.py b/website/osf_groups/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/website/osf_groups/signals.py b/website/osf_groups/signals.py deleted file mode 100644 index 6edd7aa3603..00000000000 --- a/website/osf_groups/signals.py +++ /dev/null @@ -1,7 +0,0 @@ -import blinker - -signals = blinker.Namespace() - -member_added = signals.signal('member-added') -unreg_member_added = signals.signal('unreg-member-added') -group_added_to_node = signals.signal('group-added') diff --git a/website/osf_groups/views.py b/website/osf_groups/views.py deleted file mode 100644 index b8b9d6aa638..00000000000 --- a/website/osf_groups/views.py +++ /dev/null @@ -1,135 +0,0 @@ -import logging - -from framework.utils import get_timestamp, throttle_period_expired - -from website import mails, settings -from website.notifications.exceptions import InvalidSubscriptionError -from website.notifications.utils import ( - check_if_all_global_subscriptions_are_none, - subscribe_user_to_notifications, -) -from website.osf_groups.signals import ( - unreg_member_added, - member_added, - group_added_to_node, -) -logger = logging.getLogger(__name__) - - -@member_added.connect -def notify_added_group_member(group, user, permission, auth=None, throttle=None, email_template='default', *args, **kwargs): - if email_template == 'false': - return - - throttle = throttle or settings.GROUP_MEMBER_ADDED_EMAIL_THROTTLE - - member_record = user.member_added_email_records.get(group._id, {}) - if member_record: - timestamp = member_record.get('last_sent', None) - if timestamp: - if not throttle_period_expired(timestamp, throttle): - return - else: - user.member_added_email_records[group._id] = {} - - if user.is_registered: - email_template = mails.GROUP_MEMBER_ADDED - mails.send_mail( - to_addr=user.username, - mail=email_template, - user=user, - group_name=group.name, - permission=permission, - referrer_name=auth.user.fullname if auth else '', - osf_contact_email=settings.OSF_CONTACT_EMAIL, - ) - user.member_added_email_records[group._id]['last_sent'] = get_timestamp() - user.save() - - else: - unreg_member_added.send(group, user=user, permission=permission, auth=auth, throttle=throttle, email_template=email_template) - - -def send_claim_member_email(email, user, group, permission, auth=None, throttle=None, email_template='default'): - """ - Unregistered user claiming a user account as a group member of an OSFGroup. Send an email for claiming the account. - Sends to the given email - - :param str email: The address given in the claim user form - :param User user: The User record to claim. - :param OSFGroup group: The group where the user claimed their account. 
- :return - - """ - - claimer_email = email.lower().strip() - claim_url = user.get_claim_url(group._id, external=True) - - throttle = throttle or settings.GROUP_MEMBER_ADDED_EMAIL_THROTTLE - - mails.send_mail( - to_addr=claimer_email, - mail=email_template, - user=user, - group_name=group.name, - referrer_name=auth.user.fullname if auth else '', - permission=permission, - claim_url=claim_url, - osf_contact_email=settings.OSF_CONTACT_EMAIL, - ) - user.member_added_email_records[group._id]['last_sent'] = get_timestamp() - user.save() - - return claimer_email - - -@unreg_member_added.connect -def finalize_invitation(group, user, permission, auth, throttle, email_template='default'): - email_template = mails.GROUP_MEMBER_UNREGISTERED_ADDED - - try: - record = user.get_unclaimed_record(group._id) - except ValueError: - pass - else: - if record['email']: - send_claim_member_email(record['email'], user, group, permission, auth=auth, throttle=throttle, email_template=email_template) - - -@group_added_to_node.connect -def notify_added_node_group_member(group, node, user, permission, auth, throttle=None): - throttle = throttle or settings.GROUP_CONNECTED_EMAIL_THROTTLE - - node_group_record = user.group_connected_email_records.get(group._id, {}) - if node_group_record: - timestamp = node_group_record.get('last_sent', None) - if timestamp: - if not throttle_period_expired(timestamp, throttle): - return - else: - user.group_connected_email_records[group._id] = {} - - if (not auth or auth.user != user) and user.is_registered: - email_template = mails.GROUP_ADDED_TO_NODE - mails.send_mail( - to_addr=user.username, - mail=email_template, - user=user, - node=node, - all_global_subscriptions_none=check_if_all_global_subscriptions_are_none(user), - group_name=group.name, - permission=permission, - referrer_name=auth.user.fullname if auth else '', - osf_contact_email=settings.OSF_CONTACT_EMAIL, - ) - - user.group_connected_email_records[group._id]['last_sent'] = get_timestamp() - user.save() - -@group_added_to_node.connect -def subscribe_group_member(group, node, user, permission, auth, throttle=None): - try: - subscribe_user_to_notifications(node, user) - except InvalidSubscriptionError as err: - logger.warning(f'Skipping subscription of user {user} to node {node._id}') - logger.warning(f'Reason: {str(err)}') diff --git a/website/project/decorators.py b/website/project/decorators.py index 2d60be5359b..39db4099fd5 100644 --- a/website/project/decorators.py +++ b/website/project/decorators.py @@ -11,7 +11,7 @@ from framework.auth.decorators import collect_auth from framework.database import get_or_http_error -from osf.models import AbstractNode, Guid, Preprint, OSFGroup, Registration +from osf.models import AbstractNode, Guid, Preprint, Registration from osf.utils.permissions import WRITE from website import language from website.util import web_url_for @@ -88,10 +88,6 @@ def wrapped(*args, **kwargs): return func(*args, **kwargs) - if groups_valid and OSFGroup.load(kwargs.get('pid')): - kwargs['node'] = OSFGroup.load(kwargs.get('pid')) - return func(*args, **kwargs) - _inject_nodes(kwargs) if getattr(kwargs['node'], 'is_collection', True) or (getattr(kwargs['node'], 'is_quickfiles', True) and not quickfiles_valid): diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 485298eb8cb..f3e06aff3fc 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -19,7 +19,7 @@ from framework.utils import get_timestamp, throttle_period_expired 
from osf.models import Tag from osf.exceptions import NodeStateError -from osf.models import AbstractNode, DraftRegistration, OSFGroup, OSFUser, Preprint, PreprintProvider, RecentlyAddedContributor +from osf.models import AbstractNode, DraftRegistration, OSFUser, Preprint, PreprintProvider, RecentlyAddedContributor from osf.utils import sanitize from osf.utils.permissions import ADMIN from website import mails, language, settings @@ -732,18 +732,12 @@ def claim_user_registered(auth, node, **kwargs): if should_claim: node.replace_contributor(old=unreg_user, new=current_user) node.save() - if isinstance(node, OSFGroup): - status.push_status_message( - 'You are now a member of this OSFGroup.', - kind='success', - trust=False - ) - else: - status.push_status_message( - 'You are now a contributor to this project.', - kind='success', - trust=False - ) + + status.push_status_message( + 'You are now a contributor to this project.', + kind='success', + trust=False + ) return redirect(node.url) if is_json_request(): form_ret = forms.utils.jsonify(form) diff --git a/website/project/views/node.py b/website/project/views/node.py index 9bad3713d3b..bc48ab8561e 100644 --- a/website/project/views/node.py +++ b/website/project/views/node.py @@ -216,10 +216,6 @@ def project_new_node(auth, node, **kwargs): else: new_component.add_contributor(contributor, permissions=perm, auth=auth) - for group in node.osf_groups: - if group.is_manager(user): - new_component.add_osf_group(group, group.get_permission_to_node(node), auth=auth) - new_component.save() redirect_url = new_component.url + 'contributors/' message = ( @@ -837,7 +833,6 @@ def _view_project(node, auth, primary=False, 'storage_location': node.osfstorage_region.name, 'waterbutler_url': node.osfstorage_region.waterbutler_url, 'mfr_url': node.osfstorage_region.mfr_url, - 'groups': list(node.osf_groups.values_list('name', flat=True)), 'storage_limit_status': get_storage_limits_css(node), }, 'parent_node': { diff --git a/website/search/elastic_search.py b/website/search/elastic_search.py index 36d528c8e33..c0201e6fdfd 100644 --- a/website/search/elastic_search.py +++ b/website/search/elastic_search.py @@ -19,7 +19,6 @@ from osf.models import BaseFileNode from osf.models import GuidMetadataRecord from osf.models import Institution -from osf.models import OSFGroup from osf.models import QuickFilesNode from osf.models import Preprint from osf.models import SpamStatus @@ -59,7 +58,6 @@ 'institution': Institution, 'preprint': Preprint, 'collectionSubmission': CollectionSubmission, - 'group': OSFGroup } # Prevent tokenizing and stop word removal. 
@@ -337,8 +335,6 @@ def load_parent(parent_id): def get_doctype_from_node(node): if isinstance(node, Preprint): return 'preprint' - if isinstance(node, OSFGroup): - return 'group' if node.is_registration: return 'registration' elif node.parent_node is None: @@ -367,15 +363,6 @@ def update_preprint_async(self, preprint_id, index=None, bulk=False): except Exception as exc: self.retry(exc=exc) -@celery_app.task(bind=True, max_retries=5, default_retry_delay=60) -def update_group_async(self, group_id, index=None, bulk=False, deleted_id=None): - OSFGroup = apps.get_model('osf.OSFGroup') - group = OSFGroup.load(group_id) - try: - update_group(group=group, index=index, bulk=bulk, async_update=True, deleted_id=deleted_id) - except Exception as exc: - self.retry(exc=exc) - @celery_app.task(bind=True, max_retries=5, default_retry_delay=60) def update_user_async(self, user_id, index=None): OSFUser = apps.get_model('osf.OSFUser') @@ -400,13 +387,6 @@ def serialize_node(node, category): for x in node.contributor_set.filter(visible=True).order_by('_order') .values('user__fullname', 'user__guids___id', 'user__is_active') ], - 'groups': [ - { - 'name': x['name'], - 'url': '/{}/'.format(x['_id']) - } - for x in node.osf_groups.values('name', '_id') - ], 'title': node.title, 'normalized_title': normalized_title, 'category': category, diff --git a/website/search_migration/migrate.py b/website/search_migration/migrate.py index 7ae7f1431e4..545a43a66ac 100644 --- a/website/search_migration/migrate.py +++ b/website/search_migration/migrate.py @@ -15,7 +15,7 @@ JSON_UPDATE_FILES_SQL, JSON_DELETE_FILES_SQL, JSON_UPDATE_USERS_SQL, JSON_DELETE_USERS_SQL) from scripts import utils as script_utils -from osf.models import OSFUser, Institution, AbstractNode, BaseFileNode, Preprint, OSFGroup, CollectionSubmission +from osf.models import OSFUser, Institution, AbstractNode, BaseFileNode, Preprint, CollectionSubmission from website import settings from website.app import init_app from website.search.elastic_search import client as es_client @@ -109,15 +109,6 @@ def migrate_preprint_files(index, delete): logger.info(f'Updating page {page_number} / {paginator.num_pages}') search.bulk_update_nodes(serialize, paginator.page(page_number).object_list, index=index, category='file') -def migrate_groups(index, delete): - logger.info(f'Migrating groups to index: {index}') - groups = OSFGroup.objects.all() - increment = 100 - paginator = Paginator(groups, increment) - for page_number in paginator.page_range: - logger.info(f'Updating page {page_number} / {paginator.num_pages}') - OSFGroup.bulk_update_search(paginator.page(page_number).object_list, index=index) - def migrate_files(index, delete, increment=10000): logger.info(f'Migrating files to index: {index}') max_fid = BaseFileNode.objects.last().id @@ -217,7 +208,6 @@ def migrate(delete, remove=False, index=None, app=None): migrate_preprints(new_index, delete=delete) migrate_preprint_files(new_index, delete=delete) migrate_collected_metadata(new_index, delete=delete) - migrate_groups(new_index, delete=delete) set_up_alias(index, new_index) diff --git a/website/views.py b/website/views.py index aa523f80fd1..1fa72dc509d 100644 --- a/website/views.py +++ b/website/views.py @@ -67,22 +67,6 @@ def serialize_contributors_for_summary(node, max_count=3): 'others_count': others_count, } -def serialize_groups_for_summary(node): - groups = node.osf_groups - n_groups = len(groups) - group_string = '' - for index, group in enumerate(groups): - if index == n_groups - 1: - separator = '' - elif 
index == n_groups - 2: - separator = ' & ' - else: - separator = ', ' - - group_string = group_string + group.name + separator - - return group_string - def serialize_node_summary(node, auth, primary=True, show_path=False): is_registration = node.is_registration @@ -140,7 +124,6 @@ def serialize_node_summary(node, auth, primary=True, show_path=False): 'show_path': show_path, 'contributors': contributor_data['contributors'], 'others_count': contributor_data['others_count'], - 'groups': serialize_groups_for_summary(node), 'description': node.description if len(node.description) <= 150 else node.description[0:150] + '...', }) else: From 509d958843f134ac9a2b9d6bad4d4049e844199c Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 1 May 2025 15:06:15 +0300 Subject: [PATCH 003/176] remove osf groups --- api/nodes/permissions.py | 2 +- api/users/serializers.py | 7 ---- .../views/test_draft_registration_list.py | 4 -- ...ode_contributors_and_group_members_list.py | 27 ++----------- osf_tests/test_draft_registration.py | 2 - osf_tests/test_user.py | 4 +- tests/test_registrations/test_retractions.py | 12 ------ tests/test_registrations/test_views.py | 8 ---- website/templates/project/contributors.mako | 39 ------------------- website/templates/project/project.mako | 14 ------- website/templates/search.mako | 10 ----- website/templates/util/render_node.mako | 5 --- 12 files changed, 7 insertions(+), 127 deletions(-) diff --git a/api/nodes/permissions.py b/api/nodes/permissions.py index 0b74d61c645..5fc16f6cf16 100644 --- a/api/nodes/permissions.py +++ b/api/nodes/permissions.py @@ -217,7 +217,7 @@ class NodeGroupDetailPermissions(permissions.BasePermission): """Permissions for node group detail - involving who can update the relationship between a node and an OSF Group.""" - acceptable_models = (AbstractNode, ) + acceptable_models = (AbstractNode,) def load_resource(self, context, view): return AbstractNode.load(context[view.node_lookup_url_kwarg]) diff --git a/api/users/serializers.py b/api/users/serializers.py index e7e306f9194..308bb717410 100644 --- a/api/users/serializers.py +++ b/api/users/serializers.py @@ -118,13 +118,6 @@ class UserSerializer(JSONAPISerializer): ), ) - groups = HideIfDisabled( - RelationshipField( - related_view='users:user-groups', - related_view_kwargs={'user_id': '<_id>'}, - ), - ) - registrations = HideIfDisabled( RelationshipField( related_view='users:user-registrations', diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index 1126af09ad3..a126cf37d70 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -53,10 +53,6 @@ def user_read_contrib(self): def user_non_contrib(self): return AuthUserFactory() - @pytest.fixture() - def group_mem(self): - return AuthUserFactory() - @pytest.fixture() def project(self, user): return ProjectFactory(creator=user) diff --git a/api_tests/nodes/views/test_node_contributors_and_group_members_list.py b/api_tests/nodes/views/test_node_contributors_and_group_members_list.py index 6c7d6657660..a3e800204f0 100644 --- a/api_tests/nodes/views/test_node_contributors_and_group_members_list.py +++ b/api_tests/nodes/views/test_node_contributors_and_group_members_list.py @@ -5,7 +5,7 @@ ProjectFactory, AuthUserFactory, ) -from osf.utils.permissions import READ, WRITE +from osf.utils.permissions import WRITE @pytest.fixture() def 
non_contributor(): @@ -20,31 +20,19 @@ def write_contributor(): return AuthUserFactory() @pytest.fixture() -def group_manager(): - user = AuthUserFactory() - user.given_name = 'Dawn' - user.save() - return user - -@pytest.fixture() -def group_member(): - return AuthUserFactory() - -@pytest.fixture() -def project(admin_contributor, write_contributor, group_member_and_contributor): +def project(admin_contributor, write_contributor): project = ProjectFactory( creator=admin_contributor ) project.add_contributor(write_contributor, WRITE) - project.add_contributor(group_member_and_contributor, READ) return project @pytest.mark.django_db class TestNodeContributorsAndGroupMembers: def test_list_and_filter_contributors_and_group_members( - self, app, project, admin_contributor, write_contributor, group_manager, - group_member, group_member_and_contributor, non_contributor): + self, app, project, admin_contributor, write_contributor, + non_contributor): url = f'/{API_BASE}nodes/{project._id}/contributors_and_group_members/' # unauthenticated @@ -72,13 +60,6 @@ def test_list_and_filter_contributors_and_group_members( assert actual == expected - url = f'/{API_BASE}nodes/{project._id}/contributors_and_group_members/?filter[given_name]={group_manager.given_name}' - res = app.get(url, auth=admin_contributor.auth) - assert res.status_code == 200 - assert res.content_type == 'application/vnd.api+json' - assert len(res.json['data']) == 1 - assert res.json['data'][0]['id'] == group_manager._id - url = f'/{API_BASE}nodes/{project._id}/contributors_and_group_members/?filter[given_name]=NOT_EVEN_A_NAME' res = app.get(url, auth=admin_contributor.auth) assert res.status_code == 200 diff --git a/osf_tests/test_draft_registration.py b/osf_tests/test_draft_registration.py index 4551d4d998b..c5b38632230 100644 --- a/osf_tests/test_draft_registration.py +++ b/osf_tests/test_draft_registration.py @@ -233,8 +233,6 @@ def test_create_from_node_existing(self, user): assert draft.category == category assert user in draft.contributors.all() assert write_contrib in draft.contributors.all() - assert member not in draft.contributors.all() - assert not draft.has_permission(member, 'read') assert draft.get_permissions(user) == [READ, WRITE, ADMIN] assert draft.get_permissions(write_contrib) == [READ, WRITE] diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index ba9a144ec4a..6ee847686d0 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -822,7 +822,7 @@ def test_n_projects_in_common(self, user, auth): project.save() assert user.n_projects_in_common(user2) == 1 - assert user.n_projects_in_common(user3) == 1 + assert user.n_projects_in_common(user3) == 0 class TestCookieMethods: @@ -1787,7 +1787,7 @@ def test_all_nodes_property(self): project_three.save() user_nodes = self.user.all_nodes - assert user_nodes.count() == 2 + assert user_nodes.count() == 1 assert project in user_nodes assert project_three not in user_nodes diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index 61ec894640b..5dad9b35b42 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -848,10 +848,6 @@ def test_POST_retraction_by_non_admin_retract_HTTPError_UNAUTHORIZED(self): self.registration.reload() assert self.registration.retraction is None - # group admin POST fails - res = self.app.post(self.retraction_post_url, auth=self.group_mem.auth) - assert res.status_code == http_status.HTTP_403_FORBIDDEN - 
@mock.patch('website.mails.send_mail') def test_POST_retraction_without_justification_returns_HTTPOK(self, mock_send): res = self.app.post( @@ -913,10 +909,6 @@ def test_non_contributor_GET_approval_returns_HTTPError_UNAUTHORIZED(self): assert self.registration.is_pending_retraction assert not self.registration.is_retracted - # group admin on node fails disapproval GET - res = self.app.get(approval_url, auth=self.group_mem.auth) - assert res.status_code == http_status.HTTP_401_UNAUTHORIZED - def test_non_contributor_GET_disapproval_returns_HTTPError_UNAUTHORIZED(self): non_contributor = AuthUserFactory() self.registration.retract_registration(self.user) @@ -927,7 +919,3 @@ def test_non_contributor_GET_disapproval_returns_HTTPError_UNAUTHORIZED(self): assert res.status_code == http_status.HTTP_401_UNAUTHORIZED assert self.registration.is_pending_retraction assert not self.registration.is_retracted - - # group admin on node fails disapproval GET - res = self.app.get(disapproval_url, auth=self.group_mem.auth) - assert res.status_code == http_status.HTTP_401_UNAUTHORIZED diff --git a/tests/test_registrations/test_views.py b/tests/test_registrations/test_views.py index 34c7577540e..034b7b31ae4 100644 --- a/tests/test_registrations/test_views.py +++ b/tests/test_registrations/test_views.py @@ -343,10 +343,6 @@ def test_update_draft_registration_non_admin(self): res = self.app.put(url, json=payload, auth=self.non_admin.auth) assert res.status_code == http_status.HTTP_403_FORBIDDEN - # group admin cannot update draft registration - res = self.app.put(url, json=payload, auth=self.group_mem.auth) - assert res.status_code == http_status.HTTP_403_FORBIDDEN - def test_delete_draft_registration(self): assert 1 == DraftRegistration.objects.filter(deleted__isnull=True).count() url = self.node.api_url_for('delete_draft_registration', draft_id=self.draft._id) @@ -363,10 +359,6 @@ def test_delete_draft_registration_non_admin(self): assert res.status_code == http_status.HTTP_403_FORBIDDEN assert 1 == DraftRegistration.objects.filter(deleted__isnull=True).count() - # group admin cannot delete draft registration - res = self.app.delete(url, auth=self.group_mem.auth) - assert res.status_code == http_status.HTTP_403_FORBIDDEN - @mock.patch('website.archiver.tasks.archive') def test_delete_draft_registration_registered(self, mock_register_draft): self.draft.register(auth=self.auth, save=True) diff --git a/website/templates/project/contributors.mako b/website/templates/project/contributors.mako index d58a04f7bd7..ff3a5194183 100644 --- a/website/templates/project/contributors.mako +++ b/website/templates/project/contributors.mako @@ -63,45 +63,6 @@

    Drag and drop contributors to change listing order.

    % endif
    No contributors found
diff --git a/website/templates/project/project.mako b/website/templates/project/project.mako
index bcba7b24ff6..edd88d87d65 100644
--- a/website/templates/project/project.mako
+++ b/website/templates/project/project.mako
@@ -171,20 +171,6 @@
     % endif
-    % if node['groups']:
-        Groups:
-        %for i, group_name in enumerate(node['groups']):
-            % if i == len(node['groups']) - 1:
-                ${group_name}
-            % else:
-                ${group_name},
-            % endif
-        %endfor
-    % endif
     % if enable_institutions and not node['anonymous']:
         % if (permissions.ADMIN in user['permissions'] and not node['is_registration']) and (len(node['institutions']) != 0 or len(user['institutions']) != 0):
             Affiliated Institutions:
diff --git a/website/templates/search.mako b/website/templates/search.mako
index 78ec1b10e3d..bad65e38d81 100644
--- a/website/templates/search.mako
+++ b/website/templates/search.mako
@@ -249,16 +249,6 @@

-        Groups:

     Affiliated institutions:
diff --git a/website/templates/util/render_node.mako b/website/templates/util/render_node.mako
index 69af32bb624..0c2cb4c51ce 100644
--- a/website/templates/util/render_node.mako
+++ b/website/templates/util/render_node.mako
@@ -100,11 +100,6 @@

    ${contributor_list.render_contributors(contributors=summary['contributors'], others_count=summary['others_count'], node_url=summary['url'])}
-    % if summary['groups']:
-        ${summary['groups']}
-    % endif
     % else:
    Anonymous Contributors
    % endif From ff7244b0af98575816bdcd7fa237a0196ecd3047 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 1 May 2025 16:10:02 +0300 Subject: [PATCH 004/176] remove osf groups --- api/nodes/serializers.py | 5 ----- api/registrations/serializers.py | 7 ------- api/users/views.py | 1 - api_tests/nodes/views/test_node_files_list.py | 1 - api_tests/nodes/views/test_node_links_detail.py | 1 - api_tests/nodes/views/test_node_links_list.py | 1 - api_tests/nodes/views/test_node_logs.py | 1 - .../nodes/views/test_node_registrations_list.py | 1 - api_tests/users/views/test_user_list.py | 1 - website/search_migration/__init__.py | 17 +---------------- 10 files changed, 1 insertion(+), 35 deletions(-) diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index fd67d6f85f7..dfbd95e73e7 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -386,11 +386,6 @@ class NodeSerializer(TaxonomizableSerializerMixin, JSONAPISerializer): related_meta={'count': 'get_forks_count'}, ) - groups = RelationshipField( - related_view='nodes:node-groups', - related_view_kwargs={'node_id': '<_id>'}, - ) - node_links = ShowIfVersion( RelationshipField( related_view='nodes:node-pointers', diff --git a/api/registrations/serializers.py b/api/registrations/serializers.py index 33ffb1bda18..e4f61eda46e 100644 --- a/api/registrations/serializers.py +++ b/api/registrations/serializers.py @@ -302,13 +302,6 @@ class RegistrationSerializer(NodeSerializer): ), ) - groups = HideIfRegistration( - RelationshipField( - related_view='nodes:node-groups', - related_view_kwargs={'node_id': '<_id>'}, - ), - ) - node_links = ShowIfVersion( HideIfWithdrawal( RelationshipField( diff --git a/api/users/views.py b/api/users/views.py index 7228e99546e..3061e67321c 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -11,7 +11,6 @@ from api.addons.views import AddonSettingsMixin from api.base import permissions as base_permissions from api.users.permissions import UserMessagePermissions -from api.base.waffle_decorators import require_flag from api.base.exceptions import Conflict, UserGone, Gone from api.base.filters import ListFilterMixin, PreprintFilterMixin from api.base.parsers import ( diff --git a/api_tests/nodes/views/test_node_files_list.py b/api_tests/nodes/views/test_node_files_list.py index c5d6a475283..c07ce12a217 100644 --- a/api_tests/nodes/views/test_node_files_list.py +++ b/api_tests/nodes/views/test_node_files_list.py @@ -24,7 +24,6 @@ AuthUserFactory, PrivateLinkFactory ) -from osf.utils.permissions import READ from dateutil.parser import parse as parse_date from website import settings from osf.features import ENABLE_GV diff --git a/api_tests/nodes/views/test_node_links_detail.py b/api_tests/nodes/views/test_node_links_detail.py index 24b0050b437..0ad22bd6a78 100644 --- a/api_tests/nodes/views/test_node_links_detail.py +++ b/api_tests/nodes/views/test_node_links_detail.py @@ -8,7 +8,6 @@ RegistrationFactory, AuthUserFactory, ) -from osf.utils.permissions import WRITE, READ from rest_framework import exceptions from tests.utils import assert_latest_log diff --git a/api_tests/nodes/views/test_node_links_list.py b/api_tests/nodes/views/test_node_links_list.py index cfd747cda98..a244dc369db 100644 --- a/api_tests/nodes/views/test_node_links_list.py +++ b/api_tests/nodes/views/test_node_links_list.py @@ -8,7 +8,6 @@ RegistrationFactory, AuthUserFactory ) -from osf.utils.permissions import WRITE, READ from rest_framework import exceptions from tests.utils import assert_latest_log diff --git 
a/api_tests/nodes/views/test_node_logs.py b/api_tests/nodes/views/test_node_logs.py index 5b44d894917..220fadd1792 100644 --- a/api_tests/nodes/views/test_node_logs.py +++ b/api_tests/nodes/views/test_node_logs.py @@ -10,7 +10,6 @@ RegistrationFactory, EmbargoFactory, ) -from osf.utils.permissions import READ from tests.base import assert_datetime_equal from api_tests.utils import disconnected_from_listeners from website.project.signals import contributor_removed diff --git a/api_tests/nodes/views/test_node_registrations_list.py b/api_tests/nodes/views/test_node_registrations_list.py index 72b58ea58c6..e8861b4ed6e 100644 --- a/api_tests/nodes/views/test_node_registrations_list.py +++ b/api_tests/nodes/views/test_node_registrations_list.py @@ -7,7 +7,6 @@ RegistrationFactory, AuthUserFactory, ) -from osf.utils.permissions import READ def node_url_for(n_id): diff --git a/api_tests/users/views/test_user_list.py b/api_tests/users/views/test_user_list.py index 715ce328b42..23c3b762f97 100644 --- a/api_tests/users/views/test_user_list.py +++ b/api_tests/users/views/test_user_list.py @@ -16,7 +16,6 @@ UserFactory, ProjectFactory, ApiOAuth2ScopeFactory, - RegistrationFactory, Auth, ) from osf.utils.permissions import CREATOR_PERMISSIONS diff --git a/website/search_migration/__init__.py b/website/search_migration/__init__.py index d0dcc8e1a2d..6b673d96f53 100644 --- a/website/search_migration/__init__.py +++ b/website/search_migration/__init__.py @@ -27,22 +27,7 @@ LEFT OUTER JOIN osf_guid AS USER_GUID ON (U.id = USER_GUID.object_id AND (USER_GUID.content_type_id = (SELECT id FROM django_content_type WHERE model = 'osfuser'))) WHERE (CONTRIB.node_id = N.id AND CONTRIB.visible = TRUE)) - , 'groups', (SELECT json_agg(json_build_object( - 'url', '/' || osf_osfgroup._id || '/' - , 'name', osf_osfgroup.name - )) - FROM osf_osfgroup - WHERE osf_osfgroup.id IN ( - SELECT GGOP.content_object_id AS osfgroup_id - FROM osf_osfgroupgroupobjectpermission GGOP - WHERE GGOP.group_id IN ( - SELECT DISTINCT AG.id AS osfgroup_id - FROM auth_group AG - INNER JOIN osf_nodegroupobjectpermission NGOP - ON (AG.id = NGOP.group_id) - WHERE (NGOP.content_object_id = N.id AND UPPER(AG.name::text) LIKE UPPER('%osfgroup_%')) - ) - )) + , 'groups', NULL , 'extra_search_terms', CASE WHEN strpos(N.title, '-') + strpos(N.title, '_') + strpos(N.title, '.') > 0 THEN translate(N.title, '-_.', ' ') From 0d78f9ebecac4090a79c80bf86f6d8509cafdea1 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 1 May 2025 16:50:12 +0300 Subject: [PATCH 005/176] remove osf groups --- .../views/test_node_contributors_and_group_members_list.py | 2 +- api_tests/users/serializers/test_serializers.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/api_tests/nodes/views/test_node_contributors_and_group_members_list.py b/api_tests/nodes/views/test_node_contributors_and_group_members_list.py index a3e800204f0..f79bdd56f98 100644 --- a/api_tests/nodes/views/test_node_contributors_and_group_members_list.py +++ b/api_tests/nodes/views/test_node_contributors_and_group_members_list.py @@ -51,7 +51,7 @@ def test_list_and_filter_contributors_and_group_members( res = app.get(url, auth=admin_contributor.auth) assert res.status_code == 200 assert res.content_type == 'application/vnd.api+json' - assert len(res.json['data']) == 5 + assert len(res.json['data']) == 2 expected = { admin_contributor._id, write_contributor._id, diff --git a/api_tests/users/serializers/test_serializers.py b/api_tests/users/serializers/test_serializers.py index 
f0967890b06..6d311c776b2 100644 --- a/api_tests/users/serializers/test_serializers.py +++ b/api_tests/users/serializers/test_serializers.py @@ -108,7 +108,7 @@ class TestUserSerializer: 'test_related_counts_equal_related_views': [{ 'field_name': 'nodes', 'expected_count': { - 'user': 5, # this counts the private nodes created by RegistrationFactory + 'user': 4, # this counts the private nodes created by RegistrationFactory 'other_user': 1, 'no_auth': 1 }, @@ -181,7 +181,6 @@ def test_user_serializer(self, user): assert 'institutions' in relationships assert 'preprints' in relationships assert 'registrations' in relationships - assert 'groups' in relationships def test_related_counts_equal_related_views(self, request, From c217e0441b869bb4238374b9c7757e7396dfe678 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 1 May 2025 17:19:06 +0300 Subject: [PATCH 006/176] remove osf groups --- .../views/test_draft_registration_list.py | 2 +- .../views/test_user_registrations_list.py | 19 +------------------ 2 files changed, 2 insertions(+), 19 deletions(-) diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index a126cf37d70..2f9f4c31eeb 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -315,7 +315,7 @@ def test_read_only_contributor_cannot_create_draft( assert res.status_code == 403 def test_non_authenticated_user_cannot_create_draft( - self, app, user_write_contrib, payload_alt, group, url_draft_registrations + self, app, user_write_contrib, payload_alt, url_draft_registrations ): res = app.post_json_api( url_draft_registrations, diff --git a/api_tests/users/views/test_user_registrations_list.py b/api_tests/users/views/test_user_registrations_list.py index fd92a9dc057..1ad5d0701ee 100644 --- a/api_tests/users/views/test_user_registrations_list.py +++ b/api_tests/users/views/test_user_registrations_list.py @@ -116,12 +116,11 @@ def reg_project_private_group_member(self, user_one, project_private_group_membe is_private=True) def test_user_registrations( - self, app, user_one, user_two, group_member, + self, app, user_one, user_two, reg_project_public_user_one, reg_project_public_user_two, reg_project_private_user_one, reg_project_private_user_two, - reg_project_private_group_member, folder, folder_deleted, project_deleted_user_one): @@ -179,22 +178,6 @@ def test_user_registrations( assert folder_deleted._id not in ids assert project_deleted_user_one._id not in ids - # test_get_registrations_logged_in_group_member - url = f'/{API_BASE}users/{group_member._id}/registrations/' - res = app.get(url, auth=group_member.auth) - node_json = res.json['data'] - - ids = [each['id'] for each in node_json] - assert reg_project_public_user_one._id not in ids - assert reg_project_private_user_one._id not in ids - assert reg_project_public_user_two._id not in ids - assert reg_project_private_user_two._id not in ids - assert folder._id not in ids - assert folder_deleted._id not in ids - assert project_deleted_user_one._id not in ids - # project group members not copied to registration. 
- assert reg_project_private_group_member not in ids - class TestRegistrationListFiltering( RegistrationListFilteringMixin, From e1eb72af265dbb5ec562d723497c5ec38a799158 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Wed, 7 May 2025 17:10:26 +0300 Subject: [PATCH 007/176] Remove Meetings, Comments and OSF Groups Notifications --- admin/templates/base.html | 3 - .../transfer_quickfiles_to_projects.py | 16 - osf/models/comment.py | 5 - osf_tests/test_comment.py | 49 -- tests/test_notifications.py | 642 +----------------- website/conferences/signals.py | 5 - website/mails/listeners.py | 19 - website/mails/mails.py | 20 - website/notifications/constants.py | 4 - website/project/signals.py | 2 - website/project/views/comment.py | 67 -- website/settings/defaults.py | 3 - website/signals.py | 2 - .../emails/comment_replies.html.mako | 17 - website/templates/emails/comments.html.mako | 19 - .../emails/conference_deprecation.html.mako | 17 - .../emails/conference_failed.html.mako | 16 - .../emails/conference_inactive.html.mako | 15 - .../emails/conference_submitted.html.mako | 34 - .../emails/confirm_agu_conference.html.mako | 26 - .../confirm_agu_conference_2023.html.mako | 25 - .../emails/group_added_to_node.html.mako | 23 - .../emails/group_member_added.html.mako | 24 - .../group_member_unregistered_added.html.mako | 24 - .../emails/quickfiles_migrated.html.mako | 31 - 25 files changed, 4 insertions(+), 1104 deletions(-) delete mode 100644 website/conferences/signals.py delete mode 100644 website/templates/emails/comment_replies.html.mako delete mode 100644 website/templates/emails/comments.html.mako delete mode 100644 website/templates/emails/conference_deprecation.html.mako delete mode 100644 website/templates/emails/conference_failed.html.mako delete mode 100644 website/templates/emails/conference_inactive.html.mako delete mode 100644 website/templates/emails/conference_submitted.html.mako delete mode 100644 website/templates/emails/confirm_agu_conference.html.mako delete mode 100644 website/templates/emails/confirm_agu_conference_2023.html.mako delete mode 100644 website/templates/emails/group_added_to_node.html.mako delete mode 100644 website/templates/emails/group_member_added.html.mako delete mode 100644 website/templates/emails/group_member_unregistered_added.html.mako delete mode 100644 website/templates/emails/quickfiles_migrated.html.mako diff --git a/admin/templates/base.html b/admin/templates/base.html index a4e6d3b52e3..e6f10794c29 100644 --- a/admin/templates/base.html +++ b/admin/templates/base.html @@ -288,9 +288,6 @@ {% endif %} {% endif %} - {% if perms.osf.view_conference %} -
  • Meetings
  • - {% endif %} {% if perms.osf.view_metrics %}
  • Metrics
  • {% endif %} diff --git a/osf/management/commands/transfer_quickfiles_to_projects.py b/osf/management/commands/transfer_quickfiles_to_projects.py index c4f5a8450f1..4d26eb02f4a 100644 --- a/osf/management/commands/transfer_quickfiles_to_projects.py +++ b/osf/management/commands/transfer_quickfiles_to_projects.py @@ -16,11 +16,9 @@ ) from osf.models.base import generate_guid from osf.models.quickfiles import get_quickfiles_project_title -from osf.models.queued_mail import QueuedMail from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField from addons.osfstorage.models import OsfStorageFile -from website import mails, settings from django.contrib.contenttypes.models import ContentType logger = logging.getLogger(__name__) @@ -65,7 +63,6 @@ def remove_quickfiles(): logger.info(f'Created {len(guids)} Guids') node_logs = [] - queued_mail = [] pbar = tqdm(total=target_count) for node in quick_files_nodes: node_logs.append(NodeLog( @@ -75,17 +72,6 @@ def remove_quickfiles(): params={'node': node._id}, action=NodeLog.MIGRATED_QUICK_FILES )) - queued_mail.append(QueuedMail( - user=node.creator, - to_addr=node.creator.email, - send_at=QUICKFILES_DATE, - email_type=mails.QUICKFILES_MIGRATED.tpl_prefix, - data=dict( - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - quickfiles_link=node.absolute_url - ) - )) node.logs.update( params=Func( F('params'), @@ -100,8 +86,6 @@ def remove_quickfiles(): logger.info('Updated logs') NodeLog.objects.bulk_create(node_logs) logger.info(f'Created {len(node_logs)} logs') - QueuedMail.objects.bulk_create(queued_mail) - logger.info(f'Created {len(queued_mail)} mails') quick_files_nodes.update(description=QUICKFILES_DESC, type='osf.node') logger.info(f'Projectified {target_count} QuickFilesNodes') diff --git a/osf/models/comment.py b/osf/models/comment.py index 586763956ee..032085cd0e9 100644 --- a/osf/models/comment.py +++ b/osf/models/comment.py @@ -13,7 +13,6 @@ from framework.exceptions import PermissionsError from website import settings from website.util import api_v2_url -from website.project import signals as project_signals from website.project.model import get_valid_mentioned_users_guids @@ -164,7 +163,6 @@ def create(cls, auth, **kwargs): comment.save() new_mentions = get_valid_mentioned_users_guids(comment, comment.node.contributors_and_group_members) if new_mentions: - project_signals.mention_added.send(comment, new_mentions=new_mentions, auth=auth) comment.ever_mentioned.add(*comment.node.contributors.filter(guids___id__in=new_mentions)) comment.save() @@ -177,8 +175,6 @@ def create(cls, auth, **kwargs): ) comment.node.save() - project_signals.comment_added.send(comment, auth=auth, new_mentions=new_mentions) - return comment def edit(self, content, auth, save=False): @@ -198,7 +194,6 @@ def edit(self, content, auth, save=False): if save: if new_mentions: - project_signals.mention_added.send(self, new_mentions=new_mentions, auth=auth) self.ever_mentioned.add(*self.node.contributors.filter(guids___id__in=new_mentions)) self.save() self.node.add_log( diff --git a/osf_tests/test_comment.py b/osf_tests/test_comment.py index 4b2c4a91acf..7f247d403d5 100644 --- a/osf_tests/test_comment.py +++ b/osf_tests/test_comment.py @@ -14,7 +14,6 @@ from website import settings from addons.osfstorage import settings as osfstorage_settings from website.project.views.comment import update_file_guid_referent -from website.project.signals import comment_added, mention_added from framework.exceptions import PermissionsError from 
tests.base import capture_signals from osf.models import Comment, NodeLog, Guid, BaseFileNode @@ -219,36 +218,12 @@ class TestCommentModel: ] create_cases = [ - # Make sure valid mentions send signals - { - 'comment_content': comment_mention_valid, - 'expected_signals': {comment_added, mention_added}, - 'expected_error_msg': None, - }, - # User mentions a contributor - { - 'comment_content': comment_contributor_mentioned, - 'expected_signals': {comment_added, mention_added}, - 'expected_error_msg': None, - }, # Make sure comments aren't NoneType { 'comment_content': None, 'expected_signals': set(), 'expected_error_msg': "{'content': ['This field cannot be null.']}", }, - # User makes valid comment - { - 'comment_content': comment_valid, - 'expected_signals': {comment_added}, - 'expected_error_msg': None, - }, - # User mentions themselves - { - 'comment_content': comment_self_mentioned, - 'expected_signals': {comment_added, mention_added}, - 'expected_error_msg': None, - }, # Prevent user from entering a comment that's too long with a mention { 'comment_content': comment_too_long_with_mention, @@ -257,41 +232,17 @@ class TestCommentModel: }, ] edit_cases = [ - # Send if mention is valid - { - 'comment_content': comment_mention_valid, - 'expected_signals': {mention_added}, - 'expected_error_msg': None, - }, - # User mentions a contributor - { - 'comment_content': comment_contributor_mentioned, - 'expected_signals': {mention_added}, - 'expected_error_msg': None, - }, # User edits valid comment { 'comment_content': comment_valid, 'expected_signals': set(), 'expected_error_msg': None, }, - # User mentions themselves - { - 'comment_content': comment_self_mentioned, - 'expected_signals': {mention_added}, - 'expected_error_msg': None, - }, # Don't send mention if already mentioned { 'comment_content': comment_mention_edited_twice, 'expected_signals': set(), 'expected_error_msg': None, - }, - # Send mention if already mentioned - { - 'comment_content': comment_mention_project_with_contributor, - 'expected_signals': {mention_added}, - 'expected_error_msg': None, } ] params = { diff --git a/tests/test_notifications.py b/tests/test_notifications.py index b52190ca999..ec7f0106dfe 100644 --- a/tests/test_notifications.py +++ b/tests/test_notifications.py @@ -109,9 +109,8 @@ def test_new_project_creator_is_subscribed(self): user_subscriptions = list(utils.get_all_user_subscriptions(user)) event_types = [sub.event_name for sub in user_subscriptions] - assert len(user_subscriptions) == 2 # subscribed to both file_updated and comments + assert len(user_subscriptions) == 1 # subscribed to file_updated assert 'file_updated' in event_types - assert 'comments' in event_types def test_new_node_creator_is_not_subscribed(self): user = factories.UserFactory() @@ -123,70 +122,34 @@ def test_new_node_creator_is_not_subscribed(self): def test_new_project_creator_is_subscribed_with_global_settings(self): user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_digest') - factories.NotificationSubscriptionFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'none') - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_digest') - node = factories.ProjectFactory(creator=user) 
user_subscriptions = list(utils.get_all_user_subscriptions(user)) event_types = [sub.event_name for sub in user_subscriptions] file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') assert len(user_subscriptions) == 5 # subscribed to both node and user settings assert 'file_updated' in event_types - assert 'comments' in event_types assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_mentions' in event_types assert file_updated_subscription.none.count() == 1 assert file_updated_subscription.email_transactional.count() == 0 - assert comments_subscription.email_digest.count() == 1 - assert comments_subscription.email_transactional.count() == 0 def test_new_node_creator_is_not_subscribed_with_global_settings(self): user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_digest') - factories.NotificationSubscriptionFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'none') - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - node = factories.NodeFactory(creator=user) user_subscriptions = list(utils.get_all_user_subscriptions(user)) @@ -194,9 +157,6 @@ def test_new_node_creator_is_not_subscribed_with_global_settings(self): assert len(user_subscriptions) == 4 # subscribed to only user settings assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_comment_replies' in event_types - assert 'global_mentions' in event_types def test_subscribe_user_to_global_notfiications(self): user = factories.UserFactory() @@ -213,128 +173,64 @@ def test_subscribe_user_to_registration_notifications(self): def test_new_project_creator_is_subscribed_with_default_global_settings(self): user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - node = factories.ProjectFactory(creator=user) user_subscriptions = list(utils.get_all_user_subscriptions(user)) event_types = [sub.event_name for sub in user_subscriptions] file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') 
assert len(user_subscriptions) == 6 # subscribed to both node and user settings assert 'file_updated' in event_types - assert 'comments' in event_types assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_comment_replies' in event_types - assert 'global_mentions' in event_types assert file_updated_subscription.email_transactional.count() == 1 - assert comments_subscription.email_transactional.count() == 1 def test_new_fork_creator_is_subscribed_with_default_global_settings(self): user = factories.UserFactory() project = factories.ProjectFactory(creator=user) - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - node = factories.ForkFactory(project=project) user_subscriptions = list(utils.get_all_user_subscriptions(user)) event_types = [sub.event_name for sub in user_subscriptions] node_file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - node_comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') project_file_updated_subscription = NotificationSubscription.objects.get(_id=project._id + '_file_updated') - project_comments_subscription = NotificationSubscription.objects.get(_id=project._id + '_comments') assert len(user_subscriptions) == 7 # subscribed to project, fork, and user settings assert 'file_updated' in event_types - assert 'comments' in event_types assert 'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_mentions' in event_types assert node_file_updated_subscription.email_transactional.count() == 1 - assert node_comments_subscription.email_transactional.count() == 1 assert project_file_updated_subscription.email_transactional.count() == 1 - assert project_comments_subscription.email_transactional.count() == 1 def test_new_node_creator_is_not_subscribed_with_default_global_settings(self): user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - node = factories.NodeFactory(creator=user) user_subscriptions = list(utils.get_all_user_subscriptions(user)) event_types = [sub.event_name for sub in user_subscriptions] - assert len(user_subscriptions) == 4 # subscribed to only user settings + assert len(user_subscriptions) == 1 # subscribed to only user settings assert 
'global_file_updated' in event_types - assert 'global_comments' in event_types - assert 'global_comment_replies' in event_types - assert 'global_mentions' in event_types def test_contributor_subscribed_when_added_to_project(self): @@ -345,20 +241,13 @@ def test_contributor_subscribed_when_added_to_project(self): contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor)) event_types = [sub.event_name for sub in contributor_subscriptions] - assert len(contributor_subscriptions) == 2 + assert len(contributor_subscriptions) == 1 assert 'file_updated' in event_types - assert 'comments' in event_types def test_contributor_subscribed_when_added_to_component(self): user = factories.UserFactory() contributor = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=contributor._id + '_' + 'global_comments', - user=contributor, - event_name='global_comments' - ).add_user_to_subscription(contributor, 'email_transactional') - factories.NotificationSubscriptionFactory( _id=contributor._id + '_' + 'global_file_updated', user=contributor, @@ -372,15 +261,11 @@ def test_contributor_subscribed_when_added_to_component(self): event_types = [sub.event_name for sub in contributor_subscriptions] file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') assert len(contributor_subscriptions) == 4 # subscribed to both node and user settings assert 'file_updated' in event_types - assert 'comments' in event_types assert 'global_file_updated' in event_types - assert 'global_comments' in event_types assert file_updated_subscription.email_transactional.count() == 1 - assert comments_subscription.email_transactional.count() == 1 def test_unregistered_contributor_not_subscribed_when_added_to_project(self): user = factories.AuthUserFactory() @@ -397,163 +282,6 @@ def test_unregistered_contributor_not_subscribed_when_added_to_project(self): assert len(contributor_subscriptions) == 0 -class TestSubscriptionView(OsfTestCase): - - def setUp(self): - super().setUp() - self.node = factories.NodeFactory() - self.user = self.node.creator - self.registration = factories.RegistrationFactory(creator=self.user) - - def test_create_new_subscription(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - - # check that subscription was created - event_id = self.node._id + '_' + 'comments' - s = NotificationSubscription.objects.get(_id=event_id) - - # check that user was added to notification_type field - assert payload['id'] == s.owner._id - assert payload['event'] == s.event_name - assert self.node.creator in getattr(s, payload['notification_type']).all() - - # change subscription - new_payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_digest' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=new_payload, auth=self.node.creator.auth) - s.reload() - assert not self.node.creator in getattr(s, payload['notification_type']).all() - assert self.node.creator in getattr(s, new_payload['notification_type']).all() - - def test_cannot_create_registration_subscription(self): - payload = { - 'id': self.registration._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - res = 
self.app.post(url, json=payload, auth=self.registration.creator.auth) - assert res.status_code == 400 - - def test_adopt_parent_subscription_default(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'adopt_parent' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - event_id = self.node._id + '_' + 'comments' - # confirm subscription was created because parent had default subscription - s = NotificationSubscription.objects.filter(_id=event_id).count() - assert 0 == s - - def test_change_subscription_to_adopt_parent_subscription_removes_user(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - - # check that subscription was created - event_id = self.node._id + '_' + 'comments' - s = NotificationSubscription.objects.get(_id=event_id) - - # change subscription to adopt_parent - new_payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'adopt_parent' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=new_payload, auth=self.node.creator.auth) - s.reload() - - # assert that user is removed from the subscription entirely - for n in constants.NOTIFICATION_TYPES: - assert not self.node.creator in getattr(s, n).all() - - def test_configure_subscription_adds_node_id_to_notifications_configured(self): - project = factories.ProjectFactory(creator=self.user) - assert not project._id in self.user.notifications_configured - payload = { - 'id': project._id, - 'event': 'comments', - 'notification_type': 'email_digest' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=project.creator.auth) - - self.user.reload() - - assert project._id in self.user.notifications_configured - - -class TestRemoveContributor(OsfTestCase): - - def setUp(self): - super(OsfTestCase, self).setUp() - self.project = factories.ProjectFactory() - self.contributor = factories.UserFactory() - self.project.add_contributor(contributor=self.contributor, permissions=permissions.READ) - self.project.save() - - self.subscription = NotificationSubscription.objects.get( - node=self.project, - _id=self.project._id + '_comments' - ) - - self.node = factories.NodeFactory(parent=self.project) - self.node.add_contributor(contributor=self.project.creator, permissions=permissions.ADMIN) - self.node.save() - - self.node_subscription = NotificationSubscription.objects.get( - _id=self.node._id + '_comments', - node=self.node - ) - self.node_subscription.add_user_to_subscription(self.node.creator, 'email_transactional') - - def test_removed_non_admin_contributor_is_removed_from_subscriptions(self): - assert self.contributor in self.subscription.email_transactional.all() - self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) - assert self.contributor not in self.project.contributors.all() - self.subscription.reload() - assert self.contributor not in self.subscription.email_transactional.all() - - def test_removed_non_parent_admin_contributor_is_removed_from_subscriptions(self): - assert self.node.creator in self.node_subscription.email_transactional.all() - self.node.remove_contributor(self.node.creator, auth=Auth(self.node.creator)) - assert self.node.creator not in self.node.contributors.all() - self.node_subscription.reload() - assert self.node.creator not in 
self.node_subscription.email_transactional.all() - - def test_removed_contributor_admin_on_parent_not_removed_from_node_subscription(self): - # Admin on parent project is removed as a contributor on a component. Check - # that admin is not removed from component subscriptions, as the admin - # now has read-only access. - assert self.project.creator in self.node_subscription.email_transactional.all() - self.node.remove_contributor(self.project.creator, auth=Auth(self.project.creator)) - assert self.project.creator not in self.node.contributors.all() - assert self.project.creator in self.node_subscription.email_transactional.all() - - def test_remove_contributor_signal_called_when_contributor_is_removed(self): - with capture_signals() as mock_signals: - self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) - assert mock_signals.signals_sent() == {contributor_removed} - - class TestRemoveNodeSignal(OsfTestCase): def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self): @@ -973,33 +701,6 @@ def test_format_user_subscriptions(self): }, 'kind': 'event', 'children': [] - }, { - 'event': { - 'title': 'global_comment_replies', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comment_replies'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_comments', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_mentions', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_mentions'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] }, { 'event': { 'title': 'global_reviews', @@ -1056,102 +757,6 @@ def test_format_data_user_settings(self): }] assert data == expected - def test_serialize_user_level_event(self): - user_subscriptions = [x for x in utils.get_all_user_subscriptions(self.user)] - user_subscription = None - for subscription in user_subscriptions: - if 'global_comment_replies' in getattr(subscription, 'event_name'): - user_subscription = subscription - data = utils.serialize_event(self.user, event_description='global_comment_replies', - subscription=user_subscription) - expected = { - 'event': { - 'title': 'global_comment_replies', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comment_replies'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - } - assert data == expected - - def test_serialize_node_level_event(self): - node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node)] - data = utils.serialize_event(user=self.user, event_description='comments', - subscription=node_subscriptions[0], node=self.node) - expected = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - assert data == expected - - def test_serialize_node_level_event_that_adopts_parent_settings(self): - user = factories.UserFactory() - self.project.add_contributor(contributor=user, permissions=permissions.READ) - self.project.save() - self.node.add_contributor(contributor=user, 
permissions=permissions.READ) - self.node.save() - - # set up how it was in original test - remove existing subscriptions - node_subscriptions = utils.get_all_node_subscriptions(user, self.node) - for subscription in node_subscriptions: - subscription.remove_user_from_subscription(user) - - node_subscriptions = utils.get_all_node_subscriptions(user, self.node) - data = utils.serialize_event(user=user, event_description='comments', - subscription=node_subscriptions, node=self.node) - expected = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'adopt_parent', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - assert data == expected - - -class TestNotificationsDict(OsfTestCase): - def test_notifications_dict_add_message_returns_proper_format(self): - d = utils.NotificationsDict() - message = { - 'message': 'Freddie commented on your project', - 'timestamp': timezone.now() - } - message2 = { - 'message': 'Mercury commented on your component', - 'timestamp': timezone.now() - } - - d.add_message(['project'], message) - d.add_message(['project', 'node'], message2) - - expected = { - 'messages': [], - 'children': collections.defaultdict( - utils.NotificationsDict, { - 'project': { - 'messages': [message], - 'children': collections.defaultdict(utils.NotificationsDict, { - 'node': { - 'messages': [message2], - 'children': collections.defaultdict(utils.NotificationsDict, {}) - } - }) - } - } - )} - assert d == expected - class TestCompileSubscriptions(NotificationTestCase): def setUp(self): @@ -1407,228 +1012,6 @@ def setUp(self): super().setUp() self.user = factories.AuthUserFactory() self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_' + 'comments', - node=self.project, - event_name='comments' - ) - self.project_subscription.save() - self.project_subscription.email_transactional.add(self.project.creator) - self.project_subscription.save() - - self.node = factories.NodeFactory(parent=self.project) - self.node_subscription = factories.NotificationSubscriptionFactory( - _id=self.node._id + '_comments', - node=self.node, - event_name='comments' - ) - self.node_subscription.save() - self.user_subscription = factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_comment_replies', - node=self.node, - event_name='global_comment_replies' - ) - self.user_subscription.email_transactional.add(self.user) - self.user_subscription.save() - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_no_subscription(self, mock_store): - node = factories.ProjectFactory() - user = factories.AuthUserFactory() - emails.notify('comments', user=user, node=node, timestamp=timezone.now()) - assert not mock_store.called - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_no_subscribers(self, mock_store): - node = factories.NodeFactory() - node_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_comments', - node=node, - event_name='comments' - ) - node_subscription.save() - emails.notify('comments', user=self.user, node=node, timestamp=timezone.now()) - assert not mock_store.called - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_with_correct_args(self, mock_store): - time_now = timezone.now() - emails.notify('comments', user=self.user, node=self.node, timestamp=time_now) - assert 
mock_store.called - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', self.user, - self.node, time_now) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_does_not_send_to_exclude(self, mock_store): - time_now = timezone.now() - context = {'exclude':[self.project.creator._id]} - emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, **context) - assert mock_store.call_count == 0 - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_does_not_send_to_users_subscribed_to_none(self, mock_store): - node = factories.NodeFactory() - user = factories.UserFactory() - node_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_comments', - node=node, - event_name='comments' - ) - node_subscription.save() - node_subscription.none.add(user) - node_subscription.save() - sent = emails.notify('comments', user=user, node=node, timestamp=timezone.now()) - assert not mock_store.called - assert sent == [] - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_mentions_does_not_send_to_mentioned_users_subscribed_to_none(self, mock_store): - node = factories.NodeFactory() - user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_global_mentions', - node=self.node, - event_name='global_mentions' - ).add_user_to_subscription(user, 'none') - time_now = timezone.now() - sent = emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id]) - assert not mock_store.called - assert sent == [] - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_mentions_does_send_to_mentioned_users(self, mock_store): - user = factories.UserFactory() - factories.NotificationSubscriptionFactory( - _id=user._id + '_global_mentions', - node=self.node, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - node = factories.ProjectFactory(creator=user) - time_now = timezone.now() - emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id]) - assert mock_store.called - mock_store.assert_called_with( - [node.creator._id], - 'email_transactional', - 'global_mentions', - user, - node, - time_now, - template=None, - new_mentions=[node.creator._id], - is_creator=(user == node.creator), - ) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_reply_event_if_comment_is_direct_reply(self, mock_store): - time_now = timezone.now() - emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, target_user=self.project.creator) - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comment_replies', - self.user, self.node, time_now, target_user=self.project.creator) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_reply_when_target_user_is_subscribed_via_user_settings(self, mock_store): - time_now = timezone.now() - emails.notify('global_comment_replies', user=self.project.creator, node=self.node, timestamp=time_now, target_user=self.user) - mock_store.assert_called_with([self.user._id], 'email_transactional', 'comment_replies', - self.project.creator, self.node, time_now, target_user=self.user) - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply(self, mock_store): - user = 
factories.UserFactory() - time_now = timezone.now() - emails.notify('comments', user=user, node=self.node, timestamp=time_now, target_user=user) - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user, - self.node, time_now, target_user=user) - - @mock.patch('website.mails.send_mail') - @mock.patch('website.notifications.emails.store_emails') - def test_notify_does_not_send_comment_if_they_reply_to_their_own_comment(self, mock_store, mock_send_mail): - time_now = timezone.now() - emails.notify('comments', user=self.project.creator, node=self.project, timestamp=time_now, - target_user=self.project.creator) - assert not mock_store.called - assert not mock_send_mail.called - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply_on_component(self, mock_store): - # Test that comment replies on components that are not direct replies to the subscriber use the - # "comments" email template. - user = factories.UserFactory() - time_now = timezone.now() - emails.notify('comments', user, self.node, time_now, target_user=user) - mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user, - self.node, time_now, target_user=user) - - def test_check_node_node_none(self): - subs = emails.check_node(None, 'comments') - assert subs == {'email_transactional': [], 'email_digest': [], 'none': []} - - def test_check_node_one(self): - subs = emails.check_node(self.project, 'comments') - assert subs == {'email_transactional': [self.project.creator._id], 'email_digest': [], 'none': []} - - @mock.patch('website.project.views.comment.notify') - def test_check_user_comment_reply_subscription_if_email_not_sent_to_target_user(self, mock_notify): - # user subscribed to comment replies - user = factories.UserFactory() - user_subscription = factories.NotificationSubscriptionFactory( - _id=user._id + '_comments', - user=user, - event_name='comment_replies' - ) - user_subscription.email_transactional.add(user) - user_subscription.save() - - # user is not subscribed to project comment notifications - project = factories.ProjectFactory() - - # user comments on project - target = factories.CommentFactory(node=project, user=user) - content = 'hammer to fall' - - # reply to user (note: notify is called from Comment.create) - reply = Comment.create( - auth=Auth(project.creator), - user=project.creator, - node=project, - content=content, - target=Guid.load(target._id), - root_target=Guid.load(project._id), - ) - assert mock_notify.called - assert mock_notify.call_count == 2 - - @mock.patch('website.project.views.comment.notify') - def test_check_user_comment_reply_only_calls_once(self, mock_notify): - # user subscribed to comment replies - user = factories.UserFactory() - user_subscription = factories.NotificationSubscriptionFactory( - _id=user._id + '_comments', - user=user, - event_name='comment_replies' - ) - user_subscription.email_transactional.add(user) - user_subscription.save() - - project = factories.ProjectFactory() - - # user comments on project - target = factories.CommentFactory(node=project, user=user) - content = 'P-Hacking: A user guide' - - mock_notify.return_value = [user._id] - # reply to user (note: notify is called from Comment.create) - reply = Comment.create( - auth=Auth(project.creator), - user=project.creator, - node=project, - content=content, - target=Guid.load(target._id), - root_target=Guid.load(project._id), - ) - assert mock_notify.called - 
assert mock_notify.call_count == 1 def test_get_settings_url_for_node(self): url = emails.get_settings_url(self.project._id, self.user) @@ -1757,15 +1140,6 @@ def test_group_notifications_by_user_transactional(self): def test_group_notifications_by_user_digest(self): send_type = 'email_digest' - d = factories.NotificationDigestFactory( - user=self.user_1, - send_type=send_type, - event='comment_replies', - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d.save() d2 = factories.NotificationDigestFactory( user=self.user_2, send_type=send_type, @@ -1784,14 +1158,6 @@ def test_group_notifications_by_user_digest(self): d3.save() user_groups = list(get_users_emails(send_type)) expected = [ - { - 'user_id': str(self.user_1._id), - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': str(d._id) - }] - }, { 'user_id': str(self.user_2._id), 'info': [{ @@ -1804,7 +1170,7 @@ def test_group_notifications_by_user_digest(self): assert len(user_groups) == 2 assert user_groups == expected - digest_ids = [d._id, d2._id, d3._id] + digest_ids = [d2._id, d3._id] remove_notifications(email_notification_ids=digest_ids) @mock.patch('website.mails.send_mail') diff --git a/website/conferences/signals.py b/website/conferences/signals.py deleted file mode 100644 index ef4459bcf16..00000000000 --- a/website/conferences/signals.py +++ /dev/null @@ -1,5 +0,0 @@ -import blinker - -signals = blinker.Namespace() - -osf4m_user_created = signals.signal('osf4m-user-created') diff --git a/website/mails/listeners.py b/website/mails/listeners.py index 8304559d9ba..3f411d52f87 100644 --- a/website/mails/listeners.py +++ b/website/mails/listeners.py @@ -7,7 +7,6 @@ from website import settings from framework.auth import signals as auth_signals from website.project import signals as project_signals -from website.conferences import signals as conference_signals @auth_signals.unconfirmed_user_created.connect @@ -43,21 +42,3 @@ def queue_first_public_project_email(user, node, meeting_creation): project_title=node.title, osf_support_email=settings.OSF_SUPPORT_EMAIL, ) - -@conference_signals.osf4m_user_created.connect -def queue_osf4m_welcome_email(user, conference, node): - """Queue an email once a new user is created for OSF Meetings""" - from osf.models.queued_mail import queue_mail, WELCOME_OSF4M - root = (node.get_addon('osfstorage')).get_root() - root_children = [child for child in root.children if child.is_file] - queue_mail( - to_addr=user.username, - mail=WELCOME_OSF4M, - send_at=timezone.now() + settings.WELCOME_OSF4M_WAIT_TIME, - user=user, - conference=conference.name, - fullname=user.fullname, - fid=root_children[0]._id if len(root_children) else None, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - domain=settings.DOMAIN, - ) diff --git a/website/mails/mails.py b/website/mails/mails.py index 61c466fdfb8..ab632e780ec 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -511,21 +511,6 @@ def get_english_article(word): subject='Your ${document_type} has been withdrawn', ) -GROUP_MEMBER_ADDED = Mail( - 'group_member_added', - subject='You have been added as a ${permission} of the group ${group_name}', -) - -GROUP_MEMBER_UNREGISTERED_ADDED = Mail( - 'group_member_unregistered_added', - subject='You have been added as a ${permission} of the group ${group_name}', -) - -GROUP_ADDED_TO_NODE = Mail( - 'group_added_to_node', - subject='Your group, ${group_name}, has been added to an OSF Project' -) - WITHDRAWAL_REQUEST_DECLINED = Mail( 
'withdrawal_request_declined', subject='Your withdrawal request has been declined', @@ -599,11 +584,6 @@ def get_english_article(word): subject='The updates for ${resource_type} ${title} were not accepted' ) -QUICKFILES_MIGRATED = Mail( - 'quickfiles_migrated', - subject='Your Quick Files have moved' -) - ADDONS_BOA_JOB_COMPLETE = Mail( 'addons_boa_job_complete', subject='Your Boa job has completed' diff --git a/website/notifications/constants.py b/website/notifications/constants.py index 4068367c505..ce3c9db4315 100644 --- a/website/notifications/constants.py +++ b/website/notifications/constants.py @@ -1,5 +1,4 @@ NODE_SUBSCRIPTIONS_AVAILABLE = { - 'comments': 'Comments added', 'file_updated': 'Files updated' } @@ -7,10 +6,7 @@ # subscription. If no notification type has been assigned, the user subscription # will default to 'email_transactional'. USER_SUBSCRIPTIONS_AVAILABLE = { - 'global_comment_replies': 'Replies to your comments', - 'global_comments': 'Comments added', 'global_file_updated': 'Files updated', - 'global_mentions': 'Mentions added', 'global_reviews': 'Preprint submissions updated' } diff --git a/website/project/signals.py b/website/project/signals.py index 70f25418c5a..1b8b0222b88 100644 --- a/website/project/signals.py +++ b/website/project/signals.py @@ -1,8 +1,6 @@ import blinker signals = blinker.Namespace() -comment_added = signals.signal('comment-added') -mention_added = signals.signal('mention-added') contributor_added = signals.signal('contributor-added') project_created = signals.signal('project-created') contributor_removed = signals.signal('contributor-removed') diff --git a/website/project/views/comment.py b/website/project/views/comment.py index eb8d6b16271..5e274052f18 100644 --- a/website/project/views/comment.py +++ b/website/project/views/comment.py @@ -9,11 +9,8 @@ from addons.base.signals import file_updated from osf.models import BaseFileNode, TrashedFileNode from osf.models import Comment -from website.notifications.constants import PROVIDERS -from website.notifications.emails import notify, notify_mentions from website.project.decorators import must_be_contributor_or_public from osf.models import Node -from website.project.signals import comment_added, mention_added @file_updated.connect @@ -107,70 +104,6 @@ def render_email_markdown(content): return markdown.markdown(content, extensions=['markdown_del_ins', 'markdown.extensions.tables', 'markdown.extensions.fenced_code']) -@comment_added.connect -def send_comment_added_notification(comment, auth, new_mentions=None): - if not new_mentions: - new_mentions = [] - node = comment.node - target = comment.target - - context = dict( - profile_image_url=auth.user.profile_image_url(), - content=render_email_markdown(comment.content), - page_type=comment.get_comment_page_type(), - page_title=comment.get_comment_page_title(), - provider=PROVIDERS[comment.root_target.referent.provider] if comment.page == Comment.FILES else '', - target_user=target.referent.user if is_reply(target) else None, - parent_comment=target.referent.content if is_reply(target) else '', - url=comment.get_comment_page_url(), - exclude=new_mentions, - ) - time_now = timezone.now() - sent_subscribers = notify( - event='comments', - user=auth.user, - node=node, - timestamp=time_now, - **context - ) - - if is_reply(target): - if target.referent.user and target.referent.user._id not in sent_subscribers: - notify( - event='global_comment_replies', - user=auth.user, - node=node, - timestamp=time_now, - **context - ) - - -@mention_added.connect 
-def send_mention_added_notification(comment, new_mentions, auth): - node = comment.node - target = comment.target - - context = dict( - profile_image_url=auth.user.profile_image_url(), - content=render_email_markdown(comment.content), - page_type='file' if comment.page == Comment.FILES else node.project_or_component, - page_title=comment.root_target.referent.name if comment.page == Comment.FILES else '', - provider=PROVIDERS[comment.root_target.referent.provider] if comment.page == Comment.FILES else '', - target_user=target.referent.user if is_reply(target) else None, - parent_comment=target.referent.content if is_reply(target) else '', - new_mentions=new_mentions, - url=comment.get_comment_page_url() - ) - time_now = timezone.now() - notify_mentions( - event='global_mentions', - user=auth.user, - node=node, - timestamp=time_now, - **context - ) - - def is_reply(target): return isinstance(target.referent, Comment) diff --git a/website/settings/defaults.py b/website/settings/defaults.py index d891e886873..afa3698c73b 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -295,9 +295,6 @@ def parent_dir(path): # Seconds before another notification email can be sent to a contributor when added to a project CONTRIBUTOR_ADDED_EMAIL_THROTTLE = 24 * 3600 -# Seconds before another notification email can be sent to a member when added to an OSFGroup -GROUP_MEMBER_ADDED_EMAIL_THROTTLE = 24 * 3600 - # Seconds before another notification email can be sent to group members when added to a project GROUP_CONNECTED_EMAIL_THROTTLE = 24 * 3600 diff --git a/website/signals.py b/website/signals.py index c1b8660dcd4..990610565f7 100644 --- a/website/signals.py +++ b/website/signals.py @@ -8,8 +8,6 @@ ALL_SIGNALS = [ # TODO: Fix - project.comment_added, - project.mention_added, project.unreg_contributor_added, project.contributor_added, project.contributor_removed, diff --git a/website/templates/emails/comment_replies.html.mako b/website/templates/emails/comment_replies.html.mako deleted file mode 100644 index ab0cc25a0d1..00000000000 --- a/website/templates/emails/comment_replies.html.mako +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - -
    avatar - ${user.fullname} - replied to your comment "${parent_comment}" on your ${provider + ' ' if page_type == 'file' else ''}${page_type} - %if page_type == 'file' or page_type == 'wiki': - ${page_title} - %endif - at ${localized_timestamp}: - ${content} -
    diff --git a/website/templates/emails/comments.html.mako b/website/templates/emails/comments.html.mako deleted file mode 100644 index 2537189bb09..00000000000 --- a/website/templates/emails/comments.html.mako +++ /dev/null @@ -1,19 +0,0 @@ -<% from osf.models import OSFUser %> - - - - - - - -
    avatar - ${user.fullname} - commented on your ${provider + ' ' if page_type == 'file' else ''}${page_type} - %if page_type == 'file' or page_type == 'wiki': - ${page_title} - %endif - at ${localized_timestamp}: - ${content} -
    diff --git a/website/templates/emails/conference_deprecation.html.mako b/website/templates/emails/conference_deprecation.html.mako deleted file mode 100644 index 4453c4db36d..00000000000 --- a/website/templates/emails/conference_deprecation.html.mako +++ /dev/null @@ -1,17 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
-    <br>
-    You recently attempted to interact with the Meeting service via email, but this service has been discontinued and is no longer available for new interactions.<br>
-    <br>
-    Existing meetings and past submissions remain unchanged. If you have any questions or need further assistance, please contact our support team at [ ${support_email} ].<br>
-    <br>
-    Sincerely yours,<br>
-    <br>
-    The OSF Robot<br>
    - - - \ No newline at end of file diff --git a/website/templates/emails/conference_failed.html.mako b/website/templates/emails/conference_failed.html.mako deleted file mode 100644 index c64e44f210e..00000000000 --- a/website/templates/emails/conference_failed.html.mako +++ /dev/null @@ -1,16 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
-    <br>
-    You recently tried to create a project on the Open Science Framework via email, but your message did not contain any file attachments. Please try again, making sure to attach the files you'd like to upload to your message.<br>
-    <br>
-
-    Sincerely yours,<br>
-    <br>
-    The OSF Robot<br>
    - - - diff --git a/website/templates/emails/conference_inactive.html.mako b/website/templates/emails/conference_inactive.html.mako deleted file mode 100644 index f5547a50b06..00000000000 --- a/website/templates/emails/conference_inactive.html.mako +++ /dev/null @@ -1,15 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
-    <br>
-    You recently tried to create a project on the Open Science Framework via email, but the conference you attempted to submit to is not currently accepting new submissions. For a list of conferences, see [ ${presentations_url} ].<br>
-    <br>
-    Sincerely yours,<br>
-    <br>
-    The OSF Robot<br>
    - - - diff --git a/website/templates/emails/conference_submitted.html.mako b/website/templates/emails/conference_submitted.html.mako deleted file mode 100644 index 60f190cf353..00000000000 --- a/website/templates/emails/conference_submitted.html.mako +++ /dev/null @@ -1,34 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${fullname},
-    <br>
-    Congratulations! You have successfully added your ${conf_full_name} ${presentation_type} to OSF.<br>
-    <br>
-    % if user_created:
-    Your account on OSF has been created. To claim your account, please create a password by clicking here: ${set_password_url}. Please verify your profile information at: ${profile_url}.<br>
-    <br>
-    % endif
-    You now have a permanent, citable URL, that you can share: ${node_url}. All submissions for ${conf_full_name} may be viewed at the following link: ${conf_view_url}.<br>
-    <br>
-    % if is_spam:
-    Your email was flagged as spam by our mail processing service. To prevent potential spam, we have made your project private. If this is a real project, please log in to your account, browse to your project, and click the "Make Public" button so that other users can view it.<br>
-    <br>
-    % endif
-    Get more from OSF by enhancing your project with the following:<br>
-    <br>
-    * Collaborators/contributors to the submission<br>
-    * Charts, graphs, and data that didn't make it onto the submission<br>
-    * Links to related publications or reference lists<br>
-    * Connecting other accounts, like Dropbox, Google Drive, GitHub, figshare and Mendeley via add-on integration. Learn more and read the full list of available add-ons here.<br>
-    <br>
-    To learn more about OSF, read the Guides.<br>
-    <br>
-    Sincerely,<br>
-    <br>
-    The OSF Team<br>
    - - - diff --git a/website/templates/emails/confirm_agu_conference.html.mako b/website/templates/emails/confirm_agu_conference.html.mako deleted file mode 100644 index 603e2c39e8d..00000000000 --- a/website/templates/emails/confirm_agu_conference.html.mako +++ /dev/null @@ -1,26 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${user.fullname},
-    <br>
-
-    Thank you for joining us at the AGU Open Science Pavilion, and welcome to the Open Science Framework (OSF).
-    We are pleased to offer a special AGU attendees exclusive 1:1 consultation to continue our conversation and to help
-    you get oriented on the OSF. This is an opportunity for us to show you useful OSF features, talk about
-    open science in Earth and space sciences, and for you to ask any questions you may have.
-    You can sign up to participate by completing this form, and a member of our team will be in touch to
-    determine your availability:
-    <br>
-    https://docs.google.com/forms/d/e/1FAIpQLSeJ23YPaEMdbLY1OqbcP85Tt6rhLpFoOtH0Yg4vY_wSKULRcw/viewform?usp=sf_link
-    <br>
-    <br>
-    To confirm your OSF account, please verify your email address by visiting this link:<br>
-    <br>
-    ${confirmation_url}<br>
-    <br>
-    From the team at the Center for Open Science<br>
    - - - diff --git a/website/templates/emails/confirm_agu_conference_2023.html.mako b/website/templates/emails/confirm_agu_conference_2023.html.mako deleted file mode 100644 index 429ec911410..00000000000 --- a/website/templates/emails/confirm_agu_conference_2023.html.mako +++ /dev/null @@ -1,25 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - Hello ${user.fullname},
-    <br>
-
-    Thank you for joining us at the AGU Open Science Pavilion, and welcome to the Open Science Framework.
-    We are pleased to offer a special AGU attendees exclusive community call to continue our conversation and to help
-    you get oriented on the OSF. This is an opportunity for us to show you useful OSF features, talk about
-    open science in your domains, and for you to ask any questions you may have.
-    You can register for this free event here:
-    <br>
-    https://cos-io.zoom.us/meeting/register/tZAuceCvrjotHNG3n6XzLFDv1Rnn2hkjczHr
-    <br>
-    <br>
-    To confirm your OSF account, please verify your email address by visiting this link:<br>
-    <br>
-    ${confirmation_url}<br>
-    <br>
-    From the team at the Center for Open Science<br>
    - - - diff --git a/website/templates/emails/group_added_to_node.html.mako b/website/templates/emails/group_added_to_node.html.mako deleted file mode 100644 index cf4f42aa102..00000000000 --- a/website/templates/emails/group_added_to_node.html.mako +++ /dev/null @@ -1,23 +0,0 @@ -<%inherit file="notify_base.mako" /> -<%def name="content()"> - - - <%! - from website import settings - %> - Hello ${user.fullname},
-    <br>
-    ${referrer_name + ' has given your group, ' + group_name + ',' if referrer_name else 'Your group, ' + group_name + ', has been given'} ${permission} permissions to the project "${node.title}" on OSF: ${node.absolute_url}<br>
-    <br>
-    You will ${'not receive ' if all_global_subscriptions_none else 'be automatically subscribed to '}notification emails for this project. To change your email notification preferences, visit your project or your user settings: ${settings.DOMAIN + "settings/notifications/"}<br>
-    <br>
-    Sincerely,<br>
-    <br>
-    Open Science Framework Robot<br>
-    <br>
-    Want more information? Visit https://osf.io/ to learn about the Open Science Framework, or https://cos.io/ for information about its supporting organization, the Center for Open Science.<br>
-    <br>
-    Questions? Email ${osf_contact_email}<br>
    - - - diff --git a/website/templates/emails/group_member_added.html.mako b/website/templates/emails/group_member_added.html.mako deleted file mode 100644 index 2b8532f0190..00000000000 --- a/website/templates/emails/group_member_added.html.mako +++ /dev/null @@ -1,24 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - <%! - from website import settings - %> - Hello ${user.fullname},
-    <br>
-    ${referrer_name + ' has added you' if referrer_name else 'You have been added'} as a ${permission} of the group "${group_name}" on OSF.<br>
-    <br>
-    If you have erroneously been added to the group "${group_name}," please contact a group administrator.<br>
-    <br>
-    Sincerely,<br>
-    <br>
-    Open Science Framework Robot<br>
-    <br>
-    Want more information? Visit https://osf.io/ to learn about the Open Science Framework, or https://cos.io/ for information about its supporting organization, the Center for Open Science.<br>
-    <br>
-    Questions? Email ${osf_contact_email}<br>
    - - - diff --git a/website/templates/emails/group_member_unregistered_added.html.mako b/website/templates/emails/group_member_unregistered_added.html.mako deleted file mode 100644 index bc9dee5cc24..00000000000 --- a/website/templates/emails/group_member_unregistered_added.html.mako +++ /dev/null @@ -1,24 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - <%! - from website import settings - %> - Hello ${user.fullname},
-    <br>
-    ${referrer_name + ' has added you' if referrer_name else 'You have been added'} to the group "${group_name}" on OSF. To set a password for your account, visit:<br>
-    <br>
-    ${claim_url}<br>
-    <br>
-    Once you have set a password, you will be able to create your own groups and projects.
-    <br>
-    If you are not ${user.fullname} or you are erroneously being associated with "${group_name}," please email ${osf_contact_email} with the subject line "Claiming Error" to report the problem.<br>
-    <br>
-    Sincerely,<br>
-    <br>
-    The OSF Team<br>
-    <br>
    - - diff --git a/website/templates/emails/quickfiles_migrated.html.mako b/website/templates/emails/quickfiles_migrated.html.mako deleted file mode 100644 index 94948b2a066..00000000000 --- a/website/templates/emails/quickfiles_migrated.html.mako +++ /dev/null @@ -1,31 +0,0 @@ -<%inherit file="notify_base.mako" /> - -<%def name="content()"> - - - <%!from website import settings%> - Hello ${user.fullname}, -

-    The Quick Files feature has been discontinued and your files have been migrated into an OSF Project. You can find the new Project on your My Projects page, entitled "${user.fullname}'s Quick Files". Your favorite Quick Files features are still present; you can view, download, and share your files from their new location. Your file URL's will also continue to resolve properly, and you can still move your files between Projects by linking your Projects. Contact ${settings.OSF_CONTACT_EMAIL} if you have any questions or concerns.
-    <br><br>
-    <br><br>
-    Thank you for partnering with us as a stakeholder in open science and in the success of the infrastructure that help make it possible.
-    <br><br>
-    <br><br>
-    The Center for Open Science Team
-    <br><br>
-    <br><br>
-    Sincerely,<br>
-    The OSF Team
-    <br><br>
-    <br><br>
-    Want more information? Visit ${settings.DOMAIN} to learn about the OSF,
-    or https://cos.io/ for information about its supporting organization,
-    the Center for Open Science.
-    <br><br>
-    <br><br>
-    Questions? Email ${settings.OSF_CONTACT_EMAIL}
-    <br><br>
    - - - From b20470f61315c88e0a6045efa8cabe2b17252103 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 8 May 2025 15:23:00 +0300 Subject: [PATCH 008/176] Clean up imports --- website/conferences/views.py | 4 +--- website/signals.py | 2 -- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/website/conferences/views.py b/website/conferences/views.py index 4f3e7cd79ee..cf7dbfd6d3b 100644 --- a/website/conferences/views.py +++ b/website/conferences/views.py @@ -13,7 +13,7 @@ from osf import features from osf.models import AbstractNode, Node, Conference, OSFUser from website import settings -from website.conferences import utils, signals +from website.conferences import utils from website.conferences.message import ConferenceMessage, ConferenceError from website.ember_osf_web.decorators import ember_flag_is_active from website.mails import CONFERENCE_SUBMITTED, CONFERENCE_INACTIVE, CONFERENCE_FAILED, CONFERENCE_DEPRECATION @@ -154,8 +154,6 @@ def add_poster_by_email(conference, message): can_change_preferences=False, logo=settings.OSF_MEETINGS_LOGO ) - if user_created: - signals.osf4m_user_created.send(user, conference=conference, node=node) def conference_data(meeting): try: diff --git a/website/signals.py b/website/signals.py index 990610565f7..3d1d2233adf 100644 --- a/website/signals.py +++ b/website/signals.py @@ -3,7 +3,6 @@ from framework.auth import signals as auth from website.project import signals as project from addons.base import signals as event -from website.conferences import signals as conference from website.reviews import signals as reviews @@ -21,6 +20,5 @@ auth.user_account_merged, auth.unconfirmed_user_created, event.file_updated, - conference.osf4m_user_created, reviews.reviews_email ] From b0119911970c6be2e2885ad776bcf160e1ec019f Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 8 May 2025 16:46:18 +0300 Subject: [PATCH 009/176] Clean up tests --- .../views/test_subscriptions_list.py | 2 +- tests/test_conferences.py | 12 ----- tests/test_notifications.py | 50 ++++--------------- 3 files changed, 12 insertions(+), 52 deletions(-) diff --git a/api_tests/subscriptions/views/test_subscriptions_list.py b/api_tests/subscriptions/views/test_subscriptions_list.py index cda043314b1..8802c5c58e7 100644 --- a/api_tests/subscriptions/views/test_subscriptions_list.py +++ b/api_tests/subscriptions/views/test_subscriptions_list.py @@ -35,7 +35,7 @@ def test_list_complete(self, app, user, provider, node, global_user_notification res = app.get(url, auth=user.auth) notification_ids = [item['id'] for item in res.json['data']] # There should only be 4 notifications: users' global, node's comments, node's file updates and provider's preprint added. 
- assert len(notification_ids) == 4 + assert len(notification_ids) == 3 assert f'{user._id}_global' in notification_ids assert f'{provider._id}_new_pending_submissions' in notification_ids assert f'{node._id}_comments' in notification_ids diff --git a/tests/test_conferences.py b/tests/test_conferences.py index 2f431df55bd..8c6c62ee304 100644 --- a/tests/test_conferences.py +++ b/tests/test_conferences.py @@ -231,18 +231,6 @@ def test_upload_no_file_name(self, mock_put, mock_get_url): cookies={settings.COOKIE_NAME: self.user.get_or_create_cookie().decode()}, ) - @mock.patch('website.conferences.utils.upload_attachments') - def test_add_poster_by_email(self, mock_upload_attachments): - conference = ConferenceFactory() - - with self.make_context(data={'from': 'bdawk@sb52champs.com', 'subject': 'It\'s PARTY TIME!'}): - msg = message.ConferenceMessage() - views.add_poster_by_email(conference, msg) - - user = OSFUser.objects.get(username='bdawk@sb52champs.com') - assert user.email == 'bdawk@sb52champs.com' - assert user.fullname == user._id # user's shouldn't be able to use email as fullname, so we use the guid. - class TestMessage(ContextTestCase): PUSH_CONTEXT = False diff --git a/tests/test_notifications.py b/tests/test_notifications.py index ec7f0106dfe..4c9db2e8eb5 100644 --- a/tests/test_notifications.py +++ b/tests/test_notifications.py @@ -135,7 +135,7 @@ def test_new_project_creator_is_subscribed_with_global_settings(self): file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - assert len(user_subscriptions) == 5 # subscribed to both node and user settings + assert len(user_subscriptions) == 2 # subscribed to both node and user settings assert 'file_updated' in event_types assert 'global_file_updated' in event_types assert file_updated_subscription.none.count() == 1 @@ -155,7 +155,7 @@ def test_new_node_creator_is_not_subscribed_with_global_settings(self): user_subscriptions = list(utils.get_all_user_subscriptions(user)) event_types = [sub.event_name for sub in user_subscriptions] - assert len(user_subscriptions) == 4 # subscribed to only user settings + assert len(user_subscriptions) == 1 # subscribed to only user settings assert 'global_file_updated' in event_types def test_subscribe_user_to_global_notfiications(self): @@ -186,7 +186,7 @@ def test_new_project_creator_is_subscribed_with_default_global_settings(self): file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - assert len(user_subscriptions) == 6 # subscribed to both node and user settings + assert len(user_subscriptions) == 2 # subscribed to both node and user settings assert 'file_updated' in event_types assert 'global_file_updated' in event_types assert file_updated_subscription.email_transactional.count() == 1 @@ -209,7 +209,7 @@ def test_new_fork_creator_is_subscribed_with_default_global_settings(self): node_file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') project_file_updated_subscription = NotificationSubscription.objects.get(_id=project._id + '_file_updated') - assert len(user_subscriptions) == 7 # subscribed to project, fork, and user settings + assert len(user_subscriptions) == 3 # subscribed to project, fork, and user settings assert 'file_updated' in event_types assert 'global_file_updated' in event_types assert node_file_updated_subscription.email_transactional.count() == 1 @@ -262,7 +262,7 @@ def test_contributor_subscribed_when_added_to_component(self): 
file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - assert len(contributor_subscriptions) == 4 # subscribed to both node and user settings + assert len(contributor_subscriptions) == 2 # subscribed to both node and user settings assert 'file_updated' in event_types assert 'global_file_updated' in event_types assert file_updated_subscription.email_transactional.count() == 1 @@ -289,10 +289,10 @@ def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self): component = factories.NodeFactory(parent=project, creator=project.creator) s = NotificationSubscription.objects.filter(email_transactional=project.creator) - assert s.count() == 2 + assert s.count() == 1 s = NotificationSubscription.objects.filter(email_transactional=component.creator) - assert s.count() == 2 + assert s.count() == 1 with capture_signals() as mock_signals: project.remove_node(auth=Auth(project.creator)) @@ -399,38 +399,12 @@ def setUp(self): self.user = factories.UserFactory() self.project = factories.ProjectFactory(creator=self.user) - self.project_subscription = NotificationSubscription.objects.get( - node=self.project, - _id=self.project._id + '_comments', - event_name='comments' - ) - self.user.notifications_configured[self.project._id] = True self.user.save() self.node = factories.NodeFactory(parent=self.project, creator=self.user) - self.node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=self.node._id + '_' + 'comments', - node=self.node, - event_name='comments' - ) - self.node_comments_subscription.save() - self.node_comments_subscription.email_transactional.add(self.user) - self.node_comments_subscription.save() - - self.node_subscription = list(NotificationSubscription.objects.filter(node=self.node)) - - self.user_subscription = [factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'comment_replies', - user=self.user, - event_name='comment_replies' - ), - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_comment', - user=self.user, - event_name='global_comment' - ), + self.user_subscription = [ factories.NotificationSubscriptionFactory( _id=self.user._id + '_' + 'global_file_updated', user=self.user, @@ -719,10 +693,6 @@ def test_get_global_notification_type(self): notification_type = utils.get_global_notification_type(self.user_subscription[1] ,self.user) assert 'email_transactional' == notification_type - def test_check_if_all_global_subscriptions_are_none_false(self): - all_global_subscriptions_none = utils.check_if_all_global_subscriptions_are_none(self.user) - assert not all_global_subscriptions_none - # # Business logic prevents this from being an applicable unit test; # # global_mentions cannot be unsubscribed from # def test_check_if_all_global_subscriptions_are_none_true(self): @@ -1012,6 +982,8 @@ def setUp(self): super().setUp() self.user = factories.AuthUserFactory() self.project = factories.ProjectFactory() + self.node = factories.NodeFactory(parent=self.project) + def test_get_settings_url_for_node(self): url = emails.get_settings_url(self.project._id, self.user) @@ -1168,7 +1140,7 @@ def test_group_notifications_by_user_digest(self): } ] - assert len(user_groups) == 2 + assert len(user_groups) == 1 assert user_groups == expected digest_ids = [d2._id, d3._id] remove_notifications(email_notification_ids=digest_ids) From d9004bc3e124f8406a49f94de5806db3694afa5c Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 8 May 2025 17:29:26 +0300 Subject: [PATCH 
010/176] Clean up tests --- .../views/test_subscriptions_list.py | 3 +- tests/test_notifications.py | 200 ------------------ 2 files changed, 1 insertion(+), 202 deletions(-) diff --git a/api_tests/subscriptions/views/test_subscriptions_list.py b/api_tests/subscriptions/views/test_subscriptions_list.py index 8802c5c58e7..2e5718dafdf 100644 --- a/api_tests/subscriptions/views/test_subscriptions_list.py +++ b/api_tests/subscriptions/views/test_subscriptions_list.py @@ -34,11 +34,10 @@ def url(self, user, node): def test_list_complete(self, app, user, provider, node, global_user_notification, url): res = app.get(url, auth=user.auth) notification_ids = [item['id'] for item in res.json['data']] - # There should only be 4 notifications: users' global, node's comments, node's file updates and provider's preprint added. + # There should only be 3 notifications: users' global, node's file updates and provider's preprint added. assert len(notification_ids) == 3 assert f'{user._id}_global' in notification_ids assert f'{provider._id}_new_pending_submissions' in notification_ids - assert f'{node._id}_comments' in notification_ids assert f'{node._id}_file_updated' in notification_ids def test_unauthenticated(self, app, url): diff --git a/tests/test_notifications.py b/tests/test_notifications.py index 4c9db2e8eb5..05c76618ef2 100644 --- a/tests/test_notifications.py +++ b/tests/test_notifications.py @@ -429,26 +429,6 @@ def test_from_subscription_key(self): 'event': 'comment_replies' } - def test_get_all_user_subscriptions(self): - user_subscriptions = list(utils.get_all_user_subscriptions(self.user)) - assert self.project_subscription in user_subscriptions - assert self.node_comments_subscription in user_subscriptions - for x in self.user_subscription: - assert x in user_subscriptions - assert len(user_subscriptions) == 6 - - def test_get_all_node_subscriptions_given_user_subscriptions(self): - user_subscriptions = utils.get_all_user_subscriptions(self.user) - node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node, - user_subscriptions=user_subscriptions)] - expected_node_subscription_ids = [x._id for x in self.node_subscription] - assert node_subscription_ids == expected_node_subscription_ids - - def test_get_all_node_subscriptions_given_user_and_node(self): - node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node)] - expected_node_subscription_ids = [x._id for x in self.node_subscription] - assert node_subscription_ids == expected_node_subscription_ids - def test_get_configured_project_ids_does_not_return_user_or_node_ids(self): configured_nodes = utils.get_configured_projects(self.user) configured_ids = [n._id for n in configured_nodes] @@ -498,171 +478,6 @@ def test_get_configured_project_ids_excludes_private_projects_if_no_subscription configured_project_nodes = utils.get_configured_projects(user) assert private_project not in configured_project_nodes - def test_get_parent_notification_type(self): - nt = utils.get_parent_notification_type(self.node, 'comments', self.user) - assert nt == 'email_transactional' - - def test_get_parent_notification_type_no_parent_subscriptions(self): - node = factories.NodeFactory() - nt = utils.get_parent_notification_type(node._id, 'comments', self.user) - assert nt is None - - def test_get_parent_notification_type_no_parent(self): - project = factories.ProjectFactory() - nt = utils.get_parent_notification_type(project._id, 'comments', self.user) - assert nt is None - - def 
test_get_parent_notification_type_handles_user_id(self): - nt = utils.get_parent_notification_type(self.user._id, 'comments', self.user) - assert nt is None - - def test_format_data_project_settings(self): - data = utils.format_data(self.user, [self.project]) - parent_event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - } - child_event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [] - } - expected_new = [['event'], 'event'] - schema = subscription_schema(self.project, expected_new) - assert schema.validate(data) - assert has(data, parent_event) - assert has(data, child_event) - - def test_format_data_node_settings(self): - data = utils.format_data(self.user, [self.node]) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [] - } - schema = subscription_schema(self.project, ['event']) - assert schema.validate(data) - assert has(data, event) - - def test_format_includes_admin_view_only_component_subscriptions(self): - # Test private components in which parent project admins are not contributors still appear in their - # notifications settings. - node = factories.NodeFactory(parent=self.project) - data = utils.format_data(self.user, [self.project]) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'adopt_parent', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event'], ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_excludes_pointers(self): - project = factories.ProjectFactory() - pointed = factories.ProjectFactory() - project.add_pointer(pointed, Auth(project.creator)) - project.creator.notifications_configured[project._id] = True - project.creator.save() - configured_project_nodes = utils.get_configured_projects(project.creator) - data = utils.format_data(project.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event']) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_user_subscriptions_includes_private_parent_if_configured_children(self): - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - - node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - 
data = utils.format_data(node.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_user_subscriptions_if_children_points_to_parent(self): - private_project = factories.ProjectFactory(creator=self.user) - node = factories.NodeFactory(parent=private_project, creator=self.user) - node.save() - node_comments_subscription = factories.NotificationSubscriptionFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - data = utils.format_data(node.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event']]) - assert schema.validate(data) - assert has(data, event) - def test_format_user_subscriptions(self): data = utils.format_user_subscriptions(self.user) expected = [ @@ -689,21 +504,6 @@ def test_format_user_subscriptions(self): assert data == expected - def test_get_global_notification_type(self): - notification_type = utils.get_global_notification_type(self.user_subscription[1] ,self.user) - assert 'email_transactional' == notification_type - - # # Business logic prevents this from being an applicable unit test; - # # global_mentions cannot be unsubscribed from - # def test_check_if_all_global_subscriptions_are_none_true(self): - # for x in self.user_subscription: - # x.none.add(self.user) - # x.email_transactional.remove(self.user) - # for x in self.user_subscription: - # x.save() - # all_global_subscriptions_none = utils.check_if_all_global_subscriptions_are_none(self.user) - # assert all_global_subscriptions_none - def test_format_data_user_settings(self): data = utils.format_user_and_project_subscriptions(self.user) expected = [ From 91d048d8f4b7b0a7ad0bffd5a59fc6ad9cc2bea3 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Mon, 19 May 2025 14:37:37 +0300 Subject: [PATCH 011/176] Update send_mail mocks --- addons/boa/tests/test_tasks.py | 4 +- .../views/test_crossref_email_response.py | 10 ++--- ...est_draft_registration_contributor_list.py | 6 +-- .../views/test_draft_registration_list.py | 4 +- .../test_institution_relationship_nodes.py | 4 +- .../views/test_node_contributors_list.py | 10 ++--- api_tests/nodes/views/test_node_forks_list.py | 4 +- .../test_node_relationship_institutions.py | 14 +++--- .../views/test_preprint_contributors_list.py | 14 +++--- ...est_collections_provider_moderator_list.py | 14 +++--- .../test_preprint_provider_moderator_list.py | 12 +++--- api_tests/providers/tasks/test_bulk_upload.py | 6 +-- .../views/test_registration_detail.py | 4 +- .../test_node_request_institutional_access.py | 16 +++---- .../requests/views/test_node_request_list.py | 4 +- .../views/test_preprint_request_list.py | 2 +- 
.../views/test_request_actions_create.py | 12 +++--- api_tests/users/views/test_user_claim.py | 2 +- api_tests/users/views/test_user_list.py | 16 +++---- .../test_user_message_institutional_access.py | 10 ++--- api_tests/users/views/test_user_settings.py | 10 ++--- .../users/views/test_user_settings_detail.py | 6 +-- .../test_check_crossref_dois.py | 2 +- .../test_email_all_users.py | 6 +-- osf_tests/test_archiver.py | 20 ++++----- osf_tests/test_collection.py | 8 ++-- osf_tests/test_collection_submission.py | 40 ++++++++--------- osf_tests/test_institution.py | 4 +- osf_tests/test_merging_users.py | 2 +- osf_tests/test_node.py | 6 +-- osf_tests/test_queued_mail.py | 26 +++++------ ...t_registration_moderation_notifications.py | 22 +++++----- osf_tests/test_reviewable.py | 2 +- osf_tests/test_schema_responses.py | 40 ++++++++--------- osf_tests/test_user.py | 6 +-- .../test_deactivate_requested_accounts.py | 5 +-- scripts/tests/test_send_queued_mails.py | 4 +- scripts/tests/test_triggered_mails.py | 2 +- tests/test_adding_contributor_views.py | 43 +++++++++---------- tests/test_auth.py | 6 +-- tests/test_auth_views.py | 21 +++++---- tests/test_conferences.py | 10 ++--- tests/test_misc_views.py | 8 ++-- tests/test_notifications.py | 6 +-- tests/test_preprints.py | 2 +- tests/test_registrations/test_embargoes.py | 2 +- tests/test_registrations/test_retractions.py | 8 ++-- tests/test_spam_mixin.py | 2 +- tests/test_user_profile_view.py | 10 ++--- tests/test_webtests.py | 24 +++++------ website/mails/mails.py | 32 +++++++++----- 51 files changed, 280 insertions(+), 273 deletions(-) diff --git a/addons/boa/tests/test_tasks.py b/addons/boa/tests/test_tasks.py index a4842d6c417..e3bbf7f9c76 100644 --- a/addons/boa/tests/test_tasks.py +++ b/addons/boa/tests/test_tasks.py @@ -53,7 +53,7 @@ def test_boa_error_code(self): assert BoaErrorCode.JOB_TIME_OUT_ERROR == 7 def test_handle_boa_error(self): - with mock.patch('addons.boa.tasks.send_mail', return_value=None) as mock_send_mail, \ + with mock.patch('addons.boa.tasks.execute_email_send', return_value=None) as mock_send_mail, \ mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \ mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error: return_value = handle_boa_error( @@ -179,7 +179,7 @@ async def test_submit_success(self): mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \ mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None) as mock_async_sleep, \ - mock.patch('addons.boa.tasks.send_mail', return_value=None) as mock_send_mail, \ + mock.patch('addons.boa.tasks.execute_email_send', return_value=None) as mock_send_mail, \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: return_value = await submit_to_boa_async( self.host, diff --git a/api_tests/crossref/views/test_crossref_email_response.py b/api_tests/crossref/views/test_crossref_email_response.py index 2504c2a092e..4594ea5ff16 100644 --- a/api_tests/crossref/views/test_crossref_email_response.py +++ b/api_tests/crossref/views/test_crossref_email_response.py @@ -158,7 +158,7 @@ def test_wrong_request_context_raises_permission_error(self, app, url, error_xml def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, error_xml): assert not preprint.get_identifier_value('doi') - with mock.patch('framework.auth.views.mails.send_mail') as 
mock_send_mail: + with mock.patch('framework.auth.views.mails.execute_email_send') as mock_send_mail: context_data = self.make_mailgun_payload(crossref_response=error_xml) app.post(url, context_data) assert mock_send_mail.called @@ -167,7 +167,7 @@ def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, def test_success_response_sets_doi(self, app, url, preprint, success_xml): assert not preprint.get_identifier_value('doi') - with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail: + with mock.patch('framework.auth.views.mails.execute_email_send') as mock_send_mail: context_data = self.make_mailgun_payload(crossref_response=success_xml) app.post(url, context_data) @@ -181,7 +181,7 @@ def test_update_success_response(self, app, preprint, url): preprint.set_identifier_value(category='doi', value=initial_value) update_xml = self.update_success_xml(preprint) - with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail: + with mock.patch('framework.auth.views.mails.execute_email_send') as mock_send_mail: context_data = self.make_mailgun_payload(crossref_response=update_xml) app.post(url, context_data) @@ -195,7 +195,7 @@ def test_update_success_does_not_set_preprint_doi_created(self, app, preprint, u update_xml = self.update_success_xml(preprint) pre_created = preprint.preprint_doi_created - with mock.patch('framework.auth.views.mails.send_mail'): + with mock.patch('framework.auth.views.mails.execute_email_send'): context_data = self.make_mailgun_payload(crossref_response=update_xml) app.post(url, context_data) @@ -219,7 +219,7 @@ def test_confirmation_marks_legacy_doi_as_deleted(self, app, url, preprint): preprint.set_identifier_value(category='legacy_doi', value=legacy_value) update_xml = self.update_success_xml(preprint) - with mock.patch('framework.auth.views.mails.send_mail') as mock_send_mail: + with mock.patch('framework.auth.views.mails.execute_email_send') as mock_send_mail: context_data = self.make_mailgun_payload(crossref_response=update_xml) app.post(url, context_data) diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py index 54b2d23ad5d..bb59fca7d59 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py @@ -214,7 +214,7 @@ def url_project_contribs(self, project_public): # Overrides TestNodeContributorCreateEmail return f'/{API_BASE}draft_registrations/{project_public._id}/contributors/' - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_sends_email( self, mock_mail, app, user, user_two, url_project_contribs): @@ -264,7 +264,7 @@ def test_add_contributor_signal_if_default( assert res.json['errors'][0]['detail'] == 'default is not a valid email preference.' 
# Overrides TestNodeContributorCreateEmail - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_sends_email( self, mock_mail, app, user, url_project_contribs): url = f'{url_project_contribs}?send_email=draft_registration' @@ -301,7 +301,7 @@ def test_add_unregistered_contributor_signal_if_default( assert 'draft_registration' == kwargs['email_template'] # Overrides TestNodeContributorCreateEmail - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_without_email_no_email( self, mock_mail, app, user, url_project_contribs): url = f'{url_project_contribs}?send_email=draft_registration' diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index 2f9f4c31eeb..b2c23123df0 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -337,7 +337,7 @@ def test_logged_in_non_contributor_cannot_create_draft( assert res.status_code == 403 def test_create_project_based_draft_does_not_email_initiator(self, app, user, url_draft_registrations, payload): - with mock.patch.object(mails, 'send_mail') as mock_send_mail: + with mock.patch.object(mails, 'execute_email_send') as mock_send_mail: app.post_json_api(f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, auth=user.auth) assert not mock_send_mail.called @@ -430,7 +430,7 @@ def test_admin_can_create_draft( def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload): # Intercepting the send_mail call from website.project.views.contributor.notify_added_contributor - with mock.patch.object(mails, 'send_mail') as mock_send_mail: + with mock.patch.object(mails, 'execute_email_send') as mock_send_mail: resp = app.post_json_api( f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, diff --git a/api_tests/institutions/views/test_institution_relationship_nodes.py b/api_tests/institutions/views/test_institution_relationship_nodes.py index 901c2e552ca..50f78d53c74 100644 --- a/api_tests/institutions/views/test_institution_relationship_nodes.py +++ b/api_tests/institutions/views/test_institution_relationship_nodes.py @@ -378,7 +378,7 @@ def test_email_sent_on_affiliation_addition(self, app, user, institution, node_w current_institution = InstitutionFactory() node_without_institution.affiliated_institutions.add(current_institution) - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: + with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: res = app.post_json_api( url_institution_nodes, { @@ -398,7 +398,7 @@ def test_email_sent_on_affiliation_removal(self, app, admin, institution, node_p current_institution = InstitutionFactory() node_public.affiliated_institutions.add(current_institution) - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: + with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: res = app.delete_json_api( url_institution_nodes, { diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index c11389fea97..56401f685dc 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ 
b/api_tests/nodes/views/test_node_contributors_list.py @@ -1208,7 +1208,7 @@ class TestNodeContributorCreateEmail(NodeCRUDTestCase): def url_project_contribs(self, project_public): return f'/{API_BASE}nodes/{project_public._id}/contributors/' - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_no_email_if_false( self, mock_mail, app, user, url_project_contribs ): @@ -1223,7 +1223,7 @@ def test_add_contributor_no_email_if_false( assert res.status_code == 201 assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_sends_email( self, mock_mail, app, user, user_two, url_project_contribs ): @@ -1281,7 +1281,7 @@ def test_add_contributor_signal_preprint_email_disallowed( == 'preprint is not a valid email preference.' ) - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_sends_email( self, mock_mail, app, user, url_project_contribs ): @@ -1329,7 +1329,7 @@ def test_add_unregistered_contributor_signal_preprint_email_disallowed( == 'preprint is not a valid email preference.' ) - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_invalid_send_email_param( self, mock_mail, app, user, url_project_contribs ): @@ -1347,7 +1347,7 @@ def test_add_contributor_invalid_send_email_param( ) assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_without_email_no_email( self, mock_mail, app, user, url_project_contribs ): diff --git a/api_tests/nodes/views/test_node_forks_list.py b/api_tests/nodes/views/test_node_forks_list.py index fd5e4ac8c47..cbabc57351a 100644 --- a/api_tests/nodes/views/test_node_forks_list.py +++ b/api_tests/nodes/views/test_node_forks_list.py @@ -421,7 +421,7 @@ def test_send_email_success( self, app, user, public_project_url, fork_data_with_title, public_project): - with mock.patch.object(mails, 'send_mail', return_value=None) as mock_send_mail: + with mock.patch.object(mails, 'execute_email_send', return_value=None) as mock_send_mail: res = app.post_json_api( public_project_url, fork_data_with_title, @@ -440,7 +440,7 @@ def test_send_email_failed( fork_data_with_title, public_project): with mock.patch.object(NodeForksSerializer, 'save', side_effect=Exception()): - with mock.patch.object(mails, 'send_mail', return_value=None) as mock_send_mail: + with mock.patch.object(mails, 'execute_email_send', return_value=None) as mock_send_mail: with pytest.raises(Exception): app.post_json_api( public_project_url, diff --git a/api_tests/nodes/views/test_node_relationship_institutions.py b/api_tests/nodes/views/test_node_relationship_institutions.py index 47c607f3ed0..476bf8841e7 100644 --- a/api_tests/nodes/views/test_node_relationship_institutions.py +++ b/api_tests/nodes/views/test_node_relationship_institutions.py @@ -206,7 +206,7 @@ def test_user_with_institution_and_permissions( @mock.patch('website.mails.settings.USE_EMAIL', True) def test_user_with_institution_and_permissions_through_patch(self, app, user, institution_one, institution_two, node, node_institutions_url): - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: + with 
mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: res = app.patch_json_api( node_institutions_url, self.create_payload([institution_one, institution_two]), @@ -237,7 +237,7 @@ def test_remove_institutions_with_affiliated_user(self, app, user, institution_o node.save() assert institution_one in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: + with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: res = app.put_json_api( node_institutions_url, { @@ -262,7 +262,7 @@ def test_using_post_making_no_changes_returns_201(self, app, user, institution_o node.save() assert institution_one in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: + with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: res = app.post_json_api( node_institutions_url, self.create_payload([institution_one]), @@ -297,7 +297,7 @@ def test_add_through_patch_one_inst_to_node_with_inst( assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: + with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: res = app.patch_json_api( node_institutions_url, self.create_payload([institution_one, institution_two]), @@ -324,7 +324,7 @@ def test_add_through_patch_one_inst_while_removing_other( assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: + with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: res = app.patch_json_api( node_institutions_url, self.create_payload([institution_two]), @@ -358,7 +358,7 @@ def test_add_one_inst_with_post_to_node_with_inst( assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: + with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: res = app.post_json_api( node_institutions_url, self.create_payload([institution_two]), @@ -388,7 +388,7 @@ def test_delete_existing_inst(self, app, user, institution_one, node, node_insti node.affiliated_institutions.add(institution_one) node.save() - with mock.patch('osf.models.mixins.mails.send_mail') as mocked_send_mail: + with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: res = app.delete_json_api( node_institutions_url, self.create_payload([institution_one]), diff --git a/api_tests/preprints/views/test_preprint_contributors_list.py b/api_tests/preprints/views/test_preprint_contributors_list.py index ce69697ff0c..9c4fd2fa57b 100644 --- a/api_tests/preprints/views/test_preprint_contributors_list.py +++ b/api_tests/preprints/views/test_preprint_contributors_list.py @@ -1351,7 +1351,7 @@ class TestPreprintContributorCreateEmail(NodeCRUDTestCase): def url_preprint_contribs(self, preprint_published): return f'/{API_BASE}preprints/{preprint_published._id}/contributors/' - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_no_email_if_false( self, mock_mail, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=false' @@ 
-1368,7 +1368,7 @@ def test_add_contributor_no_email_if_false( assert res.status_code == 201 assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_needs_preprint_filter_to_send_email( self, mock_mail, app, user, user_two, url_preprint_contribs): @@ -1419,7 +1419,7 @@ def test_add_contributor_signal_if_preprint( assert mock_send.call_count == 1 assert 'preprint' == kwargs['email_template'] - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_sends_email( self, mock_mail, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=preprint' @@ -1455,7 +1455,7 @@ def test_add_unregistered_contributor_signal_if_preprint( assert 'preprint' == kwargs['email_template'] assert mock_send.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_invalid_send_email_param( self, mock_mail, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=true' @@ -1475,7 +1475,7 @@ def test_add_contributor_invalid_send_email_param( assert res.json['errors'][0]['detail'] == 'true is not a valid email preference.' assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_without_email_no_email( self, mock_mail, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=preprint' @@ -1494,7 +1494,7 @@ def test_add_unregistered_contributor_without_email_no_email( assert res.status_code == 201 assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('osf.models.preprint.update_or_enqueue_on_preprint_updated') def test_publishing_preprint_sends_emails_to_contributors( self, mock_update, mock_mail, app, user, url_preprint_contribs, preprint_unpublished): @@ -1535,7 +1535,7 @@ def test_contributor_added_signal_not_specified( assert 'preprint' == kwargs['email_template'] assert mock_send.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_contributor_added_not_sent_if_unpublished( self, mock_mail, app, user, preprint_unpublished): url = f'/{API_BASE}preprints/{preprint_unpublished._id}/contributors/?send_email=preprint' diff --git a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py index 107cd5ac054..abdd218fbb3 100644 --- a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py +++ b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py @@ -92,14 +92,14 @@ def test_GET_admin_with_filter(self, app, url, nonmoderator, moderator, admin, p @pytest.mark.django_db class TestPOSTCollectionsModeratorList: - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_POST_unauthorized(self, mock_mail, app, url, nonmoderator, moderator, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post(url, payload, expect_errors=True) assert res.status_code == 401 
assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_POST_forbidden(self, mock_mail, app, url, nonmoderator, moderator, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') @@ -111,7 +111,7 @@ def test_POST_forbidden(self, mock_mail, app, url, nonmoderator, moderator, prov assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_POST_admin_success_existing_user(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') @@ -121,14 +121,14 @@ def test_POST_admin_success_existing_user(self, mock_mail, app, url, nonmoderato assert res.json['data']['attributes']['permission_group'] == 'moderator' assert mock_mail.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_POST_admin_failure_existing_moderator(self, mock_mail, app, url, moderator, admin, provider): payload = make_payload(user_id=moderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_POST_admin_failure_unreg_moderator(self, mock_mail, app, url, moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Jalen Hurts', 'email': '1eagles@allbatman.org'} # test_user_with_no_moderator_admin_permissions @@ -145,14 +145,14 @@ def test_POST_admin_failure_unreg_moderator(self, mock_mail, app, url, moderator assert mock_mail.call_count == 1 assert mock_mail.call_args[0][0] == unreg_user['email'] - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_POST_admin_failure_invalid_group(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='citizen') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_POST_admin_success_email(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) diff --git a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py index fbcfd32a99b..2ea44e3209e 100644 --- a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py +++ b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py @@ -68,7 +68,7 @@ def test_list_get_admin_with_filter(self, app, url, nonmoderator, moderator, adm assert res.json['data'][0]['id'] == admin._id assert res.json['data'][0]['attributes']['permission_group'] == permissions.ADMIN - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_list_post_unauthorized(self, mock_mail, app, url, 
nonmoderator, moderator, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post(url, payload, expect_errors=True) @@ -82,7 +82,7 @@ def test_list_post_unauthorized(self, mock_mail, app, url, nonmoderator, moderat assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_list_post_admin_success_existing_user(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') @@ -92,14 +92,14 @@ def test_list_post_admin_success_existing_user(self, mock_mail, app, url, nonmod assert res.json['data']['attributes']['permission_group'] == 'moderator' assert mock_mail.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_list_post_admin_failure_existing_moderator(self, mock_mail, app, url, moderator, admin, provider): payload = self.create_payload(user_id=moderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_list_post_admin_failure_unreg_moderator(self, mock_mail, app, url, moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Son Goku', 'email': 'goku@dragonball.org'} # test_user_with_no_moderator_admin_permissions @@ -116,14 +116,14 @@ def test_list_post_admin_failure_unreg_moderator(self, mock_mail, app, url, mode assert mock_mail.call_count == 1 assert mock_mail.call_args[0][0] == unreg_user['email'] - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_list_post_admin_failure_invalid_group(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='citizen') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_list_post_admin_success_email(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) diff --git a/api_tests/providers/tasks/test_bulk_upload.py b/api_tests/providers/tasks/test_bulk_upload.py index 40003fb6931..99942a74e92 100644 --- a/api_tests/providers/tasks/test_bulk_upload.py +++ b/api_tests/providers/tasks/test_bulk_upload.py @@ -321,7 +321,7 @@ def test_bulk_creation_dry_run(self, registration_row_1, registration_row_2, upl assert not upload_job_done_full.email_sent @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) + @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) def test_bulk_creation_done_full(self, mock_send_mail, registration_row_1, registration_row_2, upload_job_done_full, provider, initiator, read_contributor, write_contributor): @@ -350,7 +350,7 @@ def test_bulk_creation_done_full(self, 
mock_send_mail, registration_row_1, regis ) @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) + @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) def test_bulk_creation_done_partial(self, mock_send_mail, registration_row_3, registration_row_invalid_extra_bib_1, upload_job_done_partial, provider, initiator, read_contributor, write_contributor): @@ -387,7 +387,7 @@ def test_bulk_creation_done_partial(self, mock_send_mail, registration_row_3, ) @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) + @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) def test_bulk_creation_done_error(self, mock_send_mail, registration_row_invalid_extra_bib_2, registration_row_invalid_affiliation, upload_job_done_error, provider, initiator, read_contributor, write_contributor, institution): diff --git a/api_tests/registrations/views/test_registration_detail.py b/api_tests/registrations/views/test_registration_detail.py index 5a08bdc378f..2cede11165c 100644 --- a/api_tests/registrations/views/test_registration_detail.py +++ b/api_tests/registrations/views/test_registration_detail.py @@ -744,7 +744,7 @@ def test_initiate_withdraw_registration_fails( res = app.put_json_api(public_url, public_payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_initiate_withdrawal_success(self, mock_send_mail, app, user, public_registration, public_url, public_payload): res = app.put_json_api(public_url, public_payload, auth=user.auth) assert res.status_code == 200 @@ -775,7 +775,7 @@ def test_initiate_withdrawal_with_embargo_ends_embargo( assert public_registration.is_pending_retraction assert not public_registration.is_pending_embargo - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_withdraw_request_does_not_send_email_to_unregistered_admins( self, mock_send_mail, app, user, public_registration, public_url, public_payload): unreg = UnregUserFactory() diff --git a/api_tests/requests/views/test_node_request_institutional_access.py b/api_tests/requests/views/test_node_request_institutional_access.py index ca2a2c477e4..13551df4647 100644 --- a/api_tests/requests/views/test_node_request_institutional_access.py +++ b/api_tests/requests/views/test_node_request_institutional_access.py @@ -208,8 +208,8 @@ def test_institutional_admin_unauth_institution(self, app, project, institution_ assert res.status_code == 403 assert 'Institutional request access is not enabled.' 
in res.json['errors'][0]['detail'] - @mock.patch('api.requests.serializers.send_mail') - @mock.patch('osf.utils.machines.mails.send_mail') + @mock.patch('api.requests.serializers.execute_email_send') + @mock.patch('osf.utils.machines.mails.execute_email_send') def test_email_send_institutional_request_specific_email( self, mock_send_mail_machines, @@ -255,7 +255,7 @@ def test_email_send_institutional_request_specific_email( } ) - @mock.patch('api.requests.serializers.send_mail') + @mock.patch('api.requests.serializers.execute_email_send') def test_email_not_sent_without_recipient(self, mock_mail, app, project, institutional_admin, url, create_payload, institution): """ @@ -269,7 +269,7 @@ def test_email_not_sent_without_recipient(self, mock_mail, app, project, institu # Check that an email is sent assert not mock_mail.called - @mock.patch('api.requests.serializers.send_mail') + @mock.patch('api.requests.serializers.execute_email_send') def test_email_not_sent_outside_institution(self, mock_mail, app, project, institutional_admin, url, create_payload, user_without_affiliation, institution): """ @@ -283,7 +283,7 @@ def test_email_not_sent_outside_institution(self, mock_mail, app, project, insti # Check that an email is sent assert not mock_mail.called - @mock.patch('api.requests.serializers.send_mail') + @mock.patch('api.requests.serializers.execute_email_send') def test_email_sent_on_creation( self, mock_mail, @@ -319,7 +319,7 @@ def test_email_sent_on_creation( } ) - @mock.patch('api.requests.serializers.send_mail') + @mock.patch('api.requests.serializers.execute_email_send') def test_bcc_institutional_admin( self, mock_mail, @@ -357,7 +357,7 @@ def test_bcc_institutional_admin( } ) - @mock.patch('api.requests.serializers.send_mail') + @mock.patch('api.requests.serializers.execute_email_send') def test_reply_to_institutional_admin( self, mock_mail, @@ -410,7 +410,7 @@ def test_access_requests_disabled_raises_permission_denied( assert res.status_code == 403 assert f"{node_with_disabled_access_requests._id} does not have Access Requests enabled" in res.json['errors'][0]['detail'] - @mock.patch('api.requests.serializers.send_mail') + @mock.patch('api.requests.serializers.execute_email_send') def test_placeholder_text_when_comment_is_empty( self, mock_mail, diff --git a/api_tests/requests/views/test_node_request_list.py b/api_tests/requests/views/test_node_request_list.py index ec62194b466..4396d524f6e 100644 --- a/api_tests/requests/views/test_node_request_list.py +++ b/api_tests/requests/views/test_node_request_list.py @@ -80,7 +80,7 @@ def test_requests_disabled_list(self, app, url, create_payload, project, admin): res = app.get(url, create_payload, auth=admin.auth, expect_errors=True) assert res.status_code == 403 - @mock.patch('website.mails.mails.send_mail') + @mock.patch('website.mails.mails.execute_email_send') def test_email_sent_to_all_admins_on_submit(self, mock_mail, app, project, noncontrib, url, create_payload, second_admin): project.is_public = True project.save() @@ -88,7 +88,7 @@ def test_email_sent_to_all_admins_on_submit(self, mock_mail, app, project, nonco assert res.status_code == 201 assert mock_mail.call_count == 2 - @mock.patch('website.mails.mails.send_mail') + @mock.patch('website.mails.mails.execute_email_send') def test_email_not_sent_to_parent_admins_on_submit(self, mock_mail, app, project, noncontrib, url, create_payload, second_admin): component = NodeFactory(parent=project, creator=second_admin) component.is_public = True diff --git 
a/api_tests/requests/views/test_preprint_request_list.py b/api_tests/requests/views/test_preprint_request_list.py index d23736aa312..3ff21c73ec1 100644 --- a/api_tests/requests/views/test_preprint_request_list.py +++ b/api_tests/requests/views/test_preprint_request_list.py @@ -65,7 +65,7 @@ def test_requester_cannot_submit_again(self, app, admin, create_payload, pre_mod assert res.json['errors'][0]['detail'] == 'Users may not have more than one withdrawal request per preprint.' @pytest.mark.skip('TODO: IN-284 -- add emails') - @mock.patch('website.reviews.listeners.mails.send_mail') + @mock.patch('website.reviews.listeners.mails.execute_email_send') def test_email_sent_to_moderators_on_submit(self, mock_mail, app, admin, create_payload, moderator, post_mod_preprint): res = app.post_json_api(self.url(post_mod_preprint), create_payload, auth=admin.auth) assert res.status_code == 201 diff --git a/api_tests/requests/views/test_request_actions_create.py b/api_tests/requests/views/test_request_actions_create.py index 732cbdd83b0..05404e5c0bb 100644 --- a/api_tests/requests/views/test_request_actions_create.py +++ b/api_tests/requests/views/test_request_actions_create.py @@ -190,7 +190,7 @@ def test_rejects_fail_with_requests_disabled(self, app, admin, url, node_request assert initial_state == node_request.machine_state assert node_request.creator not in node_request.target.contributors - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_email_sent_on_approve(self, mock_mail, app, admin, url, node_request): initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors @@ -202,7 +202,7 @@ def test_email_sent_on_approve(self, mock_mail, app, admin, url, node_request): assert node_request.creator in node_request.target.contributors assert mock_mail.call_count == 1 - @mock.patch('website.mails.mails.send_mail') + @mock.patch('website.mails.mails.execute_email_send') def test_email_sent_on_reject(self, mock_mail, app, admin, url, node_request): initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors @@ -214,7 +214,7 @@ def test_email_sent_on_reject(self, mock_mail, app, admin, url, node_request): assert node_request.creator not in node_request.target.contributors assert mock_mail.call_count == 1 - @mock.patch('website.mails.mails.send_mail') + @mock.patch('website.mails.mails.execute_email_send') def test_email_not_sent_on_reject(self, mock_mail, app, requester, url, node_request): initial_state = node_request.machine_state initial_comment = node_request.comment @@ -384,7 +384,7 @@ def test_write_contrib_and_noncontrib_cannot_edit_comment(self, app, write_contr assert initial_state == request.machine_state assert initial_comment == request.comment - @mock.patch('website.reviews.listeners.mails.send_mail') + @mock.patch('website.reviews.listeners.mails.execute_email_send') def test_email_sent_on_approve(self, mock_mail, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state @@ -400,7 +400,7 @@ def test_email_sent_on_approve(self, mock_mail, app, moderator, url, pre_request assert mock_mail.call_count == 4 @pytest.mark.skip('TODO: IN-331 -- add emails') - @mock.patch('website.reviews.listeners.mails.send_mail') + @mock.patch('website.reviews.listeners.mails.execute_email_send') def test_email_sent_on_reject(self, mock_mail, app, 
moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state @@ -414,7 +414,7 @@ def test_email_sent_on_reject(self, mock_mail, app, moderator, url, pre_request, assert mock_mail.call_count == 2 @pytest.mark.skip('TODO: IN-284/331 -- add emails') - @mock.patch('website.reviews.listeners.mails.send_mail') + @mock.patch('website.reviews.listeners.mails.execute_email_send') def test_email_not_sent_on_edit_comment(self, mock_mail, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state diff --git a/api_tests/users/views/test_user_claim.py b/api_tests/users/views/test_user_claim.py index 68e6cfd52dd..fbe273872f4 100644 --- a/api_tests/users/views/test_user_claim.py +++ b/api_tests/users/views/test_user_claim.py @@ -17,7 +17,7 @@ class TestClaimUser: @pytest.fixture def mock_mail(self): - with mock.patch('website.project.views.contributor.mails.send_mail') as patch: + with mock.patch('website.project.views.contributor.mails.execute_email_send') as patch: yield patch @pytest.fixture() diff --git a/api_tests/users/views/test_user_list.py b/api_tests/users/views/test_user_list.py index 23c3b762f97..04b2f644171 100644 --- a/api_tests/users/views/test_user_list.py +++ b/api_tests/users/views/test_user_list.py @@ -277,7 +277,7 @@ def tearDown(self, app): app.reset() # clears cookies OSFUser.remove() - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_logged_in_user_with_basic_auth_cannot_create_other_user_or_send_mail( self, mock_mail, app, user, email_unconfirmed, data, url_base): assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 @@ -292,7 +292,7 @@ def test_logged_in_user_with_basic_auth_cannot_create_other_user_or_send_mail( assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_logged_out_user_cannot_create_other_user_or_send_mail( self, mock_mail, app, email_unconfirmed, data, url_base): assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 @@ -307,7 +307,7 @@ def test_logged_out_user_cannot_create_other_user_or_send_mail( assert mock_mail.call_count == 0 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_cookied_requests_can_create_and_email( self, mock_mail, app, user, email_unconfirmed, data, url_base): # NOTE: skipped tests are not tested during session refactor, only updated to fix import @@ -327,7 +327,7 @@ def test_cookied_requests_can_create_and_email( assert mock_mail.call_count == 1 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( @@ -367,7 +367,7 @@ def test_properly_scoped_token_can_create_and_send_email( assert mock_mail.call_count == 1 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') 
# TODO: Remove when available outside of DEV_MODE @unittest.skipIf( @@ -409,7 +409,7 @@ def test_properly_scoped_token_does_not_send_email_without_kwarg( assert mock_mail.call_count == 0 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( @@ -454,7 +454,7 @@ def test_properly_scoped_token_can_create_without_username_but_not_send_email( assert OSFUser.objects.filter(fullname='No Email').count() == 1 assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') def test_improperly_scoped_token_can_not_create_or_email( self, mock_auth, mock_mail, app, user, email_unconfirmed, data, url_base): @@ -492,7 +492,7 @@ def test_improperly_scoped_token_can_not_create_or_email( assert mock_mail.call_count == 0 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( diff --git a/api_tests/users/views/test_user_message_institutional_access.py b/api_tests/users/views/test_user_message_institutional_access.py index 36f2a59e252..ef507cacbb3 100644 --- a/api_tests/users/views/test_user_message_institutional_access.py +++ b/api_tests/users/views/test_user_message_institutional_access.py @@ -85,7 +85,7 @@ def payload(self, institution, user): } } - @mock.patch('osf.models.user_message.send_mail') + @mock.patch('osf.models.user_message.execute_email_send') def test_institutional_admin_can_create_message(self, mock_send_mail, app, institutional_admin, institution, url_with_affiliation, payload): """ Ensure an institutional admin can create a `UserMessage` with a `message` and `institution`. @@ -110,7 +110,7 @@ def test_institutional_admin_can_create_message(self, mock_send_mail, app, insti assert 'Requesting user access for collaboration' in mock_send_mail.call_args[1]['message_text'] assert user_message._id == data['id'] - @mock.patch('osf.models.user_message.send_mail') + @mock.patch('osf.models.user_message.execute_email_send') def test_institutional_admin_can_not_create_message(self, mock_send_mail, app, institutional_admin_on_institution_without_access, institution_without_access, url_with_affiliation_on_institution_without_access, payload): @@ -197,7 +197,7 @@ def test_admin_cannot_message_user_outside_institution( assert ('Cannot send to a recipient that is not affiliated with the provided institution.' in res.json['errors'][0]['detail']['user']) - @mock.patch('osf.models.user_message.send_mail') + @mock.patch('osf.models.user_message.execute_email_send') def test_cc_institutional_admin( self, mock_send_mail, @@ -239,7 +239,7 @@ def test_cc_institutional_admin( institution=institution, ) - @mock.patch('osf.models.user_message.send_mail') + @mock.patch('osf.models.user_message.execute_email_send') def test_cc_field_defaults_to_false(self, mock_send_mail, app, institutional_admin, url_with_affiliation, user_with_affiliation, institution, payload): """ Ensure the `cc` field defaults to `false` when not provided in the payload. 
@@ -261,7 +261,7 @@ def test_cc_field_defaults_to_false(self, mock_send_mail, app, institutional_adm institution=institution, ) - @mock.patch('osf.models.user_message.send_mail') + @mock.patch('osf.models.user_message.execute_email_send') def test_reply_to_header_set(self, mock_send_mail, app, institutional_admin, user_with_affiliation, institution, url_with_affiliation, payload): """ Ensure that the 'Reply-To' header is correctly set to the sender's email address. diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index cd4e25ff654..7e7f6284edb 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -48,7 +48,7 @@ def test_get(self, app, user_one, url): res = app.get(url, auth=user_one.auth, expect_errors=True) assert res.status_code == 405 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_post(self, mock_mail, app, user_one, user_two, url, payload): # Logged out res = app.post_json_api(url, payload, expect_errors=True) @@ -66,7 +66,7 @@ def test_post(self, mock_mail, app, user_one, user_two, url, payload): assert user_one.email_last_sent is not None assert mock_mail.call_count == 1 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_post_invalid_type(self, mock_mail, app, user_one, url, payload): assert user_one.email_last_sent is None payload['data']['type'] = 'Invalid Type' @@ -76,7 +76,7 @@ def test_post_invalid_type(self, mock_mail, app, user_one, url, payload): assert user_one.email_last_sent is None assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_exceed_throttle(self, mock_mail, app, user_one, url, payload): assert user_one.email_last_sent is None res = app.post_json_api(url, payload, auth=user_one.auth) @@ -192,7 +192,7 @@ def csrf_token(self): def test_get(self, app, url, user_one): encoded_email = urllib.parse.quote(user_one.email) url = f'{url}?email={encoded_email}' - with mock.patch.object(mails, 'send_mail', return_value=None) as mock_send_mail: + with mock.patch.object(mails, 'execute_email_send', return_value=None) as mock_send_mail: res = app.get(url) assert res.status_code == 200 @@ -206,7 +206,7 @@ def test_get(self, app, url, user_one): def test_get_invalid_email(self, app, url): url = f'{url}?email={'invalid_email'}' - with mock.patch.object(mails, 'send_mail', return_value=None) as mock_send_mail: + with mock.patch.object(mails, 'execute_email_send', return_value=None) as mock_send_mail: res = app.get(url) assert res.status_code == 200 assert not mock_send_mail.called diff --git a/api_tests/users/views/test_user_settings_detail.py b/api_tests/users/views/test_user_settings_detail.py index cf9194409f6..c26a4628d56 100644 --- a/api_tests/users/views/test_user_settings_detail.py +++ b/api_tests/users/views/test_user_settings_detail.py @@ -241,7 +241,7 @@ def payload(self, user_one): } } - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_patch_requested_deactivation(self, mock_mail, app, user_one, user_two, url, payload): # Logged out res = app.patch_json_api(url, payload, expect_errors=True) @@ -271,7 +271,7 @@ def test_patch_requested_deactivation(self, mock_mail, app, user_one, user_two, user_one.reload() assert 
user_one.requested_deactivation is False - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_patch_invalid_type(self, mock_mail, app, user_one, url, payload): assert user_one.email_last_sent is None payload['data']['type'] = 'Invalid Type' @@ -281,7 +281,7 @@ def test_patch_invalid_type(self, mock_mail, app, user_one, url, payload): assert user_one.email_last_sent is None assert mock_mail.call_count == 0 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_exceed_throttle(self, mock_mail, app, user_one, url, payload): assert user_one.email_last_sent is None res = app.patch_json_api(url, payload, auth=user_one.auth) diff --git a/osf_tests/management_commands/test_check_crossref_dois.py b/osf_tests/management_commands/test_check_crossref_dois.py index c4e37d9c389..df7410c2c21 100644 --- a/osf_tests/management_commands/test_check_crossref_dois.py +++ b/osf_tests/management_commands/test_check_crossref_dois.py @@ -60,7 +60,7 @@ def test_check_crossref_dois(self, crossref_response, stuck_preprint, preprint): assert stuck_preprint.identifiers.count() == 1 assert stuck_preprint.identifiers.first().value == doi - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_report_stuck_dois(self, mock_email, stuck_preprint): report_stuck_dois(dry_run=False) guid = stuck_preprint.guids.first()._id diff --git a/osf_tests/management_commands/test_email_all_users.py b/osf_tests/management_commands/test_email_all_users.py index c10c84b49d1..f9477cf93de 100644 --- a/osf_tests/management_commands/test_email_all_users.py +++ b/osf_tests/management_commands/test_email_all_users.py @@ -42,7 +42,7 @@ def unregistered_user(self): return UserFactory(is_registered=False) @pytest.mark.django_db - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_email_all_users_dry(self, mock_email, superuser): email_all_users('TOU_NOTIF', dry_run=True) @@ -53,7 +53,7 @@ def test_email_all_users_dry(self, mock_email, superuser): ) @pytest.mark.django_db - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_dont_email_inactive_users( self, mock_email, deleted_user, inactive_user, unconfirmed_user, unregistered_user): @@ -62,7 +62,7 @@ def test_dont_email_inactive_users( mock_email.assert_not_called() @pytest.mark.django_db - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_email_all_users_offset(self, mock_email, user, user2): email_all_users('TOU_NOTIF', offset=1, start_id=0) diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index 59c178b839d..eb8b9d6f6d9 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -717,7 +717,7 @@ def test_archive_success_same_file_in_component(self): class TestArchiverUtils(ArchiverTestCase): - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_handle_archive_fail(self, mock_send_mail): archiver_utils.handle_archive_fail( ARCHIVER_NETWORK_ERROR, @@ -730,7 +730,7 @@ def test_handle_archive_fail(self, mock_send_mail): self.dst.reload() assert self.dst.is_deleted - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_handle_archive_fail_copy(self, mock_send_mail): url = settings.INTERNAL_DOMAIN + self.src._id archiver_utils.handle_archive_fail( @@ 
-762,7 +762,7 @@ def test_handle_archive_fail_copy(self, mock_send_mail): call(**args_desk), ], any_order=True) - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_handle_archive_fail_size(self, mock_send_mail): url = settings.INTERNAL_DOMAIN + self.src._id archiver_utils.handle_archive_fail( @@ -931,14 +931,14 @@ def test_archive_callback_pending(self, mock_delay): ARCHIVER_SUCCESS ) self.dst.archive_job.save() - with mock.patch('website.mails.send_mail') as mock_send: + with mock.patch('website.mails.execute_email_send') as mock_send: with mock.patch('website.archiver.utils.handle_archive_fail') as mock_fail: listeners.archive_callback(self.dst) assert not mock_send.called assert not mock_fail.called assert mock_delay.called - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') @mock.patch('website.archiver.tasks.archive_success.delay') def test_archive_callback_done_success(self, mock_send, mock_archive_success): self.dst.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) @@ -946,7 +946,7 @@ def test_archive_callback_done_success(self, mock_send, mock_archive_success): listeners.archive_callback(self.dst) assert mock_send.call_count == 1 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') @mock.patch('website.archiver.tasks.archive_success.delay') def test_archive_callback_done_embargoed(self, mock_send, mock_archive_success): end_date = timezone.now() + datetime.timedelta(days=30) @@ -1037,7 +1037,7 @@ def test_archive_tree_finished_false_for_partial_archive(self): rsibling.save() assert not reg.archive_job.archive_tree_finished() - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') @mock.patch('website.archiver.tasks.archive_success.delay') def test_archive_callback_on_tree_sends_only_one_email(self, mock_send_success, mock_arhive_success): proj = factories.NodeFactory() @@ -1120,7 +1120,7 @@ def test_archiving_registrations_not_added_to_search_before_archival(self, mock_ assert not mock_update_search.called @mock.patch('osf.models.AbstractNode.update_search') - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') @mock.patch('website.archiver.tasks.archive_success.delay') def test_archiving_nodes_added_to_search_on_archive_success_if_public(self, mock_update_search, mock_send, mock_archive_success): proj = factories.ProjectFactory() @@ -1135,7 +1135,7 @@ def test_archiving_nodes_added_to_search_on_archive_success_if_public(self, mock @pytest.mark.enable_search @mock.patch('website.search.elastic_search.delete_doc') - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_archiving_nodes_not_added_to_search_on_archive_failure(self, mock_send, mock_delete_index_node): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj, archive=True) @@ -1148,7 +1148,7 @@ def test_archiving_nodes_not_added_to_search_on_archive_failure(self, mock_send, assert mock_delete_index_node.called @mock.patch('osf.models.AbstractNode.update_search') - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_archiving_nodes_not_added_to_search_on_archive_incomplete(self, mock_send, mock_update_search): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj) diff --git a/osf_tests/test_collection.py b/osf_tests/test_collection.py index d79e03a8323..3a8cf880520 100644 --- 
a/osf_tests/test_collection.py +++ b/osf_tests/test_collection.py @@ -131,8 +131,8 @@ def test_node_removed_from_collection_on_privacy_change_notify(self, auth, provi associated_collections = provider_collected_node.guids.first().collectionsubmission_set assert associated_collections.count() == 3 - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering provider_collected_node.set_privacy('private', auth=auth) assert mock_send.called @@ -148,8 +148,8 @@ def test_node_removed_from_collection_on_privacy_change_no_provider(self, auth, associated_collections = collected_node.guids.first().collectionsubmission_set assert associated_collections.count() == 3 - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering collected_node.set_privacy('private', auth=auth) assert not mock_send.called diff --git a/osf_tests/test_collection_submission.py b/osf_tests/test_collection_submission.py index 97ea2c8692a..fe99a65f751 100644 --- a/osf_tests/test_collection_submission.py +++ b/osf_tests/test_collection_submission.py @@ -162,8 +162,8 @@ def test_submit(self, moderated_collection_submission): assert moderated_collection_submission.state == CollectionSubmissionStates.PENDING def test_notify_contributors_pending(self, node, moderated_collection): - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering collection_submission = CollectionSubmission( guid=node.guids.first(), @@ -218,8 +218,8 @@ def test_accept_success(self, node, moderated_collection_submission): def test_notify_moderated_accepted(self, node, moderated_collection_submission): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering moderated_collection_submission.accept(user=moderator, comment='Test Comment') assert mock_send.called @@ -244,8 +244,8 @@ def test_reject_success(self, node, moderated_collection_submission): def test_notify_moderated_rejected(self, node, moderated_collection_submission): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering moderated_collection_submission.reject(user=moderator, comment='Test Comment') assert mock_send.called @@ -277,8 +277,8 @@ def test_remove_success(self, node, user_role, moderated_collection_submission): def test_notify_moderated_removed_moderator(self, node, 
moderated_collection_submission): moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering moderated_collection_submission.remove(user=moderator, comment='Test Comment') assert mock_send.called @@ -293,8 +293,8 @@ def test_notify_moderated_removed_moderator(self, node, moderated_collection_sub def test_notify_moderated_removed_admin(self, node, moderated_collection_submission): moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering moderated_collection_submission.remove(user=moderator, comment='Test Comment') assert mock_send.called @@ -380,8 +380,8 @@ def test_remove_success(self, user_role, node, unmoderated_collection_submission def test_notify_moderated_removed_admin(self, node, unmoderated_collection_submission): unmoderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering unmoderated_collection_submission.remove(user=moderator, comment='Test Comment') assert mock_send.called @@ -484,8 +484,8 @@ def test_accept_success(self, node, hybrid_moderated_collection_submission): def test_notify_moderated_accepted(self, node, hybrid_moderated_collection_submission): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') assert mock_send.called @@ -511,8 +511,8 @@ def test_reject_success(self, node, hybrid_moderated_collection_submission): def test_notify_moderated_rejected(self, node, hybrid_moderated_collection_submission): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') assert mock_send.called @@ -544,8 +544,8 @@ def test_remove_success(self, node, user_role, hybrid_moderated_collection_submi def test_notify_moderated_removed_moderator(self, node, 
hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') assert mock_send.called @@ -560,8 +560,8 @@ def test_notify_moderated_removed_moderator(self, node, hybrid_moderated_collect def test_notify_moderated_removed_admin(self, node, hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - send_mail = mails.send_mail - with mock.patch.object(collection_submission_mail, 'send_mail') as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: mock_send.side_effect = send_mail # implicitly test rendering hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') assert mock_send.called diff --git a/osf_tests/test_institution.py b/osf_tests/test_institution.py index 77bf32377b2..449d35b17a5 100644 --- a/osf_tests/test_institution.py +++ b/osf_tests/test_institution.py @@ -147,7 +147,7 @@ def test_reactivate_institution(self): assert institution.deactivated is None @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) + @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) def test_send_deactivation_email_call_count(self, mock_send_mail): institution = InstitutionFactory() user_1 = UserFactory() @@ -160,7 +160,7 @@ def test_send_deactivation_email_call_count(self, mock_send_mail): assert mock_send_mail.call_count == 2 @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.send_mail', return_value=None, side_effect=mails.send_mail) + @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) def test_send_deactivation_email_call_args(self, mock_send_mail): institution = InstitutionFactory() user = UserFactory() diff --git a/osf_tests/test_merging_users.py b/osf_tests/test_merging_users.py index 0bb124c4f13..31ee4aa6d52 100644 --- a/osf_tests/test_merging_users.py +++ b/osf_tests/test_merging_users.py @@ -286,7 +286,7 @@ def test_merge_unregistered(self): assert self.user.is_invited is True assert self.user in self.project_with_unreg_contrib.contributors - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_merge_doesnt_send_signal(self, mock_notify): #Explictly reconnect signal as it is disconnected by default for test contributor_added.connect(notify_added_contributor) diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index a8c2245b9a7..030ad52a849 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -1311,7 +1311,7 @@ class TestContributorAddedSignal: def disconnected_signals(self): return None - @mock.patch('website.project.views.contributor.mails.send_mail') + 
@mock.patch('website.project.views.contributor.mails.execute_email_send') def test_add_contributors_sends_contributor_added_signal(self, mock_send_mail, node, auth): user = UserFactory() contributors = [{ @@ -2222,7 +2222,7 @@ def test_check_spam_on_private_node(self, project, user): assert not project.is_public @pytest.mark.enable_enqueue_task - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) @pytest.mark.skip('Technically still true, but skipping because mocking is outdated') @@ -2253,7 +2253,7 @@ def test_check_spam_on_private_node_bans_new_spam_user(self, mock_send_mail, pro project3.reload() assert project3.is_public is True - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) def test_check_spam_on_private_node_does_not_ban_existing_user(self, mock_send_mail, project, user): diff --git a/osf_tests/test_queued_mail.py b/osf_tests/test_queued_mail.py index 4554b08579a..d9429d9d384 100644 --- a/osf_tests/test_queued_mail.py +++ b/osf_tests/test_queued_mail.py @@ -35,14 +35,14 @@ def queue_mail(self, mail, user, send_at=None, **kwargs): ) return mail - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_no_login_presend_for_active_user(self, mock_mail, user): mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() + dt.timedelta(seconds=10) user.save() assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_no_login_presend_for_inactive_user(self, mock_mail, user): mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() - dt.timedelta(weeks=10) @@ -50,12 +50,12 @@ def test_no_login_presend_for_inactive_user(self, mock_mail, user): assert timezone.now() - dt.timedelta(days=1) > user.date_last_login assert bool(mail.send_mail()) is True - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_no_addon_presend(self, mock_mail, user): mail = self.queue_mail(mail=NO_ADDON, user=user) assert mail.send_mail() is True - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_new_public_project_presend_for_no_project(self, mock_mail, user): mail = self.queue_mail( mail=NEW_PUBLIC_PROJECT, @@ -65,7 +65,7 @@ def test_new_public_project_presend_for_no_project(self, mock_mail, user): ) assert bool(mail.send_mail()) is False - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_new_public_project_presend_success(self, mock_mail, user): node = NodeFactory(is_public=True) mail = self.queue_mail( @@ -76,7 +76,7 @@ def test_new_public_project_presend_success(self, mock_mail, user): ) assert bool(mail.send_mail()) is True - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_welcome_osf4m_presend(self, mock_mail, user): user.date_last_login = timezone.now() - dt.timedelta(days=13) user.save() @@ -90,7 +90,7 @@ def test_welcome_osf4m_presend(self, mock_mail, user): assert bool(mail.send_mail()) is True assert 
mail.data['downloads'] == 0 - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_finding_other_emails_sent_to_user(self, mock_mail, user): mail = self.queue_mail( user=user, @@ -100,7 +100,7 @@ def test_finding_other_emails_sent_to_user(self, mock_mail, user): mail.send_mail() assert len(mail.find_sent_of_same_type_and_user()) == 1 - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_user_is_active(self, mock_mail, user): mail = self.queue_mail( user=user, @@ -108,7 +108,7 @@ def test_user_is_active(self, mock_mail, user): ) assert bool(mail.send_mail()) is True - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_user_is_not_active_no_password(self, mock_mail): user = UserFactory.build() user.set_unusable_password() @@ -119,7 +119,7 @@ def test_user_is_not_active_no_password(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_user_is_not_active_not_registered(self, mock_mail): user = UserFactory(is_registered=False) mail = self.queue_mail( @@ -128,7 +128,7 @@ def test_user_is_not_active_not_registered(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_user_is_not_active_is_merged(self, mock_mail): other_user = UserFactory() user = UserFactory(merged_by=other_user) @@ -138,7 +138,7 @@ def test_user_is_not_active_is_merged(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_user_is_not_active_is_disabled(self, mock_mail): user = UserFactory(date_disabled=timezone.now()) mail = self.queue_mail( @@ -147,7 +147,7 @@ def test_user_is_not_active_is_disabled(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_user_is_not_active_is_not_confirmed(self, mock_mail): user = UserFactory(date_confirmed=None) mail = self.queue_mail( diff --git a/osf_tests/test_registration_moderation_notifications.py b/osf_tests/test_registration_moderation_notifications.py index ab4c7847e4d..81659d79b08 100644 --- a/osf_tests/test_registration_moderation_notifications.py +++ b/osf_tests/test_registration_moderation_notifications.py @@ -150,8 +150,8 @@ def test_submit_notifications(self, registration, moderator, admin, contrib, pro # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call/args and also implicitly ensures # that the email acutally renders as normal in send_mail. - send_mail = mails.send_mail - with mock.patch.object(listeners.mails, 'send_mail', side_effect=send_mail) as mock_send_mail: + send_mail = mails.execute_email_send + with mock.patch.object(listeners.mails, 'execute_email_send', side_effect=send_mail) as mock_send_mail: notify_submit(registration, admin) assert len(mock_send_mail.call_args_list) == 2 @@ -378,8 +378,8 @@ def test_withdrawal_registration_accepted_notifications(self, registration_with_ # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.send_mail - with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: + send_mail = mails.execute_email_send + with mock.patch.object(machines.mails, 'execute_email_send', side_effect=send_mail) as mock_email: notify_withdraw_registration(registration_with_retraction, withdraw_action) assert len(mock_email.call_args_list) == 2 @@ -436,8 +436,8 @@ def test_withdrawal_registration_rejected_notifications(self, registration, cont # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. - send_mail = mails.send_mail - with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: + send_mail = mails.execute_email_send + with mock.patch.object(machines.mails, 'execute_email_send', side_effect=send_mail) as mock_email: notify_reject_withdraw_request(registration, withdraw_request_action) assert len(mock_email.call_args_list) == 2 @@ -488,8 +488,8 @@ def test_withdrawal_registration_force_notifications(self, registration_with_ret # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. - send_mail = mails.send_mail - with mock.patch.object(machines.mails, 'send_mail', side_effect=send_mail) as mock_email: + send_mail = mails.execute_email_send + with mock.patch.object(machines.mails, 'execute_email_send', side_effect=send_mail) as mock_email: notify_withdraw_registration(registration_with_retraction, withdraw_action) assert len(mock_email.call_args_list) == 2 @@ -540,7 +540,7 @@ def test_withdrawal_registration_force_notifications(self, registration_with_ret def test_submissions_and_withdrawals_both_appear_in_moderator_digest(self, digest_type, expected_recipient, registration, admin, provider): # Invoke the fixture function to get the recipient because parametrize expected_recipient = expected_recipient(provider) - with mock.patch('website.reviews.listeners.mails.send_mail'): + with mock.patch('website.reviews.listeners.mails.execute_email_send'): notify_submit(registration, admin) notify_moderator_registration_requests_withdrawal(registration, admin) @@ -571,8 +571,8 @@ def test_moderator_digest_emails_render(self, registration, admin, moderator): # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.send_mail - with mock.patch.object(tasks.mails, 'send_mail', side_effect=send_mail) as mock_send_mail: + send_mail = mails.execute_email_send + with mock.patch.object(tasks.mails, 'execute_email_send', side_effect=send_mail) as mock_send_mail: tasks._send_reviews_moderator_emails('email_transactional') mock_send_mail.assert_called() diff --git a/osf_tests/test_reviewable.py b/osf_tests/test_reviewable.py index 1d25ca4adac..16131961d7e 100644 --- a/osf_tests/test_reviewable.py +++ b/osf_tests/test_reviewable.py @@ -34,7 +34,7 @@ def test_state_changes(self, _): from_db.refresh_from_db() assert from_db.machine_state == DefaultStates.ACCEPTED.value - @mock.patch('website.reviews.listeners.mails.send_mail') + @mock.patch('website.reviews.listeners.mails.execute_email_send') def test_reject_resubmission_sends_emails(self, send_mail): user = AuthUserFactory() preprint = PreprintFactory( diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index 6c6699fb74c..5672f832f18 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -143,7 +143,7 @@ def test_create_initial_response_assigns_default_values(self, registration): assert block.response == DEFAULT_SCHEMA_RESPONSE_VALUES[block.schema_key] def test_create_initial_response_does_not_notify(self, registration, admin_user): - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: schema_response.SchemaResponse.create_initial_response( parent=registration, initiator=admin_user ) @@ -254,8 +254,8 @@ def test_create_from_previous_response(self, registration, initial_response): def test_create_from_previous_response_notification( self, initial_response, admin_user, notification_recipients): - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: mock_send.side_effect = send_mail # implicitly test rendering schema_response.SchemaResponse.create_from_previous_response( previous_response=initial_response, initiator=admin_user @@ -584,8 +584,8 @@ def test_submit_response_notification( revised_response.revision_justification = 'has for valid revision_justification for submission' revised_response.save() - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: mock_send.side_effect = send_mail # implicitly test rendering revised_response.submit(user=admin_user, required_approvers=[admin_user]) @@ -598,7 +598,7 @@ def test_no_submit_notification_on_initial_response(self, initial_response, admi initial_response.update_responses({'q1': 'must change one response or can\'t submit'}) initial_response.revision_justification = 'has for valid revision_justification for submission' initial_response.save() - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: initial_response.submit(user=admin_user, required_approvers=[admin_user]) assert not mock_send.called @@ -687,8 +687,8 @@ def test_approve_response_notification( 
revised_response.save() revised_response.pending_approvers.add(admin_user, alternate_user) - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: mock_send.side_effect = send_mail # implicitly test rendering revised_response.approve(user=admin_user) assert not mock_send.called # Should only send email on final approval @@ -703,7 +703,7 @@ def test_no_approve_notification_on_initial_response(self, initial_response, adm initial_response.save() initial_response.pending_approvers.add(admin_user) - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: initial_response.approve(user=admin_user) assert not mock_send.called @@ -761,8 +761,8 @@ def test_reject_response_notification( revised_response.save() revised_response.pending_approvers.add(admin_user) - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: mock_send.side_effect = send_mail # implicitly test rendering revised_response.reject(user=admin_user) @@ -775,7 +775,7 @@ def test_no_reject_notification_on_initial_response(self, initial_response, admi initial_response.save() initial_response.pending_approvers.add(admin_user) - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: initial_response.reject(user=admin_user) assert not mock_send.called @@ -875,8 +875,8 @@ def test_accept_notification_sent_on_admin_approval(self, revised_response, admi revised_response.save() revised_response.pending_approvers.add(admin_user) - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: mock_send.side_effect = send_mail revised_response.approve(user=admin_user) assert mock_send.called @@ -929,8 +929,8 @@ def test_moderator_accept_notification( revised_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) revised_response.save() - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: mock_send.side_effect = send_mail # implicitly test rendering revised_response.accept(user=moderator) @@ -943,7 +943,7 @@ def test_no_moderator_accept_notification_on_initial_response( initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) initial_response.save() - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: initial_response.accept(user=moderator) assert not mock_send.called @@ -973,8 +973,8 @@ def test_moderator_reject_notification( revised_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) 
revised_response.save() - send_mail = mails.send_mail - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + send_mail = mails.execute_email_send + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: mock_send.side_effect = send_mail # implicitly test rendering revised_response.reject(user=moderator) @@ -987,7 +987,7 @@ def test_no_moderator_reject_notification_on_initial_response( initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) initial_response.save() - with mock.patch.object(schema_response.mails, 'send_mail', autospec=True) as mock_send: + with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: initial_response.reject(user=moderator) assert not mock_send.called diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index 6ee847686d0..0df3ead35e8 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -896,21 +896,21 @@ def test_change_password(self, user): user.change_password(old_password, new_password, confirm_password) assert bool(user.check_password(new_password)) is True - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_set_password_notify_default(self, mock_send_mail, user): old_password = 'password' user.set_password(old_password) user.save() assert mock_send_mail.called is True - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_set_password_no_notify(self, mock_send_mail, user): old_password = 'password' user.set_password(old_password, notify=False) user.save() assert mock_send_mail.called is False - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_check_password_upgrade_hasher_no_notify(self, mock_send_mail, user, settings): # NOTE: settings fixture comes from pytest-django. 
# changes get reverted after tests run diff --git a/scripts/tests/test_deactivate_requested_accounts.py b/scripts/tests/test_deactivate_requested_accounts.py index 765bf29bb18..eb14fc43278 100644 --- a/scripts/tests/test_deactivate_requested_accounts.py +++ b/scripts/tests/test_deactivate_requested_accounts.py @@ -26,7 +26,7 @@ def user_requested_deactivation_with_node(self): user.save() return user - @mock.patch('osf.management.commands.deactivate_requested_accounts.mails.send_mail') + @mock.patch('osf.management.commands.deactivate_requested_accounts.mails.execute_email_send') def test_deactivate_user_with_no_content(self, mock_mail, user_requested_deactivation): deactivate_requested_accounts(dry_run=False) @@ -41,7 +41,7 @@ def test_deactivate_user_with_no_content(self, mock_mail, user_requested_deactiv contact_email=settings.OSF_CONTACT_EMAIL, user=user_requested_deactivation) - @mock.patch('osf.management.commands.deactivate_requested_accounts.mails.send_mail') + @mock.patch('osf.management.commands.deactivate_requested_accounts.mails.execute_email_send') def test_deactivate_user_with_content(self, mock_mail, user_requested_deactivation_with_node): deactivate_requested_accounts(dry_run=False) @@ -53,4 +53,3 @@ def test_deactivate_user_with_content(self, mock_mail, user_requested_deactivati mail=mails.REQUEST_DEACTIVATION, to_addr=settings.OSF_SUPPORT_EMAIL, user=user_requested_deactivation_with_node) - diff --git a/scripts/tests/test_send_queued_mails.py b/scripts/tests/test_send_queued_mails.py index 142eb75c4a6..1fad2a23c37 100644 --- a/scripts/tests/test_send_queued_mails.py +++ b/scripts/tests/test_send_queued_mails.py @@ -29,13 +29,13 @@ def queue_mail(self, mail_type=NO_ADDON, user=None, send_at=None): fullname=user.fullname if user else self.user.fullname, ) - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_queue_addon_mail(self, mock_send): self.queue_mail() main(dry_run=False) assert mock_send.called - @mock.patch('osf.models.queued_mail.send_mail') + @mock.patch('osf.models.queued_mail.execute_email_send') def test_no_two_emails_to_same_person(self, mock_send): user = UserFactory() user.osf_mailing_lists[settings.OSF_HELP_LIST] = True diff --git a/scripts/tests/test_triggered_mails.py b/scripts/tests/test_triggered_mails.py index ca583423fbb..56c006b9c43 100644 --- a/scripts/tests/test_triggered_mails.py +++ b/scripts/tests/test_triggered_mails.py @@ -38,7 +38,7 @@ def test_trigger_no_login_mail(self, mock_queue): send_at=mock.ANY, ) - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_find_inactive_users_with_no_inactivity_email_sent_or_queued(self, mock_mail): user_active = UserFactory(fullname='Spot') user_inactive = UserFactory(fullname='Nucha') diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 83ca7180388..a493e19fac6 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -211,7 +211,7 @@ def test_add_contributors_post_only_sends_one_email_to_unreg_user( # finalize_invitation should only have been called once assert mock_send_claim_email.call_count == 1 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_add_contributors_post_only_sends_one_email_to_registered_user(self, mock_send_mail): # Project has components comp1 = NodeFactory(creator=self.creator, parent=self.project) @@ -239,7 +239,7 @@ def 
test_add_contributors_post_only_sends_one_email_to_registered_user(self, moc # send_mail should only have been called once assert mock_send_mail.call_count == 1 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self, mock_send_mail): # Project has a component with a sub-component component = NodeFactory(creator=self.creator, parent=self.project) @@ -286,7 +286,7 @@ def test_email_sent_when_unreg_user_is_added(self, send_mail): self.app.post(url, json=payload, follow_redirects=True, auth=self.creator.auth) send_mail.assert_called_with(email, ANY,ANY,notify=True, email_template='default') - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_email_sent_when_reg_user_is_added(self, send_mail): contributor = UserFactory() contributors = [{ @@ -315,7 +315,7 @@ def test_email_sent_when_reg_user_is_added(self, send_mail): ) assert contributor.contributor_added_email_records[project._id]['last_sent'] == approx(int(time.time()), rel=1) - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_contributor_added_email_sent_to_unreg_user(self, send_mail): unreg_user = UnregUserFactory() project = ProjectFactory() @@ -323,20 +323,20 @@ def test_contributor_added_email_sent_to_unreg_user(self, send_mail): project.save() assert send_mail.called - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_forking_project_does_not_send_contributor_added_email(self, send_mail): project = ProjectFactory() project.fork_node(auth=Auth(project.creator)) assert not send_mail.called - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_templating_project_does_not_send_contributor_added_email(self, send_mail): project = ProjectFactory() project.use_as_template(auth=Auth(project.creator)) assert not send_mail.called @mock.patch('website.archiver.tasks.archive') - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_registering_project_does_not_send_contributor_added_email(self, send_mail, mock_archive): project = ProjectFactory() provider = RegistrationProviderFactory() @@ -349,7 +349,7 @@ def test_registering_project_does_not_send_contributor_added_email(self, send_ma ) assert not send_mail.called - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_notify_contributor_email_does_not_send_before_throttle_expires(self, send_mail): contributor = UserFactory() project = ProjectFactory() @@ -361,7 +361,7 @@ def test_notify_contributor_email_does_not_send_before_throttle_expires(self, se notify_added_contributor(project, contributor, auth) assert send_mail.call_count == 1 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_notify_contributor_email_sends_after_throttle_expires(self, send_mail): throttle = 0.5 @@ -375,7 +375,7 @@ def test_notify_contributor_email_sends_after_throttle_expires(self, send_mail): notify_added_contributor(project, contributor, auth, throttle=throttle) assert send_mail.call_count == 2 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_add_contributor_to_fork_sends_email(self, send_mail): contributor = UserFactory() fork = self.project.fork_node(auth=Auth(self.creator)) @@ -384,7 +384,7 @@ def 
test_add_contributor_to_fork_sends_email(self, send_mail): assert send_mail.called assert send_mail.call_count == 1 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_add_contributor_to_template_sends_email(self, send_mail): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) @@ -393,13 +393,13 @@ def test_add_contributor_to_template_sends_email(self, send_mail): assert send_mail.called assert send_mail.call_count == 1 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_creating_fork_does_not_email_creator(self, send_mail): contributor = UserFactory() fork = self.project.fork_node(auth=Auth(self.creator)) assert not send_mail.called - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_creating_template_does_not_email_creator(self, send_mail): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) @@ -534,7 +534,7 @@ def test_invite_contributor_requires_fullname(self): ) assert res.status_code == http_status.HTTP_400_BAD_REQUEST - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_send_claim_email_to_given_email(self, send_mail): project = ProjectFactory() given_email = fake_email() @@ -561,7 +561,7 @@ def test_send_claim_email_to_given_email(self, send_mail): osf_contact_email=settings.OSF_CONTACT_EMAIL ) - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_send_claim_email_to_referrer(self, send_mail): project = ProjectFactory() referrer = project.creator @@ -590,7 +590,7 @@ def test_send_claim_email_to_referrer(self, send_mail): osf_contact_email=settings.OSF_CONTACT_EMAIL ) - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_send_claim_email_before_throttle_expires(self, send_mail): project = ProjectFactory() given_email = fake_email() @@ -737,7 +737,7 @@ def test_claim_user_invited_with_no_email_posts_to_claim_form(self): }) assert res.status_code == 400 - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_claim_user_post_with_registered_user_id(self, send_mail): # registered user who is attempting to claim the unclaimed contributor reg_user = UserFactory() @@ -766,7 +766,7 @@ def test_claim_user_post_with_registered_user_id(self, send_mail): 'fullname': self.given_name, } - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_send_claim_registered_email(self, mock_send_mail): reg_user = UserFactory() send_claim_registered_email( @@ -780,7 +780,7 @@ def test_send_claim_registered_email(self, mock_send_mail): second_call_args = mock_send_mail.call_args_list[1][0] assert second_call_args[0] == reg_user.username - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_send_claim_registered_email_before_throttle_expires(self, mock_send_mail): reg_user = UserFactory() send_claim_registered_email( @@ -973,7 +973,7 @@ def test_posting_to_claim_form_sets_fullname_to_given_name(self, 
mock_update_sea assert unreg.given_name == parsed_name['given_name'] assert unreg.family_name == parsed_name['family_name'] - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_claim_user_post_returns_fullname(self, send_mail): url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' res = self.app.post( @@ -1003,7 +1003,7 @@ def test_claim_user_post_returns_fullname(self, send_mail): ) - @mock.patch('website.project.views.contributor.mails.send_mail') + @mock.patch('website.project.views.contributor.mails.execute_email_send') def test_claim_user_post_if_email_is_different_from_given_email(self, send_mail): email = fake_email() # email that is different from the one the referrer gave url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' @@ -1062,4 +1062,3 @@ def test_claim_user_with_preprint_id_adds_corresponding_claimed_tag_to_user(self assert res.status_code == 302 self.user.reload() assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') in self.user.system_tags - diff --git a/tests/test_auth.py b/tests/test_auth.py index b59c1c065ab..da72118da43 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -71,7 +71,7 @@ def test_unreg_user_can_register(self): assert user.get_confirmation_token(user.username) - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_confirm_email(self, mock_mail): user = UnregUserFactory() @@ -163,7 +163,7 @@ def test_successful_external_first_login_without_attributes(self, mock_service_v cas.make_response_from_ticket(ticket, service_url) assert user == mock_external_first_login_authenticate.call_args[0][0] - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_password_change_sends_email(self, mock_mail): user = UserFactory() user.set_password('killerqueen') @@ -211,7 +211,7 @@ def test_validate_recaptcha_empty_response(self, req_post): # ensure None short circuits execution (no call to google) assert not validate_recaptcha(None) - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_sign_up_twice_sends_two_confirmation_emails_only(self, mock_mail): # Regression test for https://openscience.atlassian.net/browse/OSF-7060 url = api_url_for('register_user') diff --git a/tests/test_auth_views.py b/tests/test_auth_views.py index 7ab356ddba8..6d7b5fb0514 100644 --- a/tests/test_auth_views.py +++ b/tests/test_auth_views.py @@ -50,7 +50,7 @@ def setUp(self): self.user = AuthUserFactory() self.auth = self.user.auth - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_register_ok(self, _): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' @@ -68,7 +68,7 @@ def test_register_ok(self, _): assert user.accepted_terms_of_service is None # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/2902 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_register_email_case_insensitive(self, _): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' @@ -84,7 +84,7 @@ def test_register_email_case_insensitive(self, _): 
user = OSFUser.objects.get(username=email) assert user.fullname == name - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_register_email_with_accepted_tos(self, _): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' @@ -101,7 +101,7 @@ def test_register_email_with_accepted_tos(self, _): user = OSFUser.objects.get(username=email) assert user.accepted_terms_of_service - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_register_email_without_accepted_tos(self, _): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' @@ -195,7 +195,7 @@ def test_register_blocked_email_domain(self): assert users.count() == 0 @mock.patch('framework.auth.views.validate_recaptcha', return_value=True) - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_register_good_captcha(self, _, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' @@ -217,7 +217,7 @@ def test_register_good_captcha(self, _, validate_recaptcha): assert user.fullname == name @mock.patch('framework.auth.views.validate_recaptcha', return_value=False) - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_register_missing_captcha(self, _, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' @@ -236,7 +236,7 @@ def test_register_missing_captcha(self, _, validate_recaptcha): assert resp.status_code == http_status.HTTP_400_BAD_REQUEST @mock.patch('framework.auth.views.validate_recaptcha', return_value=False) - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_register_bad_captcha(self, _, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' @@ -317,7 +317,7 @@ def test_register_sends_user_registered_signal(self, mock_send_confirm_email): assert mock_signals.signals_sent() == {auth.signals.user_registered, auth.signals.unconfirmed_user_created} assert mock_send_confirm_email.called - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_resend_confirmation(self, send_mail: MagicMock): email = 'test@mail.com' token = self.user.add_unconfirmed_email(email) @@ -344,7 +344,7 @@ def test_resend_confirmation(self, send_mail: MagicMock): with pytest.raises(InvalidTokenError): self.user.get_unconfirmed_email_for_token(token) - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_click_confirmation_email(self, send_mail): # TODO: check in qa url encoding email = 'test@mail.com' @@ -509,7 +509,7 @@ def test_resend_confirmation_not_work_for_confirmed_email(self): assert res.status_code == 400 assert res.json['message_long'] == 'Cannnot resend confirmation for confirmed emails' - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_resend_confirmation_does_not_send_before_throttle_expires(self, send_mail): email = 'test@mail.com' self.user.save() @@ -940,4 +940,3 @@ def 
test_reset_password_logs_out_user(self): assert 'reauth' not in location assert 'logout?service=' in location assert 'resetpassword' in location - diff --git a/tests/test_conferences.py b/tests/test_conferences.py index 8c6c62ee304..6573d462136 100644 --- a/tests/test_conferences.py +++ b/tests/test_conferences.py @@ -571,7 +571,7 @@ def test_conference_valid_submissions(self): class TestConferenceIntegration(ContextTestCase): - @mock.patch('website.conferences.views.send_mail') + @mock.patch('website.conferences.views.execute_email_send') @mock.patch('website.conferences.utils.upload_attachments') def test_integration(self, mock_upload, mock_send_mail): fullname = 'John Deacon' @@ -619,7 +619,7 @@ def test_integration(self, mock_upload, mock_send_mail): assert_absolute(call_kwargs['file_url']) assert_absolute(call_kwargs['node_url']) - @mock.patch('website.conferences.views.send_mail') + @mock.patch('website.conferences.views.execute_email_send') def test_integration_inactive(self, mock_send_mail): conference = ConferenceFactory(active=False) fullname = 'John Deacon' @@ -658,7 +658,7 @@ def test_integration_inactive(self, mock_send_mail): web_url_for('conference_view', _absolute=True), ) - @mock.patch('website.conferences.views.send_mail') + @mock.patch('website.conferences.views.execute_email_send') @mock.patch('website.conferences.utils.upload_attachments') def test_integration_wo_full_name(self, mock_upload, mock_send_mail): username = 'no_full_name@mail.com' @@ -705,7 +705,7 @@ def test_integration_wo_full_name(self, mock_upload, mock_send_mail): assert_absolute(call_kwargs['file_url']) assert_absolute(call_kwargs['node_url']) - @mock.patch('website.conferences.views.send_mail') + @mock.patch('website.conferences.views.execute_email_send') @mock.patch('website.conferences.utils.upload_attachments') def test_create_conference_node_with_same_name_as_existing_node(self, mock_upload, mock_send_mail): conference = ConferenceFactory() @@ -745,7 +745,7 @@ def test_create_conference_node_with_same_name_as_existing_node(self, mock_uploa assert mock_send_mail.called - @mock.patch('website.conferences.views.send_mail') + @mock.patch('website.conferences.views.execute_email_send') def test_conferences_discontinued(self, mock_send_mail): fullname = 'John Deacon' username = 'deacon@queen.com' diff --git a/tests/test_misc_views.py b/tests/test_misc_views.py index fb501b80233..bae2870876c 100644 --- a/tests/test_misc_views.py +++ b/tests/test_misc_views.py @@ -400,7 +400,7 @@ def test_external_login_confirm_email_get_without_destination(self): res = self.app.get(url, auth=self.auth) assert res.status_code == 400, 'bad request' - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_external_login_confirm_email_get_create(self, mock_welcome): # TODO: check in qa url encoding assert not self.user.is_registered @@ -417,7 +417,7 @@ def test_external_login_confirm_email_get_create(self, mock_welcome): assert self.user.is_registered assert self.user.has_usable_password() - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_external_login_confirm_email_get_link(self, mock_link_confirm): self.user.external_identity['orcid'][self.provider_id] = 'LINK' self.user.save() @@ -436,7 +436,7 @@ def test_external_login_confirm_email_get_link(self, mock_link_confirm): assert self.user.is_registered assert self.user.has_usable_password() - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') 
def test_external_login_confirm_email_get_duped_id(self, mock_confirm): dupe_user = UserFactory(external_identity={'orcid': {self.provider_id: 'CREATE'}}) assert dupe_user.external_identity == self.user.external_identity @@ -454,7 +454,7 @@ def test_external_login_confirm_email_get_duped_id(self, mock_confirm): assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' assert dupe_user.external_identity == {} - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_external_login_confirm_email_get_duping_id(self, mock_confirm): dupe_user = UserFactory(external_identity={'orcid': {self.provider_id: 'VERIFIED'}}) url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') diff --git a/tests/test_notifications.py b/tests/test_notifications.py index 05c76618ef2..e97bad37732 100644 --- a/tests/test_notifications.py +++ b/tests/test_notifications.py @@ -945,7 +945,7 @@ def test_group_notifications_by_user_digest(self): digest_ids = [d2._id, d3._id] remove_notifications(email_notification_ids=digest_ids) - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_send_users_email_called_with_correct_args(self, mock_send_mail): send_type = 'email_transactional' d = factories.NotificationDigestFactory( @@ -973,7 +973,7 @@ def test_send_users_email_called_with_correct_args(self, mock_send_mail): message = group_by_node(user_groups[last_user_index]['info']) assert kwargs['message'] == message - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_send_users_email_ignores_disabled_users(self, mock_send_mail): send_type = 'email_transactional' d = factories.NotificationDigestFactory( @@ -1046,7 +1046,7 @@ def test_reviews_base_notification(self): event_types = [sub.event_name for sub in contributor_subscriptions] assert 'global_reviews' in event_types - @mock.patch('website.mails.mails.send_mail') + @mock.patch('website.mails.mails.execute_email_send') def test_reviews_submit_notification(self, mock_send_email): listeners.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user]) assert mock_send_email.called diff --git a/tests/test_preprints.py b/tests/test_preprints.py index 5528ef28219..15361b4a7bb 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -1994,7 +1994,7 @@ def setUp(self): self.preprint.add_contributor(self.write_contrib, permissions=WRITE) self.preprint_branded = PreprintFactory(creator=self.user, is_published=False) - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_creator_gets_email(self, send_mail): self.preprint.set_published(True, auth=Auth(self.user), save=True) domain = self.preprint.provider.domain or settings.DOMAIN diff --git a/tests/test_registrations/test_embargoes.py b/tests/test_registrations/test_embargoes.py index 8b87cf4e252..1372815927b 100644 --- a/tests/test_registrations/test_embargoes.py +++ b/tests/test_registrations/test_embargoes.py @@ -1125,7 +1125,7 @@ def test_cannot_request_termination_on_component_of_embargo(self): with pytest.raises(NodeStateError): reg._nodes.first().request_embargo_termination(node.creator) - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_embargoed_registration_set_privacy_sends_mail(self, mock_send_mail): """ Integration test for 
https://github.com/CenterForOpenScience/osf.io/pull/5294#issuecomment-212613668 diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index 5dad9b35b42..19129413137 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -783,7 +783,7 @@ def test_POST_retraction_to_private_registration_returns_HTTPError_FORBIDDEN(sel self.registration.reload() assert self.registration.retraction is None - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_POST_retraction_does_not_send_email_to_unregistered_admins(self, mock_send_mail): unreg = UnregUserFactory() self.registration.add_unregistered_contributor( @@ -861,7 +861,7 @@ def test_POST_retraction_without_justification_returns_HTTPOK(self, mock_send): assert self.registration.is_pending_retraction assert self.registration.retraction.justification is None - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_valid_POST_retraction_adds_to_parent_projects_log(self, mock_send): initial_project_logs = self.registration.registered_from.logs.count() self.app.post( @@ -873,7 +873,7 @@ def test_valid_POST_retraction_adds_to_parent_projects_log(self, mock_send): # Logs: Created, registered, retraction initiated assert self.registration.registered_from.logs.count() == initial_project_logs + 1 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_valid_POST_retraction_when_pending_retraction_raises_400(self, mock_send): self.app.post( self.retraction_post_url, @@ -887,7 +887,7 @@ def test_valid_POST_retraction_when_pending_retraction_raises_400(self, mock_sen ) assert res.status_code == 400 - @mock.patch('website.mails.send_mail') + @mock.patch('website.mails.execute_email_send') def test_valid_POST_calls_send_mail_with_username(self, mock_send): self.app.post( self.retraction_post_url, diff --git a/tests/test_spam_mixin.py b/tests/test_spam_mixin.py index a97bc288e44..28fd3ca1499 100644 --- a/tests/test_spam_mixin.py +++ b/tests/test_spam_mixin.py @@ -15,7 +15,7 @@ @pytest.mark.django_db -@mock.patch('framework.auth.views.mails.send_mail') +@mock.patch('framework.auth.views.mails.execute_email_send') def test_throttled_autoban(mock_mail): settings.SPAM_THROTTLE_AUTOBAN = True user = AuthUserFactory() diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index 5a30ca79d78..523626fde01 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -412,7 +412,7 @@ def test_cannot_update_user_without_user_id(self): assert res.status_code == 400 assert res.json['message_long'] == '"id" is required' - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_emails_return_emails(self, send_mail): user1 = AuthUserFactory() url = api_url_for('update_user') @@ -426,7 +426,7 @@ def test_add_emails_return_emails(self, send_mail): assert 'emails' in res.json['profile'] assert len(res.json['profile']['emails']) == 2 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_resend_confirmation_return_emails(self, send_mail): user1 = AuthUserFactory() url = api_url_for('resend_confirmation') @@ -439,7 +439,7 @@ def test_resend_confirmation_return_emails(self, send_mail): assert 'emails' in res.json['profile'] assert len(res.json['profile']['emails']) 
== 2 - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('website.mailchimp_utils.get_mailchimp_api') def test_update_user_mailing_lists(self, mock_get_mailchimp_api, send_mail): email = fake_email() @@ -484,7 +484,7 @@ def test_update_user_mailing_lists(self, mock_get_mailchimp_api, send_mail): ) handlers.celery_teardown_request() - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('website.mailchimp_utils.get_mailchimp_api') def test_unsubscribe_mailchimp_not_called_if_user_not_subscribed(self, mock_get_mailchimp_api, send_mail): email = fake_email() @@ -792,7 +792,7 @@ def test_password_change_invalid_empty_string_confirm_password(self): def test_password_change_invalid_blank_confirm_password(self): self.test_password_change_invalid_blank_password('password', 'new password', ' ') - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_user_cannot_request_account_export_before_throttle_expires(self, send_mail): url = api_url_for('request_export') self.app.post(url, auth=self.user.auth) diff --git a/tests/test_webtests.py b/tests/test_webtests.py index e06be14a093..f9a74dd03e1 100644 --- a/tests/test_webtests.py +++ b/tests/test_webtests.py @@ -731,7 +731,7 @@ def test_resend_confirmation_get(self): assert res.get_form('resendForm') # test that unconfirmed user can receive resend confirmation email - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_can_receive_resend_confirmation_email(self, mock_send_mail): # load resend confirmation page and submit email res = self.app.get(self.get_url) @@ -746,7 +746,7 @@ def test_can_receive_resend_confirmation_email(self, mock_send_mail): assert_in_html('If there is an OSF account', res.text) # test that confirmed user cannot receive resend confirmation email - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_cannot_receive_resend_confirmation_email_1(self, mock_send_mail): # load resend confirmation page and submit email res = self.app.get(self.get_url) @@ -761,7 +761,7 @@ def test_cannot_receive_resend_confirmation_email_1(self, mock_send_mail): assert_in_html('has already been confirmed', res.text) # test that non-existing user cannot receive resend confirmation email - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_cannot_receive_resend_confirmation_email_2(self, mock_send_mail): # load resend confirmation page and submit email res = self.app.get(self.get_url) @@ -776,7 +776,7 @@ def test_cannot_receive_resend_confirmation_email_2(self, mock_send_mail): assert_in_html('If there is an OSF account', res.text) # test that user cannot submit resend confirmation request too quickly - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_cannot_resend_confirmation_twice_quickly(self, mock_send_mail): # load resend confirmation page and submit email res = self.app.get(self.get_url) @@ -820,7 +820,7 @@ def test_get_forgot_password(self): assert res.get_form('forgotPasswordForm') # test that existing user can receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') + 
@mock.patch('framework.auth.views.mails.execute_email_send') def test_can_receive_reset_password_email(self, mock_send_mail): # load forgot password page and submit email res = self.app.get(self.get_url) @@ -843,7 +843,7 @@ def test_can_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 != {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_cannot_receive_reset_password_email(self, mock_send_mail): # load forgot password page and submit email res = self.app.get(self.get_url) @@ -866,7 +866,7 @@ def test_cannot_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_not_active_user_no_reset_password_email(self, mock_send_mail): self.user.deactivate_account() self.user.save() @@ -892,7 +892,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that user cannot submit forgot password request too quickly - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_cannot_reset_password_twice_quickly(self, mock_send_mail): # load forgot password page and submit email res = self.app.get(self.get_url) @@ -939,7 +939,7 @@ def test_get_forgot_password(self): assert 'campaign=unsupportedinstitution' in location # test that user from disabled institution can receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_can_receive_reset_password_email(self, mock_send_mail): # submit email to institutional forgot-password page res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) @@ -959,7 +959,7 @@ def test_can_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 != {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_cannot_receive_reset_password_email(self, mock_send_mail): # load forgot password page and submit email res = self.app.post(self.post_url, data={'forgot_password-email': 'fake' + self.user.username}) @@ -979,7 +979,7 @@ def test_cannot_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that non-existing user cannot receive institutional reset password email - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_not_active_user_no_reset_password_email(self, mock_send_mail): self.user.deactivate_account() self.user.save() @@ -1001,7 +1001,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that user cannot submit forgot password request too quickly - @mock.patch('framework.auth.views.mails.send_mail') + @mock.patch('framework.auth.views.mails.execute_email_send') def test_cannot_reset_password_twice_quickly(self, mock_send_mail): # submit institutional forgot-password request in rapid succession res = self.app.post(self.post_url, 
data={'forgot_password-email': self.user.username}) diff --git a/website/mails/mails.py b/website/mails/mails.py index ab632e780ec..39cd8feff0b 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -75,6 +75,20 @@ def render_message(tpl_name, **context): return tpl.render(**context) +def execute_email_send(celery, mailer, kwargs, callback=None): + if settings.USE_EMAIL: + if settings.USE_CELERY and celery: + logger.debug('Sending via celery...') + return mailer.apply_async(kwargs=kwargs, link=callback) + else: + logger.debug('Sending without celery') + ret = mailer(**kwargs) + if callback: + callback() + + return ret + + def send_mail( to_addr, mail, @@ -136,17 +150,13 @@ def send_mail( ) logger.debug('Preparing to send...') - if settings.USE_EMAIL: - if settings.USE_CELERY and celery: - logger.debug('Sending via celery...') - return mailer.apply_async(kwargs=kwargs, link=callback) - else: - logger.debug('Sending without celery') - ret = mailer(**kwargs) - if callback: - callback() - - return ret + ret = execute_email_send( + celery=celery, + mailer=mailer, + kwargs=kwargs, + callback=callback, + ) + return ret def get_english_article(word): From 57b0bd1024b3969cea4f6ca40ebbee0c340e640f Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 20 May 2025 10:06:40 -0400 Subject: [PATCH 012/176] add new data model for notifications --- admin/notifications/views.py | 8 +- admin_tests/notifications/test_views.py | 19 +- api/subscriptions/permissions.py | 4 +- api/subscriptions/views.py | 11 +- .../views/test_subscriptions_detail.py | 4 +- .../views/test_subscriptions_list.py | 4 +- osf/email/__init__.py | 68 ++++ .../commands/add_notification_subscription.py | 9 +- ...ion_provider_notification_subscriptions.py | 4 +- ...ion_provider_notification_subscriptions.py | 4 +- .../0030_new_notifications_model.py | 104 +++++ osf/models/__init__.py | 8 +- osf/models/collection_submission.py | 4 +- osf/models/notification.py | 356 ++++++++++++++++++ osf/models/notifications.py | 7 +- osf/models/provider.py | 4 +- osf_tests/factories.py | 4 +- osf_tests/utils.py | 4 +- scripts/add_global_subscriptions.py | 6 +- ...cation_subscriptions_from_registrations.py | 2 +- tests/test_events.py | 28 +- tests/test_notifications.py | 157 ++++---- website/notifications/emails.py | 7 +- website/notifications/utils.py | 38 +- website/notifications/views.py | 9 +- website/reviews/listeners.py | 12 +- 26 files changed, 715 insertions(+), 170 deletions(-) create mode 100644 osf/email/__init__.py create mode 100644 osf/migrations/0030_new_notifications_model.py create mode 100644 osf/models/notification.py diff --git a/admin/notifications/views.py b/admin/notifications/views.py index 7a3a13a8df8..3546878e9af 100644 --- a/admin/notifications/views.py +++ b/admin/notifications/views.py @@ -1,17 +1,17 @@ -from osf.models.notifications import NotificationSubscription +from osf.models.notifications import NotificationSubscriptionLegacy from django.db.models import Count def delete_selected_notifications(selected_ids): - NotificationSubscription.objects.filter(id__in=selected_ids).delete() + NotificationSubscriptionLegacy.objects.filter(id__in=selected_ids).delete() def detect_duplicate_notifications(node_id=None): - query = NotificationSubscription.objects.values('_id').annotate(count=Count('_id')).filter(count__gt=1) + query = NotificationSubscriptionLegacy.objects.values('_id').annotate(count=Count('_id')).filter(count__gt=1) if node_id: query = query.filter(node_id=node_id) detailed_duplicates = [] for dup in 
query: - notifications = NotificationSubscription.objects.filter( + notifications = NotificationSubscriptionLegacy.objects.filter( _id=dup['_id'] ).order_by('created') diff --git a/admin_tests/notifications/test_views.py b/admin_tests/notifications/test_views.py index 08ad695edd1..42d182a77e5 100644 --- a/admin_tests/notifications/test_views.py +++ b/admin_tests/notifications/test_views.py @@ -1,10 +1,11 @@ import pytest from django.test import RequestFactory -from osf.models import OSFUser, NotificationSubscription, Node +from osf.models import OSFUser, Node from admin.notifications.views import ( delete_selected_notifications, detect_duplicate_notifications, ) +from osf.models.notifications import NotificationSubscriptionLegacy from tests.base import AdminTestCase pytestmark = pytest.mark.django_db @@ -18,19 +19,19 @@ def setUp(self): self.request_factory = RequestFactory() def test_delete_selected_notifications(self): - notification1 = NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event1') - notification2 = NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event2') - notification3 = NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event3') + notification1 = NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event1') + notification2 = NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event2') + notification3 = NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event3') delete_selected_notifications([notification1.id, notification2.id]) - assert not NotificationSubscription.objects.filter(id__in=[notification1.id, notification2.id]).exists() - assert NotificationSubscription.objects.filter(id=notification3.id).exists() + assert not NotificationSubscriptionLegacy.objects.filter(id__in=[notification1.id, notification2.id]).exists() + assert NotificationSubscriptionLegacy.objects.filter(id=notification3.id).exists() def test_detect_duplicate_notifications(self): - NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event1') - NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event1') - NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event2') + NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event1') + NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event1') + NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event2') duplicates = detect_duplicate_notifications() diff --git a/api/subscriptions/permissions.py b/api/subscriptions/permissions.py index 19dc7bcbd58..f0f3553ad6c 100644 --- a/api/subscriptions/permissions.py +++ b/api/subscriptions/permissions.py @@ -1,12 +1,12 @@ from rest_framework import permissions -from osf.models.notifications import NotificationSubscription +from osf.models.notifications import NotificationSubscriptionLegacy class IsSubscriptionOwner(permissions.BasePermission): def has_object_permission(self, request, view, obj): - assert isinstance(obj, NotificationSubscription), f'obj must be a NotificationSubscription; got {obj}' + assert isinstance(obj, NotificationSubscriptionLegacy), f'obj must be a NotificationSubscriptionLegacy; got {obj}' user_id = request.user.id return obj.none.filter(id=user_id).exists() \ or 
obj.email_transactional.filter(id=user_id).exists() \ diff --git a/api/subscriptions/views.py b/api/subscriptions/views.py index c1d7e833b49..a3c11a52aa8 100644 --- a/api/subscriptions/views.py +++ b/api/subscriptions/views.py @@ -22,6 +22,7 @@ RegistrationProvider, AbstractProvider, ) +from osf.models.notifications import NotificationSubscriptionLegacy class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin): @@ -39,7 +40,7 @@ class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin): def get_default_queryset(self): user = self.request.user - return NotificationSubscription.objects.filter( + return NotificationSubscriptionLegacy.objects.filter( Q(none=user) | Q(email_digest=user) | Q( @@ -54,7 +55,7 @@ def get_queryset(self): class AbstractProviderSubscriptionList(SubscriptionList): def get_default_queryset(self): user = self.request.user - return NotificationSubscription.objects.filter( + return NotificationSubscriptionLegacy.objects.filter( provider___id=self.kwargs['provider_id'], provider__type=self.provider_class._typedmodels_type, ).filter( @@ -80,7 +81,7 @@ class SubscriptionDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView): def get_object(self): subscription_id = self.kwargs['subscription_id'] try: - obj = NotificationSubscription.objects.get(_id=subscription_id) + obj = NotificationSubscriptionLegacy.objects.get(_id=subscription_id) except ObjectDoesNotExist: raise NotFound self.check_object_permissions(self.request, obj) @@ -109,7 +110,7 @@ def get_object(self): if self.kwargs.get('provider_id'): provider = self.provider_class.objects.get(_id=self.kwargs.get('provider_id')) try: - obj = NotificationSubscription.objects.get( + obj = NotificationSubscriptionLegacy.objects.get( _id=subscription_id, provider_id=provider.id, ) @@ -117,7 +118,7 @@ def get_object(self): raise NotFound else: try: - obj = NotificationSubscription.objects.get( + obj = NotificationSubscriptionLegacy.objects.get( _id=subscription_id, provider__type=self.provider_class._typedmodels_type, ) diff --git a/api_tests/subscriptions/views/test_subscriptions_detail.py b/api_tests/subscriptions/views/test_subscriptions_detail.py index 2a8741fc173..f64c835ad10 100644 --- a/api_tests/subscriptions/views/test_subscriptions_detail.py +++ b/api_tests/subscriptions/views/test_subscriptions_detail.py @@ -1,7 +1,7 @@ import pytest from api.base.settings.defaults import API_BASE -from osf_tests.factories import AuthUserFactory, NotificationSubscriptionFactory +from osf_tests.factories import AuthUserFactory, NotificationSubscriptionLegacyFactory @pytest.mark.django_db @@ -17,7 +17,7 @@ def user_no_auth(self): @pytest.fixture() def global_user_notification(self, user): - notification = NotificationSubscriptionFactory(_id=f'{user._id}_global', user=user, event_name='global') + notification = NotificationSubscriptionLegacyFactory(_id=f'{user._id}_global', user=user, event_name='global') notification.add_user_to_subscription(user, 'email_transactional') return notification diff --git a/api_tests/subscriptions/views/test_subscriptions_list.py b/api_tests/subscriptions/views/test_subscriptions_list.py index f1131b1fa72..1eca735c456 100644 --- a/api_tests/subscriptions/views/test_subscriptions_list.py +++ b/api_tests/subscriptions/views/test_subscriptions_list.py @@ -1,7 +1,7 @@ import pytest from api.base.settings.defaults import API_BASE -from osf_tests.factories import AuthUserFactory, PreprintProviderFactory, ProjectFactory, NotificationSubscriptionFactory +from 
osf_tests.factories import AuthUserFactory, PreprintProviderFactory, ProjectFactory, NotificationSubscriptionLegacyFactory
 
 
 @pytest.mark.django_db
@@ -23,7 +23,7 @@ def node(self, user):
 
     @pytest.fixture()
     def global_user_notification(self, user):
-        notification = NotificationSubscriptionFactory(_id=f'{user._id}_global', user=user, event_name='global')
+        notification = NotificationSubscriptionLegacyFactory(_id=f'{user._id}_global', user=user, event_name='global')
         notification.add_user_to_subscription(user, 'email_transactional')
         return notification
 
diff --git a/osf/email/__init__.py b/osf/email/__init__.py
new file mode 100644
index 00000000000..d8cc1d6de5a
--- /dev/null
+++ b/osf/email/__init__.py
@@ -0,0 +1,68 @@
+import logging
+import smtplib
+from email.mime.text import MIMEText
+from sendgrid import SendGridAPIClient
+from sendgrid.helpers.mail import Mail
+from website import settings
+
+def send_email_over_smtp(to_addr, notification_type, context):
+    """Send an email notification using SMTP. This is typically not used in production, as other third-party mail
+    services are preferred. It is intended for tests, staging environments, and other special situations.
+
+    Args:
+        to_addr (str): The recipient's email address.
+        notification_type (str): The subject of the notification.
+        context (dict): The email content context.
+    """
+    if not settings.MAIL_SERVER:
+        raise NotImplementedError('MAIL_SERVER is not set')
+    if not (settings.MAIL_USERNAME and settings.MAIL_PASSWORD):
+        raise NotImplementedError('MAIL_USERNAME and MAIL_PASSWORD are required for SMTP')
+
+    msg = MIMEText(
+        notification_type.template.format(context),
+        'html',
+        _charset='utf-8'
+    )
+    msg['Subject'] = notification_type.email_subject_line_template.format(context=context)
+
+    with smtplib.SMTP(settings.MAIL_SERVER) as server:
+        server.ehlo()
+        server.starttls()
+        server.ehlo()
+        server.login(settings.MAIL_USERNAME, settings.MAIL_PASSWORD)
+        server.sendmail(
+            settings.FROM_EMAIL,
+            [to_addr],
+            msg.as_string()
+        )
+
+
+def send_email_with_send_grid(to_addr, notification_type, context):
+    """Send an email notification using SendGrid.
+
+    Args:
+        to_addr (str): The recipient's email address.
+        notification_type (str): The subject of the notification.
+        context (dict): The email content context.
+ """ + if not settings.SENDGRID_API_KEY: + raise NotImplementedError('SENDGRID_API_KEY is required for sendgrid notifications.') + + message = Mail( + from_email=settings.FROM_EMAIL, + to_emails=to_addr, + subject=notification_type, + html_content=context.get('message', '') + ) + + try: + sg = SendGridAPIClient(settings.SENDGRID_API_KEY) + response = sg.send(message) + if response.status_code not in (200, 201, 202): + logging.error(f'SendGrid response error: {response.status_code}, body: {response.body}') + response.raise_for_status() + logging.info(f'Notification email sent to {to_addr} for {notification_type}.') + except Exception as exc: + logging.error(f'Failed to send email notification to {to_addr}: {exc}') + raise exc diff --git a/osf/management/commands/add_notification_subscription.py b/osf/management/commands/add_notification_subscription.py index 7d9a404f37a..46c0a17ec30 100644 --- a/osf/management/commands/add_notification_subscription.py +++ b/osf/management/commands/add_notification_subscription.py @@ -5,6 +5,7 @@ import logging import django + django.setup() from django.core.management.base import BaseCommand @@ -20,9 +21,9 @@ def add_reviews_notification_setting(notification_type, state=None): if state: OSFUser = state.get_model('osf', 'OSFUser') - NotificationSubscription = state.get_model('osf', 'NotificationSubscription') + NotificationSubscriptionLegacy = state.get_model('osf', 'NotificationSubscriptionLegacy') else: - from osf.models import OSFUser, NotificationSubscription + from osf.models import OSFUser, NotificationSubscriptionLegacy active_users = OSFUser.objects.filter(date_confirmed__isnull=False).exclude(date_disabled__isnull=False).exclude(is_active=False).order_by('id') total_active_users = active_users.count() @@ -33,10 +34,10 @@ def add_reviews_notification_setting(notification_type, state=None): for user in active_users.iterator(): user_subscription_id = to_subscription_key(user._id, notification_type) - subscription = NotificationSubscription.load(user_subscription_id) + subscription = NotificationSubscriptionLegacy.load(user_subscription_id) if not subscription: logger.info(f'No {notification_type} subscription found for user {user._id}. 
Subscribing...') - subscription = NotificationSubscription(_id=user_subscription_id, owner=user, event_name=notification_type) + subscription = NotificationSubscriptionLegacy(_id=user_subscription_id, owner=user, event_name=notification_type) subscription.save() # Need to save in order to access m2m fields subscription.add_user_to_subscription(user, 'email_transactional') else: diff --git a/osf/management/commands/populate_collection_provider_notification_subscriptions.py b/osf/management/commands/populate_collection_provider_notification_subscriptions.py index 5713b08061b..c3a21eb8d20 100644 --- a/osf/management/commands/populate_collection_provider_notification_subscriptions.py +++ b/osf/management/commands/populate_collection_provider_notification_subscriptions.py @@ -1,7 +1,7 @@ import logging from django.core.management.base import BaseCommand -from osf.models import NotificationSubscription, CollectionProvider +from osf.models import NotificationSubscriptionLegacy, CollectionProvider logger = logging.getLogger(__file__) @@ -12,7 +12,7 @@ def populate_collection_provider_notification_subscriptions(): provider_moderators = provider.get_group('moderator').user_set.all() for subscription in provider.DEFAULT_SUBSCRIPTIONS: - instance, created = NotificationSubscription.objects.get_or_create( + instance, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_{subscription}', event_name=subscription, provider=provider diff --git a/osf/management/commands/populate_registration_provider_notification_subscriptions.py b/osf/management/commands/populate_registration_provider_notification_subscriptions.py index fe372fcbb80..db4b44acba5 100644 --- a/osf/management/commands/populate_registration_provider_notification_subscriptions.py +++ b/osf/management/commands/populate_registration_provider_notification_subscriptions.py @@ -2,7 +2,7 @@ from django.contrib.auth.models import Group from django.core.management.base import BaseCommand -from osf.models import NotificationSubscription, RegistrationProvider +from osf.models import RegistrationProvider, NotificationSubscriptionLegacy logger = logging.getLogger(__file__) @@ -17,7 +17,7 @@ def populate_registration_provider_notification_subscriptions(): continue for subscription in provider.DEFAULT_SUBSCRIPTIONS: - instance, created = NotificationSubscription.objects.get_or_create( + instance, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_{subscription}', event_name=subscription, provider=provider diff --git a/osf/migrations/0030_new_notifications_model.py b/osf/migrations/0030_new_notifications_model.py new file mode 100644 index 00000000000..ec044b08a07 --- /dev/null +++ b/osf/migrations/0030_new_notifications_model.py @@ -0,0 +1,104 @@ +import osf +from django.db import migrations, models +from django.conf import settings +import django_extensions.db.fields +import django.db.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0029_remove_abstractnode_keenio_read_key'), + ] + + operations = [ + migrations.RunSQL( + """ + DO $$ + DECLARE + idx record; + BEGIN + FOR idx IN + SELECT indexname + FROM pg_indexes + WHERE tablename = 'osf_notificationsubscription' + LOOP + EXECUTE format( + 'ALTER INDEX %I RENAME TO %I', + idx.indexname, + replace(idx.indexname, 'osf_notificationsubscription', 'osf_notificationsubscription_legacy') + ); + END LOOP; + END$$; + """ + ), + migrations.AlterModelTable( + name='NotificationSubscription', + 
table='osf_notificationsubscription_legacy', + ), + + migrations.RenameModel( + old_name='NotificationSubscription', + new_name='NotificationSubscriptionLegacy', + ), + migrations.CreateModel( + name='NotificationType', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255, unique=True)), + ('notification_freq', models.CharField( + choices=[('none', 'None'), ('instantly', 'Instantly'), ('daily', 'Daily'), ('weekly', 'Weekly'), + ('monthly', 'Monthly')], default='instantly', max_length=32)), + ('template', models.TextField( + help_text='Template used to render the event_info. Supports Django template syntax.')), + ('object_content_type', models.ForeignKey(blank=True, + help_text='Content type for subscribed objects. Null means global event.', + null=True, on_delete=django.db.models.deletion.SET_NULL, + to='contenttypes.contenttype')), + ], + options={ + 'verbose_name': 'Notification Type', + 'verbose_name_plural': 'Notification Types', + }, + ), + migrations.CreateModel( + name='NotificationSubscription', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', + django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), + ('modified', + django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), + ('message_frequency', models.CharField(max_length=32)), + ('object_id', models.CharField(blank=True, max_length=255, null=True)), + ('content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, + to='contenttypes.contenttype')), + ('notification_type', + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='osf.notificationtype')), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', + to=settings.AUTH_USER_MODEL)), + ], + options={ + 'verbose_name': 'Notification Subscription', + 'verbose_name_plural': 'Notification Subscriptions', + }, + bases=(models.Model, osf.models.base.QuerySetExplainMixin), + ), + migrations.CreateModel( + name='Notification', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('event_context', models.JSONField()), + ('sent', models.DateTimeField(blank=True, null=True)), + ('seen', models.DateTimeField(blank=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True)), + ('subscription', + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', + to='osf.notificationsubscription')), + ], + options={ + 'verbose_name': 'Notification', + 'verbose_name_plural': 'Notifications', + }, + ) + ] diff --git a/osf/models/__init__.py b/osf/models/__init__.py index 275fd148b6c..d3857e5df34 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -62,7 +62,12 @@ from .node_relation import NodeRelation from .nodelog import NodeLog from .notable_domain import NotableDomain, DomainReference -from .notifications import NotificationDigest, NotificationSubscription +from .notifications import NotificationDigest, NotificationSubscriptionLegacy +from .notification import ( + NotificationSubscription, + Notification, + NotificationType +) from .oauth import ( ApiOAuth2Application, ApiOAuth2PersonalToken, @@ -111,4 +116,3 @@ OSFUser, ) from .user_message import UserMessage - diff --git a/osf/models/collection_submission.py 
b/osf/models/collection_submission.py index 893533d85d1..56c5a64f659 100644 --- a/osf/models/collection_submission.py +++ b/osf/models/collection_submission.py @@ -132,10 +132,10 @@ def _notify_moderators_pending(self, event_data): 'allow_submissions': True, } - from .notifications import NotificationSubscription + from .notifications import NotificationSubscriptionLegacy from website.notifications.emails import store_emails - provider_subscription, created = NotificationSubscription.objects.get_or_create( + provider_subscription, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{self.collection.provider._id}_new_pending_submissions', provider=self.collection.provider ) diff --git a/osf/models/notification.py b/osf/models/notification.py new file mode 100644 index 00000000000..b95d5140ebc --- /dev/null +++ b/osf/models/notification.py @@ -0,0 +1,356 @@ +import logging + +from django.db import models +from django.contrib.contenttypes.fields import GenericForeignKey +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError +from django.template import Template, TemplateSyntaxError +from .base import BaseModel +from enum import Enum +from website import settings +from api.base import settings as api_settings +from osf import email + + +class FrequencyChoices(Enum): + NONE = 'none' + INSTANTLY = 'instantly' + DAILY = 'daily' + WEEKLY = 'weekly' + MONTHLY = 'monthly' + + @classmethod + def choices(cls): + return [(key.value, key.name.capitalize()) for key in cls] + + +class NotificationType(models.Model): + class Type(str, Enum): + # Desk notifications + DESK_REQUEST_EXPORT = 'desk_request_export' + DESK_REQUEST_DEACTIVATION = 'desk_request_deactivation' + DESK_OSF_SUPPORT_EMAIL = 'desk_osf_support_email' + DESK_REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER = 'desk_registration_bulk_upload_product_owner' + DESK_USER_REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE = 'desk_user_registration_bulk_upload_unexpected_failure' + DESK_ARCHIVE_JOB_EXCEEDED = 'desk_archive_job_exceeded' + DESK_ARCHIVE_JOB_COPY_ERROR = 'desk_archive_job_copy_error' + DESK_ARCHIVE_JOB_FILE_NOT_FOUND = 'desk_archive_job_file_not_found' + DESK_ARCHIVE_JOB_UNCAUGHT_ERROR = 'desk_archive_job_uncaught_error' + + # User notifications + USER_PENDING_VERIFICATION = 'user_pending_verification' + USER_PENDING_VERIFICATION_REGISTERED = 'user_pending_verification_registered' + USER_STORAGE_CAP_EXCEEDED_ANNOUNCEMENT = 'user_storage_cap_exceeded_announcement' + USER_SPAM_BANNED = 'user_spam_banned' + USER_REQUEST_DEACTIVATION_COMPLETE = 'user_request_deactivation_complete' + USER_PRIMARY_EMAIL_CHANGED = 'user_primary_email_changed' + USER_INSTITUTION_DEACTIVATION = 'user_institution_deactivation' + USER_FORGOT_PASSWORD = 'user_forgot_password' + USER_FORGOT_PASSWORD_INSTITUTION = 'user_forgot_password_institution' + USER_REQUEST_EXPORT = 'user_request_export' + USER_CONTRIBUTOR_ADDED_OSF_PREPRINT = 'user_contributor_added_osf_preprint' + USER_CONTRIBUTOR_ADDED_DEFAULT = 'user_contributor_added_default' + USER_DUPLICATE_ACCOUNTS_OSF4I = 'user_duplicate_accounts_osf4i' + USER_EXTERNAL_LOGIN_LINK_SUCCESS = 'user_external_login_link_success' + USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL = 'user_registration_bulk_upload_failure_all' + USER_REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL = 'user_registration_bulk_upload_success_partial' + USER_REGISTRATION_BULK_UPLOAD_SUCCESS_ALL = 'user_registration_bulk_upload_success_all' + USER_ADD_SSO_EMAIL_OSF4I = 'user_add_sso_email_osf4i' + 
USER_WELCOME_OSF4I = 'user_welcome_osf4i' + USER_ARCHIVE_JOB_EXCEEDED = 'user_archive_job_exceeded' + USER_ARCHIVE_JOB_COPY_ERROR = 'user_archive_job_copy_error' + USER_ARCHIVE_JOB_FILE_NOT_FOUND = 'user_archive_job_file_not_found' + USER_ARCHIVE_JOB_UNCAUGHT_ERROR = 'user_archive_job_uncaught_error' + USER_COMMENT_REPLIES = 'user_comment_replies' + USER_COMMENTS = 'user_comments' + USER_FILE_UPDATED = 'user_file_updated' + USER_COMMENT_MENTIONS = 'user_mentions' + USER_REVIEWS = 'user_reviews' + USER_PASSWORD_RESET = 'user_password_reset' + USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION = 'user_contributor_added_draft_registration' + USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE = 'user_external_login_confirm_email_create' + USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK = 'user_external_login_confirm_email_link' + USER_CONFIRM_MERGE = 'user_confirm_merge' + USER_CONFIRM_EMAIL = 'user_confirm_email' + USER_INITIAL_CONFIRM_EMAIL = 'user_initial_confirm_email' + USER_INVITE_DEFAULT = 'user_invite_default' + USER_PENDING_INVITE = 'user_pending_invite' + USER_FORWARD_INVITE = 'user_forward_invite' + USER_FORWARD_INVITE_REGISTERED = 'user_forward_invite_registered' + USER_INVITE_DRAFT_REGISTRATION = 'user_invite_draft_registration' + USER_INVITE_OSF_PREPRINT = 'user_invite_osf_preprint' + + # Node notifications + NODE_COMMENT = 'node_comments' + NODE_FILES_UPDATED = 'node_files_updated' + NODE_AFFILIATION_CHANGED = 'node_affiliation_changed' + NODE_REQUEST_ACCESS_SUBMITTED = 'node_access_request_submitted' + NODE_REQUEST_ACCESS_DENIED = 'node_request_access_denied' + NODE_FORK_COMPLETED = 'node_fork_completed' + NODE_FORK_FAILED = 'node_fork_failed' + NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST = 'node_request_institutional_access_request' + NODE_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'node_contributor_added_access_request' + NODE_PENDING_EMBARGO_ADMIN = 'node_pending_embargo_admin' + NODE_PENDING_EMBARGO_NON_ADMIN = 'node_pending_embargo_non_admin' + NODE_PENDING_RETRACTION_NON_ADMIN = 'node_pending_retraction_non_admin' + NODE_PENDING_RETRACTION_ADMIN = 'node_pending_retraction_admin' + NODE_PENDING_REGISTRATION_NON_ADMIN = 'node_pending_registration_non_admin' + NODE_PENDING_REGISTRATION_ADMIN = 'node_pending_registration_admin' + NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' + NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' + + # Provider notifications + PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION = 'provider_reviews_submission_confirmation' + PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION = 'provider_reviews_moderator_submission_confirmation' + PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED = 'preprint_request_withdrawal_requested' + PROVIDER_REVIEWS_REJECT_CONFIRMATION = 'provider_reviews_reject_confirmation' + PROVIDER_REVIEWS_ACCEPT_CONFIRMATION = 'provider_reviews_accept_confirmation' + PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION = 'provider_reviews_resubmission_confirmation' + PROVIDER_REVIEWS_COMMENT_EDITED = 'provider_reviews_comment_edited' + PROVIDER_CONTRIBUTOR_ADDED_PREPRINT = 'provider_contributor_added_preprint' + PROVIDER_CONFIRM_EMAIL_MODERATION = 'provider_confirm_email_moderation' + PROVIDER_MODERATOR_ADDED = 'provider_moderator_added' + PROVIDER_CONFIRM_EMAIL_PREPRINTS = 'provider_confirm_email_preprints' + PROVIDER_USER_INVITE_PREPRINT = 'provider_user_invite_preprint' + + # Preprint notifications + PREPRINT_REQUEST_WITHDRAWAL_APPROVED = 'preprint_request_withdrawal_approved' + PREPRINT_REQUEST_WITHDRAWAL_DECLINED = 
'preprint_request_withdrawal_declined' + PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'preprint_contributor_added_preprint_node_from_osf' + + # Collections Submission notifications + NEW_PENDING_SUBMISSIONS = 'new_pending_submissions' + COLLECTION_SUBMISSION_REMOVED_ADMIN = 'collection_submission_removed_admin' + COLLECTION_SUBMISSION_REMOVED_MODERATOR = 'collection_submission_removed_moderator' + COLLECTION_SUBMISSION_REMOVED_PRIVATE = 'collection_submission_removed_private' + COLLECTION_SUBMISSION_SUBMITTED = 'collection_submission_submitted' + COLLECTION_SUBMISSION_ACCEPTED = 'collection_submission_accepted' + COLLECTION_SUBMISSION_REJECTED = 'collection_submission_rejected' + COLLECTION_SUBMISSION_CANCEL = 'collection_submission_cancel' + + # Schema Response notifications + SCHEMA_RESPONSE_REJECTED = 'schema_response_rejected' + SCHEMA_RESPONSE_APPROVED = 'schema_response_approved' + SCHEMA_RESPONSE_SUBMITTED = 'schema_response_submitted' + SCHEMA_RESPONSE_INITIATED = 'schema_response_initiated' + + REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = 'registration_bulk_upload_failure_duplicates' + + @classmethod + def user_types(cls): + return [member for member in cls if member.name.startswith('USER_')] + + @classmethod + def node_types(cls): + return [member for member in cls if member.name.startswith('NODE_')] + + @classmethod + def preprint_types(cls): + return [member for member in cls if member.name.startswith('PREPRINT_')] + + @classmethod + def provider_types(cls): + return [member for member in cls if member.name.startswith('PROVIDER_')] + + @classmethod + def schema_response_types(cls): + return [member for member in cls if member.name.startswith('SCHEMA_RESPONSE_')] + + @classmethod + def desk_types(cls): + return [member for member in cls if member.name.startswith('DESK_')] + + name: str = models.CharField(max_length=255, unique=True) + notification_freq: str = models.CharField( + max_length=32, + choices=FrequencyChoices.choices(), + default=FrequencyChoices.INSTANTLY.value, + ) + + object_content_type = models.ForeignKey( + ContentType, + on_delete=models.SET_NULL, + null=True, + blank=True, + help_text='Content type for subscribed objects. Null means global event.' + ) + + template: str = models.TextField( + help_text='Template used to render the event_info. Supports Django template syntax.' + ) + + def clean(self): + try: + Template(self.template) + except TemplateSyntaxError as exc: + raise ValidationError({'template': f'Invalid template: {exc}'}) + + def emit(self, user, subscribed_object=None, event_context=None): + """Emit a notification to a user by creating Notification and NotificationSubscription objects. + + Args: + user (OSFUser): The recipient of the notification. + subscribed_object (optional): The object the subscription is related to. + event_context (dict, optional): Context for rendering the notification template. 
+ """ + subscription, created = NotificationSubscription.objects.get_or_create( + notification_type=self, + user=user, + content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, + object_id=subscribed_object.pk if subscribed_object else None, + defaults={'message_frequency': self.notification_freq}, + ) + if subscription.message_frequency == 'instantly': + Notification.objects.create( + subscription=subscription, + event_context=event_context + ).send() + + def add_user_to_subscription(self, user, *args, **kwargs): + """ + """ + provider = kwargs.pop('provider', None) + node = kwargs.pop('node', None) + data = {} + if subscribed_object := provider or node: + data = { + 'object_id': subscribed_object.id, + 'content_type_id': ContentType.objects.get_for_model(subscribed_object).id, + } + + notification, created = NotificationSubscription.objects.get_or_create( + user=user, + notification_type=self, + **data, + ) + return notification + + def remove_user_from_subscription(self, user): + """ + """ + notification, _ = NotificationSubscription.objects.update_or_create( + user=user, + notification_type=self, + defaults={'message_frequency': FrequencyChoices.NONE.value} + ) + + def __str__(self) -> str: + return self.name + + class Meta: + verbose_name = 'Notification Type' + verbose_name_plural = 'Notification Types' + + +class NotificationSubscription(BaseModel): + notification_type: NotificationType = models.ForeignKey( + NotificationType, + on_delete=models.CASCADE, + null=False + ) + user = models.ForeignKey('osf.OSFUser', on_delete=models.CASCADE, related_name='subscriptions') + message_frequency: str = models.CharField(max_length=32) + + content_type = models.ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE) + object_id = models.CharField(max_length=255, null=True, blank=True) + subscribed_object = GenericForeignKey('content_type', 'object_id') + + def clean(self): + ct = self.notification_type.object_content_type + + if ct: + if self.content_type != ct: + raise ValidationError('Subscribed object must match type\'s content_type.') + if not self.object_id: + raise ValidationError('Subscribed object ID is required.') + else: + if self.content_type or self.object_id: + raise ValidationError('Global subscriptions must not have an object.') + + if self.message_frequency not in self.notification_type.notification_freq: + raise ValidationError(f'{self.message_frequency!r} is not allowed for {self.notification_type.name!r}.') + + def __str__(self) -> str: + return f'{self.user} subscribes to {self.notification_type.name} ({self.message_frequency})' + + class Meta: + verbose_name = 'Notification Subscription' + verbose_name_plural = 'Notification Subscriptions' + + def emit(self, user, subscribed_object=None, event_context=None): + """Emit a notification to a user by creating Notification and NotificationSubscription objects. + + Args: + user (OSFUser): The recipient of the notification. + subscribed_object (optional): The object the subscription is related to. + event_context (dict, optional): Context for rendering the notification template. 
+ """ + if self.message_frequency == 'instantly': + Notification.objects.create( + subscription=self, + event_context=event_context + ).send() + else: + Notification.objects.create( + subscription=self, + event_context=event_context + ) + +class Notification(models.Model): + subscription = models.ForeignKey( + NotificationSubscription, + on_delete=models.CASCADE, + related_name='notifications' + ) + event_context: dict = models.JSONField() + sent = models.DateTimeField(null=True, blank=True) + seen = models.DateTimeField(null=True, blank=True) + created = models.DateTimeField(auto_now_add=True) + + def send(self, protocol_type='email', recipient=None): + if not protocol_type == 'email': + raise NotImplementedError(f'Protocol type {protocol_type}. Email notifications are only implemented.') + + recipient_address = getattr(recipient, 'username', None) or self.subscription.user + + if protocol_type == 'email' and settings.DEV_MODE and settings.ENABLE_TEST_EMAIL: + email.send_email_over_smtp( + recipient_address, + self.subscription.notification_type, + self.event_context + ) + elif protocol_type == 'email' and settings.DEV_MODE: + if not api_settings.CI_ENV: + logging.info( + f"Attempting to send email in DEV_MODE with ENABLE_TEST_EMAIL false just logs:" + f"\nto={recipient_address}" + f"\ntype={self.subscription.notification_type.name}" + f"\ncontext={self.event_context}" + ) + elif protocol_type == 'email': + email.send_email_with_send_grid( + getattr(recipient, 'username', None) or self.subscription.user, + self.subscription.notification_type, + self.event_context + ) + else: + raise NotImplementedError(f'protocol `{protocol_type}` is not supported.') + + self.mark_sent() + + def mark_sent(self) -> None: + raise NotImplementedError('mark_sent must be implemented by subclasses.') + # self.sent = timezone.now() + # self.save(update_fields=['sent']) + + def mark_seen(self) -> None: + raise NotImplementedError('mark_seen must be implemented by subclasses.') + # self.seen = timezone.now() + # self.save(update_fields=['seen']) + + def __str__(self) -> str: + return f'Notification for {self.subscription.user} [{self.subscription.notification_type.name}]' + + class Meta: + verbose_name = 'Notification' + verbose_name_plural = 'Notifications' diff --git a/osf/models/notifications.py b/osf/models/notifications.py index 86be3424832..41ec120b4ee 100644 --- a/osf/models/notifications.py +++ b/osf/models/notifications.py @@ -1,15 +1,16 @@ from django.contrib.postgres.fields import ArrayField from django.db import models + +from website.notifications.constants import NOTIFICATION_TYPES from .node import Node from .user import OSFUser from .base import BaseModel, ObjectIDMixin from .validators import validate_subscription_type from osf.utils.fields import NonNaiveDateTimeField -from website.notifications.constants import NOTIFICATION_TYPES from website.util import api_v2_url -class NotificationSubscription(BaseModel): +class NotificationSubscriptionLegacy(BaseModel): primary_identifier_name = '_id' _id = models.CharField(max_length=100, db_index=True, unique=False) # pxyz_wiki_updated, uabc_comment_replies @@ -29,6 +30,7 @@ class NotificationSubscription(BaseModel): class Meta: # Both PreprintProvider and RegistrationProvider default instances use "osf" as their `_id` unique_together = ('_id', 'provider') + db_table = 'osf_notificationsubscription_legacy' @classmethod def load(cls, q): @@ -95,7 +97,6 @@ def remove_user_from_subscription(self, user, save=True): if save: self.save() - class 
NotificationDigest(ObjectIDMixin, BaseModel): user = models.ForeignKey('OSFUser', null=True, blank=True, on_delete=models.CASCADE) provider = models.ForeignKey('AbstractProvider', null=True, blank=True, on_delete=models.CASCADE) diff --git a/osf/models/provider.py b/osf/models/provider.py index 2ee920a77e5..b8dacc174bf 100644 --- a/osf/models/provider.py +++ b/osf/models/provider.py @@ -19,7 +19,7 @@ from .brand import Brand from .citation import CitationStyle from .licenses import NodeLicense -from .notifications import NotificationSubscription +from .notifications import NotificationSubscriptionLegacy from .storage import ProviderAssetFile from .subject import Subject from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField @@ -464,7 +464,7 @@ def create_provider_auth_groups(sender, instance, created, **kwargs): def create_provider_notification_subscriptions(sender, instance, created, **kwargs): if created: for subscription in instance.DEFAULT_SUBSCRIPTIONS: - NotificationSubscription.objects.get_or_create( + NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{instance._id}_{subscription}', event_name=subscription, provider=instance diff --git a/osf_tests/factories.py b/osf_tests/factories.py index 7ad8885e1ad..bf636677284 100644 --- a/osf_tests/factories.py +++ b/osf_tests/factories.py @@ -1049,9 +1049,9 @@ def handle_callback(self, response): } -class NotificationSubscriptionFactory(DjangoModelFactory): +class NotificationSubscriptionLegacyFactory(DjangoModelFactory): class Meta: - model = models.NotificationSubscription + model = models.NotificationSubscriptionLegacy def make_node_lineage(): diff --git a/osf_tests/utils.py b/osf_tests/utils.py index a8364a15478..b3f3c92bc88 100644 --- a/osf_tests/utils.py +++ b/osf_tests/utils.py @@ -16,7 +16,7 @@ Sanction, RegistrationProvider, RegistrationSchema, - NotificationSubscription + NotificationSubscriptionLegacy ) from osf.utils.migrations import create_schema_blocks_for_atomic_schema @@ -229,7 +229,7 @@ def _ensure_subscriptions(provider): Avoid that. ''' for subscription in provider.DEFAULT_SUBSCRIPTIONS: - NotificationSubscription.objects.get_or_create( + NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_{subscription}', event_name=subscription, provider=provider diff --git a/scripts/add_global_subscriptions.py b/scripts/add_global_subscriptions.py index b326c6f9f67..52746875d79 100644 --- a/scripts/add_global_subscriptions.py +++ b/scripts/add_global_subscriptions.py @@ -6,13 +6,13 @@ import logging import sys +from osf.models.notifications import NotificationSubscriptionLegacy from website.app import setup_django setup_django() from django.apps import apps from django.db import transaction from website.app import init_app -from osf.models import NotificationSubscription from website.notifications import constants from website.notifications.utils import to_subscription_key @@ -35,10 +35,10 @@ def add_global_subscriptions(dry=True): for user_event in user_events: user_event_id = to_subscription_key(user._id, user_event) - subscription = NotificationSubscription.load(user_event_id) + subscription = NotificationSubscriptionLegacy.load(user_event_id) if not subscription: logger.info(f'No {user_event} subscription found for user {user._id}. 
Subscribing...')
-            subscription = NotificationSubscription(_id=user_event_id, owner=user, event_name=user_event)
+            subscription = NotificationSubscriptionLegacy(_id=user_event_id, owner=user, event_name=user_event)
             subscription.save()  # Need to save in order to access m2m fields
             subscription.add_user_to_subscription(user, notification_type)
             subscription.save()
diff --git a/scripts/remove_notification_subscriptions_from_registrations.py b/scripts/remove_notification_subscriptions_from_registrations.py
index 8984cb25b50..94b20a19a93 100644
--- a/scripts/remove_notification_subscriptions_from_registrations.py
+++ b/scripts/remove_notification_subscriptions_from_registrations.py
@@ -17,7 +17,7 @@ def remove_notification_subscriptions_from_registrations(dry_run=True):
     Registration = apps.get_model('osf.Registration')
-    NotificationSubscription = apps.get_model('osf.NotificationSubscription')
+    NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy')
 
-    notifications_to_delete = NotificationSubscription.objects.filter(node__type='osf.registration')
+    notifications_to_delete = NotificationSubscriptionLegacy.objects.filter(node__type='osf.registration')
     registrations_affected = Registration.objects.filter(
         id__in=notifications_to_delete.values_list(
             'node_id', flat=True
diff --git a/tests/test_events.py b/tests/test_events.py
index 866bf6ec337..c9e30273b49 100644
--- a/tests/test_events.py
+++ b/tests/test_events.py
@@ -131,7 +131,7 @@ def setUp(self):
         self.user_2 = factories.AuthUserFactory()
         self.project = factories.ProjectFactory(creator=self.user_1)
         # subscription
-        self.sub = factories.NotificationSubscriptionFactory(
+        self.sub = factories.NotificationSubscriptionLegacyFactory(
             _id=self.project._id + 'file_updated',
             owner=self.project,
             event_name='file_updated',
@@ -157,7 +157,7 @@ def setUp(self):
         self.user = factories.UserFactory()
         self.consolidate_auth = Auth(user=self.user)
         self.project = factories.ProjectFactory()
-        self.project_subscription = factories.NotificationSubscriptionFactory(
+        self.project_subscription = factories.NotificationSubscriptionLegacyFactory(
             _id=self.project._id + '_file_updated',
             owner=self.project,
             event_name='file_updated'
@@ -184,7 +184,7 @@ def setUp(self):
         self.user = factories.UserFactory()
         self.consolidate_auth = Auth(user=self.user)
         self.project = factories.ProjectFactory()
-        self.project_subscription = factories.NotificationSubscriptionFactory(
+        self.project_subscription = factories.NotificationSubscriptionLegacyFactory(
             _id=self.project._id + '_file_updated',
             owner=self.project,
             event_name='file_updated'
@@ -219,7 +219,7 @@ def setUp(self):
         self.user = factories.UserFactory()
         self.consolidate_auth = Auth(user=self.user)
         self.project = factories.ProjectFactory()
-        self.project_subscription = factories.NotificationSubscriptionFactory(
+        self.project_subscription = factories.NotificationSubscriptionLegacyFactory(
             _id=self.project._id + '_file_updated',
             owner=self.project,
             event_name='file_updated'
@@ -249,7 +249,7 @@ def setUp(self):
         self.user_2 = factories.AuthUserFactory()
         self.project = factories.ProjectFactory(creator=self.user_1)
         # subscription
-        self.sub = factories.NotificationSubscriptionFactory(
+        self.sub = factories.NotificationSubscriptionLegacyFactory(
             _id=self.project._id + 'file_updated',
             owner=self.project,
             event_name='file_updated',
@@ -303,21 +303,21 @@ def setUp(self):
         )
         # Subscriptions
         # for parent node
-        self.sub = factories.NotificationSubscriptionFactory(
+        self.sub = factories.NotificationSubscriptionLegacyFactory(
             _id=self.project._id + '_file_updated',
             owner=self.project,
event_name='file_updated' ) self.sub.save() # for private node - self.private_sub = factories.NotificationSubscriptionFactory( + self.private_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( + self.file_sub = factories.NotificationSubscriptionLegacyFactory( _id='{pid}_{wbid}_file_updated'.format( pid=self.project._id, wbid=self.event.waterbutler_id @@ -398,21 +398,21 @@ def setUp(self): ) # Subscriptions # for parent node - self.sub = factories.NotificationSubscriptionFactory( + self.sub = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' ) self.sub.save() # for private node - self.private_sub = factories.NotificationSubscriptionFactory( + self.private_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( + self.file_sub = factories.NotificationSubscriptionLegacyFactory( _id='{pid}_{wbid}_file_updated'.format( pid=self.project._id, wbid=self.event.waterbutler_id @@ -480,21 +480,21 @@ def setUp(self): ) # Subscriptions # for parent node - self.sub = factories.NotificationSubscriptionFactory( + self.sub = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' ) self.sub.save() # for private node - self.private_sub = factories.NotificationSubscriptionFactory( + self.private_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( + self.file_sub = factories.NotificationSubscriptionLegacyFactory( _id='{pid}_{wbid}_file_updated'.format( pid=self.project._id, wbid=self.event.waterbutler_id diff --git a/tests/test_notifications.py b/tests/test_notifications.py index b52190ca999..64ab0b1bb75 100644 --- a/tests/test_notifications.py +++ b/tests/test_notifications.py @@ -7,7 +7,14 @@ from django.utils import timezone from framework.auth import Auth -from osf.models import Comment, NotificationDigest, NotificationSubscription, Guid, OSFUser +from osf.models import ( + Comment, + NotificationDigest, + NotificationSubscription, + Guid, + OSFUser, + NotificationSubscriptionLegacy +) from website.notifications.tasks import get_users_emails, send_users_email, group_by_node, remove_notifications from website.notifications.exceptions import InvalidSubscriptionError @@ -123,19 +130,19 @@ def test_new_node_creator_is_not_subscribed(self): def test_new_project_creator_is_subscribed_with_global_settings(self): user = factories.UserFactory() - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_comments', user=user, event_name='global_comments' ).add_user_to_subscription(user, 'email_digest') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'none') - factories.NotificationSubscriptionFactory( + 
factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_mentions', user=user, event_name='global_mentions' @@ -146,8 +153,8 @@ def test_new_project_creator_is_subscribed_with_global_settings(self): user_subscriptions = list(utils.get_all_user_subscriptions(user)) event_types = [sub.event_name for sub in user_subscriptions] - file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') + file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_file_updated') + comments_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_comments') assert len(user_subscriptions) == 5 # subscribed to both node and user settings assert 'file_updated' in event_types @@ -163,25 +170,25 @@ def test_new_project_creator_is_subscribed_with_global_settings(self): def test_new_node_creator_is_not_subscribed_with_global_settings(self): user = factories.UserFactory() - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_comments', user=user, event_name='global_comments' ).add_user_to_subscription(user, 'email_digest') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'none') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_comment_replies', user=user, event_name='global_comment_replies' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_mentions', user=user, event_name='global_mentions' @@ -213,25 +220,25 @@ def test_subscribe_user_to_registration_notifications(self): def test_new_project_creator_is_subscribed_with_default_global_settings(self): user = factories.UserFactory() - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_comments', user=user, event_name='global_comments' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_comment_replies', user=user, event_name='global_comment_replies' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_mentions', user=user, event_name='global_mentions' @@ -242,8 +249,8 @@ def test_new_project_creator_is_subscribed_with_default_global_settings(self): user_subscriptions = list(utils.get_all_user_subscriptions(user)) event_types = [sub.event_name for sub in user_subscriptions] - file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') + file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_file_updated') + 
comments_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_comments') assert len(user_subscriptions) == 6 # subscribed to both node and user settings assert 'file_updated' in event_types @@ -259,19 +266,19 @@ def test_new_fork_creator_is_subscribed_with_default_global_settings(self): user = factories.UserFactory() project = factories.ProjectFactory(creator=user) - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_comments', user=user, event_name='global_comments' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_mentions', user=user, event_name='global_mentions' @@ -282,10 +289,10 @@ def test_new_fork_creator_is_subscribed_with_default_global_settings(self): user_subscriptions = list(utils.get_all_user_subscriptions(user)) event_types = [sub.event_name for sub in user_subscriptions] - node_file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - node_comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') - project_file_updated_subscription = NotificationSubscription.objects.get(_id=project._id + '_file_updated') - project_comments_subscription = NotificationSubscription.objects.get(_id=project._id + '_comments') + node_file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_file_updated') + node_comments_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_comments') + project_file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=project._id + '_file_updated') + project_comments_subscription = NotificationSubscriptionLegacy.objects.get(_id=project._id + '_comments') assert len(user_subscriptions) == 7 # subscribed to project, fork, and user settings assert 'file_updated' in event_types @@ -301,25 +308,25 @@ def test_new_fork_creator_is_subscribed_with_default_global_settings(self): def test_new_node_creator_is_not_subscribed_with_default_global_settings(self): user = factories.UserFactory() - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_comments', user=user, event_name='global_comments' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_file_updated', user=user, event_name='global_file_updated' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_comment_replies', user=user, event_name='global_comment_replies' ).add_user_to_subscription(user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_' + 'global_mentions', user=user, event_name='global_mentions' @@ -353,13 +360,13 @@ def test_contributor_subscribed_when_added_to_component(self): user = factories.UserFactory() contributor = factories.UserFactory() - factories.NotificationSubscriptionFactory( + 
factories.NotificationSubscriptionLegacyFactory( _id=contributor._id + '_' + 'global_comments', user=contributor, event_name='global_comments' ).add_user_to_subscription(contributor, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=contributor._id + '_' + 'global_file_updated', user=contributor, event_name='global_file_updated' @@ -371,8 +378,8 @@ def test_contributor_subscribed_when_added_to_component(self): contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor)) event_types = [sub.event_name for sub in contributor_subscriptions] - file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments') + file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_file_updated') + comments_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_comments') assert len(contributor_subscriptions) == 4 # subscribed to both node and user settings assert 'file_updated' in event_types @@ -416,7 +423,7 @@ def test_create_new_subscription(self): # check that subscription was created event_id = self.node._id + '_' + 'comments' - s = NotificationSubscription.objects.get(_id=event_id) + s = NotificationSubscriptionLegacy.objects.get(_id=event_id) # check that user was added to notification_type field assert payload['id'] == s.owner._id @@ -455,7 +462,7 @@ def test_adopt_parent_subscription_default(self): self.app.post(url, json=payload, auth=self.node.creator.auth) event_id = self.node._id + '_' + 'comments' # confirm subscription was created because parent had default subscription - s = NotificationSubscription.objects.filter(_id=event_id).count() + s = NotificationSubscriptionLegacy.objects.filter(_id=event_id).count() assert 0 == s def test_change_subscription_to_adopt_parent_subscription_removes_user(self): @@ -469,7 +476,7 @@ def test_change_subscription_to_adopt_parent_subscription_removes_user(self): # check that subscription was created event_id = self.node._id + '_' + 'comments' - s = NotificationSubscription.objects.get(_id=event_id) + s = NotificationSubscriptionLegacy.objects.get(_id=event_id) # change subscription to adopt_parent new_payload = { @@ -510,7 +517,7 @@ def setUp(self): self.project.add_contributor(contributor=self.contributor, permissions=permissions.READ) self.project.save() - self.subscription = NotificationSubscription.objects.get( + self.subscription = NotificationSubscriptionLegacy.objects.get( node=self.project, _id=self.project._id + '_comments' ) @@ -519,7 +526,7 @@ def setUp(self): self.node.add_contributor(contributor=self.project.creator, permissions=permissions.ADMIN) self.node.save() - self.node_subscription = NotificationSubscription.objects.get( + self.node_subscription = NotificationSubscriptionLegacy.objects.get( _id=self.node._id + '_comments', node=self.node ) @@ -560,10 +567,10 @@ def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self): project = factories.ProjectFactory() component = factories.NodeFactory(parent=project, creator=project.creator) - s = NotificationSubscription.objects.filter(email_transactional=project.creator) + s = NotificationSubscriptionLegacy.objects.filter(email_transactional=project.creator) assert s.count() == 2 - s = NotificationSubscription.objects.filter(email_transactional=component.creator) + s = 
NotificationSubscriptionLegacy.objects.filter(email_transactional=component.creator) assert s.count() == 2 with capture_signals() as mock_signals: @@ -575,17 +582,17 @@ def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self): assert component.is_deleted assert mock_signals.signals_sent() == {node_deleted} - s = NotificationSubscription.objects.filter(email_transactional=project.creator) + s = NotificationSubscriptionLegacy.objects.filter(email_transactional=project.creator) assert s.count() == 0 - s = NotificationSubscription.objects.filter(email_transactional=component.creator) + s = NotificationSubscriptionLegacy.objects.filter(email_transactional=component.creator) assert s.count() == 0 - with pytest.raises(NotificationSubscription.DoesNotExist): - NotificationSubscription.objects.get(node=project) + with pytest.raises(NotificationSubscriptionLegacy.DoesNotExist): + NotificationSubscriptionLegacy.objects.get(node=project) - with pytest.raises(NotificationSubscription.DoesNotExist): - NotificationSubscription.objects.get(node=component) + with pytest.raises(NotificationSubscriptionLegacy.DoesNotExist): + NotificationSubscriptionLegacy.objects.get(node=component) def list_or_dict(data): @@ -671,7 +678,7 @@ def setUp(self): self.user = factories.UserFactory() self.project = factories.ProjectFactory(creator=self.user) - self.project_subscription = NotificationSubscription.objects.get( + self.project_subscription = NotificationSubscriptionLegacy.objects.get( node=self.project, _id=self.project._id + '_comments', event_name='comments' @@ -682,7 +689,7 @@ def setUp(self): self.node = factories.NodeFactory(parent=self.project, creator=self.user) - self.node_comments_subscription = factories.NotificationSubscriptionFactory( + self.node_comments_subscription = factories.NotificationSubscriptionLegacyFactory( _id=self.node._id + '_' + 'comments', node=self.node, event_name='comments' @@ -691,19 +698,19 @@ def setUp(self): self.node_comments_subscription.email_transactional.add(self.user) self.node_comments_subscription.save() - self.node_subscription = list(NotificationSubscription.objects.filter(node=self.node)) + self.node_subscription = list(NotificationSubscriptionLegacy.objects.filter(node=self.node)) - self.user_subscription = [factories.NotificationSubscriptionFactory( + self.user_subscription = [factories.NotificationSubscriptionLegacyFactory( _id=self.user._id + '_' + 'comment_replies', user=self.user, event_name='comment_replies' ), - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=self.user._id + '_' + 'global_comment', user=self.user, event_name='global_comment' ), - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=self.user._id + '_' + 'global_file_updated', user=self.user, event_name='global_file_updated' @@ -770,7 +777,7 @@ def test_get_configured_project_ids_excludes_node_with_project_category(self): def test_get_configured_project_ids_includes_top_level_private_projects_if_subscriptions_on_node(self): private_project = factories.ProjectFactory() node = factories.NodeFactory(parent=private_project) - node_comments_subscription = factories.NotificationSubscriptionFactory( + node_comments_subscription = factories.NotificationSubscriptionLegacyFactory( _id=node._id + '_' + 'comments', node=node, event_name='comments' @@ -903,7 +910,7 @@ def test_format_data_user_subscriptions_includes_private_parent_if_configured_ch private_project = factories.ProjectFactory() node = 
factories.NodeFactory(parent=private_project) - node_comments_subscription = factories.NotificationSubscriptionFactory( + node_comments_subscription = factories.NotificationSubscriptionLegacyFactory( _id=node._id + '_' + 'comments', node=node, event_name='comments' @@ -934,7 +941,7 @@ def test_format_data_user_subscriptions_if_children_points_to_parent(self): private_project = factories.ProjectFactory(creator=self.user) node = factories.NodeFactory(parent=private_project, creator=self.user) node.save() - node_comments_subscription = factories.NotificationSubscriptionFactory( + node_comments_subscription = factories.NotificationSubscriptionLegacyFactory( _id=node._id + '_' + 'comments', node=node, event_name='comments' @@ -1170,19 +1177,19 @@ def setUp(self): self.base_project.add_contributor(self.user_3, permissions=permissions.WRITE) self.shared_node.add_contributor(self.user_3, permissions=permissions.WRITE) # Setting basic subscriptions - self.base_sub = factories.NotificationSubscriptionFactory( + self.base_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.base_project._id + '_file_updated', node=self.base_project, event_name='file_updated' ) self.base_sub.save() - self.shared_sub = factories.NotificationSubscriptionFactory( + self.shared_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.shared_node._id + '_file_updated', node=self.shared_node, event_name='file_updated' ) self.shared_sub.save() - self.private_sub = factories.NotificationSubscriptionFactory( + self.private_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.private_node._id + '_file_updated', node=self.private_node, event_name='file_updated' @@ -1196,7 +1203,7 @@ def test_no_subscription(self): def test_no_subscribers(self): node = factories.NodeFactory() - node_sub = factories.NotificationSubscriptionFactory( + node_sub = factories.NotificationSubscriptionLegacyFactory( _id=node._id + '_file_updated', node=node, event_name='file_updated' @@ -1260,7 +1267,7 @@ def test_several_nodes_deep_precedence(self): node2 = factories.NodeFactory(parent=self.shared_node) node3 = factories.NodeFactory(parent=node2) node4 = factories.NodeFactory(parent=node3) - node4_subscription = factories.NotificationSubscriptionFactory( + node4_subscription = factories.NotificationSubscriptionLegacyFactory( _id=node4._id + '_file_updated', node=node4, event_name='file_updated' @@ -1284,14 +1291,14 @@ def setUp(self): self.user_4 = factories.AuthUserFactory() self.project = factories.ProjectFactory(creator=self.user_1) self.private_node = factories.NodeFactory(parent=self.project, is_public=False, creator=self.user_1) - self.sub = factories.NotificationSubscriptionFactory( + self.sub = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_file_updated', node=self.project, event_name='file_updated' ) self.sub.email_transactional.add(self.user_1) self.sub.save() - self.file_sub = factories.NotificationSubscriptionFactory( + self.file_sub = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_xyz42_file_updated', node=self.project, event_name='xyz42_file_updated' @@ -1407,7 +1414,7 @@ def setUp(self): super().setUp() self.user = factories.AuthUserFactory() self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionFactory( + self.project_subscription = factories.NotificationSubscriptionLegacyFactory( _id=self.project._id + '_' + 'comments', node=self.project, event_name='comments' @@ -1417,13 +1424,13 @@ def setUp(self): 
self.project_subscription.save() self.node = factories.NodeFactory(parent=self.project) - self.node_subscription = factories.NotificationSubscriptionFactory( + self.node_subscription = factories.NotificationSubscriptionLegacyFactory( _id=self.node._id + '_comments', node=self.node, event_name='comments' ) self.node_subscription.save() - self.user_subscription = factories.NotificationSubscriptionFactory( + self.user_subscription = factories.NotificationSubscriptionLegacyFactory( _id=self.user._id + '_' + 'global_comment_replies', node=self.node, event_name='global_comment_replies' @@ -1441,7 +1448,7 @@ def test_notify_no_subscription(self, mock_store): @mock.patch('website.notifications.emails.store_emails') def test_notify_no_subscribers(self, mock_store): node = factories.NodeFactory() - node_subscription = factories.NotificationSubscriptionFactory( + node_subscription = factories.NotificationSubscriptionLegacyFactory( _id=node._id + '_comments', node=node, event_name='comments' @@ -1469,7 +1476,7 @@ def test_notify_does_not_send_to_exclude(self, mock_store): def test_notify_does_not_send_to_users_subscribed_to_none(self, mock_store): node = factories.NodeFactory() user = factories.UserFactory() - node_subscription = factories.NotificationSubscriptionFactory( + node_subscription = factories.NotificationSubscriptionLegacyFactory( _id=node._id + '_comments', node=node, event_name='comments' @@ -1485,7 +1492,7 @@ def test_notify_does_not_send_to_users_subscribed_to_none(self, mock_store): def test_notify_mentions_does_not_send_to_mentioned_users_subscribed_to_none(self, mock_store): node = factories.NodeFactory() user = factories.UserFactory() - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_global_mentions', node=self.node, event_name='global_mentions' @@ -1498,7 +1505,7 @@ def test_notify_mentions_does_not_send_to_mentioned_users_subscribed_to_none(sel @mock.patch('website.notifications.emails.store_emails') def test_notify_mentions_does_send_to_mentioned_users(self, mock_store): user = factories.UserFactory() - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_global_mentions', node=self.node, event_name='global_mentions' @@ -1572,7 +1579,7 @@ def test_check_node_one(self): def test_check_user_comment_reply_subscription_if_email_not_sent_to_target_user(self, mock_notify): # user subscribed to comment replies user = factories.UserFactory() - user_subscription = factories.NotificationSubscriptionFactory( + user_subscription = factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_comments', user=user, event_name='comment_replies' @@ -1603,7 +1610,7 @@ def test_check_user_comment_reply_subscription_if_email_not_sent_to_target_user( def test_check_user_comment_reply_only_calls_once(self, mock_notify): # user subscribed to comment replies user = factories.UserFactory() - user_subscription = factories.NotificationSubscriptionFactory( + user_subscription = factories.NotificationSubscriptionLegacyFactory( _id=user._id + '_comments', user=user, event_name='comment_replies' @@ -1885,19 +1892,19 @@ def setUp(self): 'provider_support_email': settings.OSF_SUPPORT_EMAIL, } self.action = factories.ReviewActionFactory() - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=self.user._id + '_' + 'global_comments', user=self.user, event_name='global_comments' ).add_user_to_subscription(self.user, 'email_transactional') - 
factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=self.user._id + '_' + 'global_file_updated', user=self.user, event_name='global_file_updated' ).add_user_to_subscription(self.user, 'email_transactional') - factories.NotificationSubscriptionFactory( + factories.NotificationSubscriptionLegacyFactory( _id=self.user._id + '_' + 'global_reviews', user=self.user, event_name='global_reviews' @@ -1956,7 +1963,7 @@ def setUp(self): } self.action = factories.ReviewActionFactory() - self.subscription = NotificationSubscription.load(self.provider._id+'_new_pending_submissions') + self.subscription = NotificationSubscriptionLegacy.load(self.provider._id+'_new_pending_submissions') self.subscription.add_user_to_subscription(self.moderator_transacitonal, 'email_transactional') self.subscription.add_user_to_subscription(self.moderator_digest, 'email_digest') @@ -1971,7 +1978,7 @@ def test_reviews_submit_notification(self, mock_store): self.context_info_submission['profile_image_url'] = get_profile_image_url(self.context_info_submission['referrer']) self.context_info_submission['reviews_submission_url'] = f'{settings.DOMAIN}reviews/preprints/{provider._id}/{preprint._id}' listeners.reviews_submit_notification_moderators(self, time_now, self.context_info_submission) - subscription = NotificationSubscription.load(self.provider._id + '_new_pending_submissions') + subscription = NotificationSubscriptionLegacy.load(self.provider._id + '_new_pending_submissions') digest_subscriber_ids = list(subscription.email_digest.all().values_list('guids___id', flat=True)) instant_subscriber_ids = list(subscription.email_transactional.all().values_list('guids___id', flat=True)) @@ -2009,7 +2016,7 @@ def test_reviews_request_notification(self, mock_store): self.context_info_request[ 'reviewable']._id) listeners.reviews_withdrawal_requests_notification(self, time_now, self.context_info_request) - subscription = NotificationSubscription.load(self.provider._id + '_new_pending_submissions') + subscription = NotificationSubscriptionLegacy.load(self.provider._id + '_new_pending_submissions') digest_subscriber_ids = subscription.email_digest.all().values_list('guids___id', flat=True) instant_subscriber_ids = subscription.email_transactional.all().values_list('guids___id', flat=True) mock_store.assert_any_call(QuerySetMatcher(digest_subscriber_ids), diff --git a/website/notifications/emails.py b/website/notifications/emails.py index d26d43351d5..56f513920af 100644 --- a/website/notifications/emails.py +++ b/website/notifications/emails.py @@ -2,7 +2,8 @@ from babel import dates, core, Locale -from osf.models import AbstractNode, NotificationDigest, NotificationSubscription +from osf.models import AbstractNode, NotificationSubscriptionLegacy +from osf.models.notifications import NotificationDigest from osf.utils.permissions import ADMIN, READ from website import mails from website.notifications import constants @@ -159,7 +160,7 @@ def check_node(node, event): """Return subscription for a particular node and event.""" node_subscriptions = {key: [] for key in constants.NOTIFICATION_TYPES} if node: - subscription = NotificationSubscription.load(utils.to_subscription_key(node._id, event)) + subscription = NotificationSubscriptionLegacy.load(utils.to_subscription_key(node._id, event)) for notification_type in node_subscriptions: users = getattr(subscription, notification_type, []) if users: @@ -172,7 +173,7 @@ def check_node(node, event): def get_user_subscriptions(user, event): if 
user.is_disabled: return {} - user_subscription = NotificationSubscription.load(utils.to_subscription_key(user._id, event)) + user_subscription = NotificationSubscriptionLegacy.load(utils.to_subscription_key(user._id, event)) if user_subscription: return {key: list(getattr(user_subscription, key).all().values_list('guids___id', flat=True)) for key in constants.NOTIFICATION_TYPES} else: diff --git a/website/notifications/utils.py b/website/notifications/utils.py index af8275ab5fb..c2b229295d4 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -91,10 +91,10 @@ def remove_supplemental_node(node): @app.task(max_retries=5, default_retry_delay=60) def remove_subscription_task(node_id): AbstractNode = apps.get_model('osf.AbstractNode') - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') node = AbstractNode.load(node_id) - NotificationSubscription.objects.filter(node=node).delete() + NotificationSubscriptionLegacy.objects.filter(node=node).delete() parent = node.parent_node if parent and parent.child_node_subscriptions: @@ -144,12 +144,12 @@ def users_to_remove(source_event, source_node, new_node): :param new_node: Node instance where a sub or new sub will be. :return: Dict of notification type lists with user_ids """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') removed_users = {key: [] for key in constants.NOTIFICATION_TYPES} if source_node == new_node: return removed_users - old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event)) - old_node_sub = NotificationSubscription.load(to_subscription_key(source_node._id, + old_sub = NotificationSubscriptionLegacy.load(to_subscription_key(source_node._id, source_event)) + old_node_sub = NotificationSubscriptionLegacy.load(to_subscription_key(source_node._id, '_'.join(source_event.split('_')[-2:]))) if not old_sub and not old_node_sub: return removed_users @@ -172,11 +172,11 @@ def move_subscription(remove_users, source_event, source_node, new_event, new_no :param new_node: Instance of Node :return: Returns a NOTIFICATION_TYPES list of removed users without permissions """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') OSFUser = apps.get_model('osf.OSFUser') if source_node == new_node: return - old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event)) + old_sub = NotificationSubscriptionLegacy.load(to_subscription_key(source_node._id, source_event)) if not old_sub: return elif old_sub: @@ -237,8 +237,8 @@ def check_project_subscriptions_are_all_none(user, node): def get_all_user_subscriptions(user, extra=None): """ Get all Subscription objects that the user is subscribed to""" - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - queryset = NotificationSubscription.objects.filter( + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') + queryset = NotificationSubscriptionLegacy.objects.filter( Q(none=user.pk) | Q(email_digest=user.pk) | Q(email_transactional=user.pk) @@ -392,14 +392,14 @@ def get_parent_notification_type(node, event, user): :return: str notification type (e.g. 
'email_transactional') """ AbstractNode = apps.get_model('osf.AbstractNode') - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') if node and isinstance(node, AbstractNode) and node.parent_node and node.parent_node.has_permission(user, READ): parent = node.parent_node key = to_subscription_key(parent._id, event) try: - subscription = NotificationSubscription.objects.get(_id=key) - except NotificationSubscription.DoesNotExist: + subscription = NotificationSubscriptionLegacy.objects.get(_id=key) + except NotificationSubscriptionLegacy.DoesNotExist: return get_parent_notification_type(parent, event, user) for notification_type in constants.NOTIFICATION_TYPES: @@ -429,19 +429,19 @@ def check_if_all_global_subscriptions_are_none(user): # This function predates comment mentions, which is a global_ notification that cannot be disabled # Therefore, an actual check would never return True. # If this changes, an optimized query would look something like: - # not NotificationSubscription.objects.filter(Q(event_name__startswith='global_') & (Q(email_digest=user.pk)|Q(email_transactional=user.pk))).exists() + # not NotificationSubscriptionLegacy.objects.filter(Q(event_name__startswith='global_') & (Q(email_digest=user.pk)|Q(email_transactional=user.pk))).exists() return False def subscribe_user_to_global_notifications(user): - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') notification_type = 'email_transactional' user_events = constants.USER_SUBSCRIPTIONS_AVAILABLE for user_event in user_events: user_event_id = to_subscription_key(user._id, user_event) # get_or_create saves on creation - subscription, created = NotificationSubscription.objects.get_or_create(_id=user_event_id, user=user, event_name=user_event) + subscription, created = NotificationSubscriptionLegacy.objects.get_or_create(_id=user_event_id, user=user, event_name=user_event) subscription.add_user_to_subscription(user, notification_type) subscription.save() @@ -450,7 +450,7 @@ def subscribe_user_to_notifications(node, user): """ Update the notification settings for the creator or contributors :param user: User to subscribe to notifications """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') Preprint = apps.get_model('osf.Preprint') DraftRegistration = apps.get_model('osf.DraftRegistration') if isinstance(node, Preprint): @@ -476,16 +476,16 @@ def subscribe_user_to_notifications(node, user): for event in events: event_id = to_subscription_key(target_id, event) global_event_id = to_subscription_key(user._id, 'global_' + event) - global_subscription = NotificationSubscription.load(global_event_id) + global_subscription = NotificationSubscriptionLegacy.load(global_event_id) - subscription = NotificationSubscription.load(event_id) + subscription = NotificationSubscriptionLegacy.load(event_id) # If no subscription for component and creator is the user, do not create subscription # If no subscription exists for the component, this means that it should adopt its # parent's settings if not (node and node.parent_node and not subscription and node.creator == user): if not subscription: - subscription = NotificationSubscription(_id=event_id, owner=node, event_name=event) + subscription = 
NotificationSubscriptionLegacy(_id=event_id, owner=node, event_name=event) # Need to save here in order to access m2m fields subscription.save() if global_subscription: diff --git a/website/notifications/views.py b/website/notifications/views.py index 8ca4775367d..1cbb62ee08d 100644 --- a/website/notifications/views.py +++ b/website/notifications/views.py @@ -6,7 +6,8 @@ from framework.auth.decorators import must_be_logged_in from framework.exceptions import HTTPError -from osf.models import AbstractNode, NotificationSubscription, Registration +from osf.models import AbstractNode, Registration +from osf.models.notifications import NotificationSubscriptionLegacy from osf.utils.permissions import READ from website.notifications import utils from website.notifications.constants import NOTIFICATION_TYPES @@ -95,17 +96,17 @@ def configure_subscription(auth): raise HTTPError(http_status.HTTP_400_BAD_REQUEST) # If adopt_parent make sure that this subscription is None for the current User - subscription = NotificationSubscription.load(event_id) + subscription = NotificationSubscriptionLegacy.load(event_id) if not subscription: return {} # We're done here subscription.remove_user_from_subscription(user) return {} - subscription = NotificationSubscription.load(event_id) + subscription = NotificationSubscriptionLegacy.load(event_id) if not subscription: - subscription = NotificationSubscription(_id=event_id, owner=owner, event_name=event) + subscription = NotificationSubscriptionLegacy(_id=event_id, owner=owner, event_name=event) subscription.save() if node and node._id not in user.notifications_configured: diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index 27a15c2c337..d6f3471dac7 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -71,7 +71,7 @@ def reviews_submit_notification_moderators(self, timestamp, context): Handle email notifications to notify moderators of new submissions or resubmission. """ # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscription + from osf.models import NotificationSubscriptionLegacy from website.profile.utils import get_profile_image_url from website.notifications.emails import store_emails @@ -103,7 +103,7 @@ def reviews_submit_notification_moderators(self, timestamp, context): context['message'] = f'submitted "{resource.title}".' 
# Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription, created = NotificationSubscription.objects.get_or_create( + provider_subscription, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_new_pending_submissions', provider=provider ) @@ -138,7 +138,7 @@ def reviews_submit_notification_moderators(self, timestamp, context): @reviews_signals.reviews_withdraw_requests_notification_moderators.connect def reviews_withdraw_requests_notification_moderators(self, timestamp, context): # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscription + from osf.models import NotificationSubscriptionLegacy from website.profile.utils import get_profile_image_url from website.notifications.emails import store_emails @@ -146,7 +146,7 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context): provider = resource.provider # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription, created = NotificationSubscription.objects.get_or_create( + provider_subscription, created = NotificationSubscriptionLegacy.objects.get_or_create( _id=f'{provider._id}_new_pending_withdraw_requests', provider=provider ) @@ -191,13 +191,13 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context): @reviews_signals.reviews_email_withdrawal_requests.connect def reviews_withdrawal_requests_notification(self, timestamp, context): # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscription + from osf.models import NotificationSubscriptionLegacy from website.notifications.emails import store_emails from website.profile.utils import get_profile_image_url from website import settings # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription = NotificationSubscription.load( + provider_subscription = NotificationSubscriptionLegacy.load( '{}_new_pending_submissions'.format(context['reviewable'].provider._id)) preprint = context['reviewable'] preprint_word = preprint.provider.preprint_word From 69231e9f13926d41d52af0654a64bd2779d237cf Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 27 May 2025 13:52:43 -0400 Subject: [PATCH 013/176] add new notificationsubscription class to views --- api/subscriptions/fields.py | 12 ++ api/subscriptions/permissions.py | 9 +- api/subscriptions/serializers.py | 51 +++----- api/subscriptions/views.py | 66 ++-------- .../views/test_subscriptions_detail.py | 115 ++++++++++++------ osf/models/notification.py | 6 + osf_tests/factories.py | 11 ++ 7 files changed, 144 insertions(+), 126 deletions(-) create mode 100644 api/subscriptions/fields.py diff --git a/api/subscriptions/fields.py b/api/subscriptions/fields.py new file mode 100644 index 00000000000..c26ffaf5d4e --- /dev/null +++ b/api/subscriptions/fields.py @@ -0,0 +1,12 @@ +from rest_framework import serializers as ser +from osf.models import NotificationSubscription + +class FrequencyField(ser.ChoiceField): + def __init__(self, **kwargs): + super().__init__(choices=['none', 'instantly', 'daily', 'weekly', 'monthly'], **kwargs) + + def to_representation(self, obj: NotificationSubscription): + return obj.message_frequency + + def to_internal_value(self, freq): + return super().to_internal_value(freq) diff --git a/api/subscriptions/permissions.py b/api/subscriptions/permissions.py index f0f3553ad6c..a07eae6e81d 100644 --- 
a/api/subscriptions/permissions.py +++ b/api/subscriptions/permissions.py @@ -1,13 +1,10 @@ from rest_framework import permissions -from osf.models.notifications import NotificationSubscriptionLegacy +from osf.models.notification import NotificationSubscription class IsSubscriptionOwner(permissions.BasePermission): def has_object_permission(self, request, view, obj): - assert isinstance(obj, NotificationSubscriptionLegacy), f'obj must be a NotificationSubscriptionLegacy; got {obj}' - user_id = request.user.id - return obj.none.filter(id=user_id).exists() \ - or obj.email_transactional.filter(id=user_id).exists() \ - or obj.email_digest.filter(id=user_id).exists() + assert isinstance(obj, NotificationSubscription), f'obj must be a NotificationSubscription; got {obj}' + return obj.user == request.user diff --git a/api/subscriptions/serializers.py b/api/subscriptions/serializers.py index da7aadbb1a4..2bb1041d227 100644 --- a/api/subscriptions/serializers.py +++ b/api/subscriptions/serializers.py @@ -1,58 +1,43 @@ +from django.contrib.contenttypes.models import ContentType from rest_framework import serializers as ser -from rest_framework.exceptions import ValidationError from api.nodes.serializers import RegistrationProviderRelationshipField from api.collections_providers.fields import CollectionProviderRelationshipField from api.preprints.serializers import PreprintProviderRelationshipField +from osf.models import Node from website.util import api_v2_url -from api.base.serializers import JSONAPISerializer, LinksField - -NOTIFICATION_TYPES = { - 'none': 'none', - 'instant': 'email_transactional', - 'daily': 'email_digest', -} - - -class FrequencyField(ser.Field): - def to_representation(self, obj): - user_id = self.context['request'].user.id - if obj.email_transactional.filter(id=user_id).exists(): - return 'instant' - if obj.email_digest.filter(id=user_id).exists(): - return 'daily' - return 'none' - - def to_internal_value(self, frequency): - notification_type = NOTIFICATION_TYPES.get(frequency) - if notification_type: - return {'notification_type': notification_type} - raise ValidationError(f'Invalid frequency "{frequency}"') +from api.base.serializers import JSONAPISerializer +from .fields import FrequencyField class SubscriptionSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'id', 'event_name', + 'frequency', ]) - id = ser.CharField(source='_id', read_only=True) + id = ser.CharField(read_only=True) event_name = ser.CharField(read_only=True) frequency = FrequencyField(source='*', required=True) - links = LinksField({ - 'self': 'get_absolute_url', - }) class Meta: type_ = 'subscription' - def get_absolute_url(self, obj): - return obj.absolute_api_v2_url - def update(self, instance, validated_data): user = self.context['request'].user - notification_type = validated_data.get('notification_type') - instance.add_user_to_subscription(user, notification_type, save=True) + frequency = validated_data.get('frequency') + + if frequency != 'none' and instance.content_type == ContentType.objects.get_for_model(Node): + node = Node.objects.get( + id=instance.id, + content_type=instance.content_type, + ) + user_subs = node.parent_node.child_node_subscriptions + if node._id not in user_subs.setdefault(user._id, []): + user_subs[user._id].append(node._id) + node.parent_node.save() + return instance diff --git a/api/subscriptions/views.py b/api/subscriptions/views.py index a3c11a52aa8..e8c48b421b9 100644 --- a/api/subscriptions/views.py +++ b/api/subscriptions/views.py @@ -1,8 +1,8 @@ +from 
django.contrib.contenttypes.models import ContentType
 from rest_framework import generics
 from rest_framework import permissions as drf_permissions
 from rest_framework.exceptions import NotFound
 from django.core.exceptions import ObjectDoesNotExist
-from django.db.models import Q
 
 from framework.auth.oauth_scopes import CoreScopes
 from api.base.views import JSONAPIBaseView
@@ -16,13 +16,12 @@
 )
 from api.subscriptions.permissions import IsSubscriptionOwner
 from osf.models import (
-    NotificationSubscription,
     CollectionProvider,
     PreprintProvider,
     RegistrationProvider,
     AbstractProvider,
 )
-from osf.models.notifications import NotificationSubscriptionLegacy
+from osf.models.notification import NotificationSubscription
 
 
 class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
@@ -38,32 +37,20 @@ class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
     required_read_scopes = [CoreScopes.SUBSCRIPTIONS_READ]
     required_write_scopes = [CoreScopes.NULL]
 
-    def get_default_queryset(self):
-        user = self.request.user
-        return NotificationSubscriptionLegacy.objects.filter(
-            Q(none=user) |
-            Q(email_digest=user) |
-            Q(
-                email_transactional=user,
-            ),
-        ).distinct()
-
     def get_queryset(self):
-        return self.get_queryset_from_request()
+        return NotificationSubscription.objects.filter(
+            user=self.request.user,
+        )
 
 
 class AbstractProviderSubscriptionList(SubscriptionList):
-    def get_default_queryset(self):
-        user = self.request.user
-        return NotificationSubscriptionLegacy.objects.filter(
-            provider___id=self.kwargs['provider_id'],
-            provider__type=self.provider_class._typedmodels_type,
-        ).filter(
-            Q(none=user) |
-            Q(email_digest=user) |
-            Q(email_transactional=user),
-        ).distinct()
-
+    def get_queryset(self):
+        provider = AbstractProvider.objects.get(_id=self.kwargs['provider_id'])
+        return NotificationSubscription.objects.filter(
+            object_id=provider.id,
+            content_type=ContentType.objects.get_for_model(provider.__class__),
+            user=self.request.user,
+        )
 
 class SubscriptionDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView):
     view_name = 'notification-subscription-detail'
@@ -81,7 +68,7 @@ class SubscriptionDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView):
     def get_object(self):
         subscription_id = self.kwargs['subscription_id']
         try:
-            obj = NotificationSubscriptionLegacy.objects.get(_id=subscription_id)
+            obj = NotificationSubscription.objects.get(id=subscription_id)
         except ObjectDoesNotExist:
             raise NotFound
         self.check_object_permissions(self.request, obj)
@@ -101,33 +88,6 @@ class AbstractProviderSubscriptionDetail(SubscriptionDetail):
     required_write_scopes = [CoreScopes.SUBSCRIPTIONS_WRITE]
     provider_class = None
 
-    def __init__(self, *args, **kwargs):
-        assert issubclass(self.provider_class, AbstractProvider), 'Class must be subclass of AbstractProvider'
-        super().__init__(*args, **kwargs)
-
-    def get_object(self):
-        subscription_id = self.kwargs['subscription_id']
-        if self.kwargs.get('provider_id'):
-            provider = self.provider_class.objects.get(_id=self.kwargs.get('provider_id'))
-            try:
-                obj = NotificationSubscriptionLegacy.objects.get(
-                    _id=subscription_id,
-                    provider_id=provider.id,
-                )
-            except ObjectDoesNotExist:
-                raise NotFound
-        else:
-            try:
-                obj = NotificationSubscriptionLegacy.objects.get(
-                    _id=subscription_id,
-                    provider__type=self.provider_class._typedmodels_type,
-                )
-            except ObjectDoesNotExist:
-                raise NotFound
-        self.check_object_permissions(self.request, obj)
-        return obj
-
-
 class 
CollectionProviderSubscriptionDetail(AbstractProviderSubscriptionDetail): provider_class = CollectionProvider serializer_class = CollectionSubscriptionSerializer diff --git a/api_tests/subscriptions/views/test_subscriptions_detail.py b/api_tests/subscriptions/views/test_subscriptions_detail.py index f64c835ad10..a9d880c687f 100644 --- a/api_tests/subscriptions/views/test_subscriptions_detail.py +++ b/api_tests/subscriptions/views/test_subscriptions_detail.py @@ -1,8 +1,10 @@ import pytest from api.base.settings.defaults import API_BASE -from osf_tests.factories import AuthUserFactory, NotificationSubscriptionLegacyFactory - +from osf_tests.factories import ( + AuthUserFactory, + NotificationSubscriptionFactory +) @pytest.mark.django_db class TestSubscriptionDetail: @@ -16,18 +18,18 @@ def user_no_auth(self): return AuthUserFactory() @pytest.fixture() - def global_user_notification(self, user): - notification = NotificationSubscriptionLegacyFactory(_id=f'{user._id}_global', user=user, event_name='global') - notification.add_user_to_subscription(user, 'email_transactional') - return notification + def notification(self, user): + return NotificationSubscriptionFactory( + user=user, + ) @pytest.fixture() - def url(self, global_user_notification): - return f'/{API_BASE}subscriptions/{global_user_notification._id}/' + def url(self, notification): + return f'/{API_BASE}subscriptions/{notification.id}/' @pytest.fixture() def url_invalid(self): - return '/{}subscriptions/{}/'.format(API_BASE, 'invalid-notification-id') + return f'/{API_BASE}subscriptions/invalid-notification-id/' @pytest.fixture() def payload(self): @@ -51,56 +53,101 @@ def payload_invalid(self): } } - def test_subscription_detail(self, app, user, user_no_auth, global_user_notification, url, url_invalid, payload, payload_invalid): - # GET with valid notification_id - # Invalid user - res = app.get(url, auth=user_no_auth.auth, expect_errors=True) + def test_subscription_detail_invalid_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): + res = app.get( + url, + auth=user_no_auth.auth, + expect_errors=True + ) assert res.status_code == 403 - # No user - res = app.get(url, expect_errors=True) + + def test_subscription_detail_no_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): + res = app.get( + url, + expect_errors=True + ) assert res.status_code == 401 - # Valid user + + def test_subscription_detail_valid_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): + res = app.get(url, auth=user.auth) notification_id = res.json['data']['id'] assert res.status_code == 200 - assert notification_id == f'{user._id}_global' + assert notification_id == str(notification.id) - # GET with invalid notification_id - # No user + def test_subscription_detail_invalid_notification_id_no_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.get(url_invalid, expect_errors=True) assert res.status_code == 404 - # Existing user - res = app.get(url_invalid, auth=user.auth, expect_errors=True) + + def test_subscription_detail_invalid_notification_id_existing_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): + res = app.get( + url_invalid, + auth=user.auth, + expect_errors=True + ) assert res.status_code == 404 - # PATCH with valid notification_id and invalid data - # Invalid user + def 
test_subscription_detail_invalid_payload_403( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload_invalid, auth=user_no_auth.auth, expect_errors=True) assert res.status_code == 403 - # No user + + def test_subscription_detail_invalid_payload_401( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload_invalid, expect_errors=True) assert res.status_code == 401 - # Valid user - res = app.patch_json_api(url, payload_invalid, auth=user.auth, expect_errors=True) + + def test_subscription_detail_invalid_payload_400( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): + res = app.patch_json_api( + url, + payload_invalid, + auth=user.auth, + expect_errors=True + ) assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == 'Invalid frequency "invalid-frequency"' + assert res.json['errors'][0]['detail'] == '"invalid-frequency" is not a valid choice.' - # PATCH with invalid notification_id - # No user + def test_subscription_detail_patch_invalid_notification_id_no_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url_invalid, payload, expect_errors=True) assert res.status_code == 404 - # Existing user + + def test_subscription_detail_patch_invalid_notification_id_existing_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url_invalid, payload, auth=user.auth, expect_errors=True) assert res.status_code == 404 - # PATCH with valid notification_id and valid data - # Invalid user + def test_subscription_detail_patch_invalid_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload, auth=user_no_auth.auth, expect_errors=True) assert res.status_code == 403 - # No user + + def test_subscription_detail_patch_no_user( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload, expect_errors=True) assert res.status_code == 401 - # Valid user + + def test_subscription_detail_patch( + self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid + ): res = app.patch_json_api(url, payload, auth=user.auth) assert res.status_code == 200 assert res.json['data']['attributes']['frequency'] == 'none' diff --git a/osf/models/notification.py b/osf/models/notification.py index b95d5140ebc..6f0fae57067 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -296,6 +296,12 @@ def emit(self, user, subscribed_object=None, event_context=None): event_context=event_context ) + @property + def absolute_api_v2_url(self): + from api.base.utils import absolute_reverse + return absolute_reverse('institutions:institution-detail', kwargs={'institution_id': self._id, 'version': 'v2'}) + + class Notification(models.Model): subscription = models.ForeignKey( NotificationSubscription, diff --git a/osf_tests/factories.py b/osf_tests/factories.py index bf636677284..d5ece941465 100644 --- a/osf_tests/factories.py +++ b/osf_tests/factories.py @@ -1054,6 +1054,17 @@ class Meta: model = models.NotificationSubscriptionLegacy +class NotificationSubscriptionFactory(DjangoModelFactory): + class Meta: + model = models.NotificationSubscription + notification_type = factory.LazyAttribute(lambda 
o: NotificationTypeFactory()) + + +class NotificationTypeFactory(DjangoModelFactory): + class Meta: + model = models.NotificationType + + def make_node_lineage(): node1 = NodeFactory() node2 = NodeFactory(parent=node1) From a8b5727ce400fb84d3c1006531daaa5a721aab04 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Wed, 28 May 2025 16:52:26 +0300 Subject: [PATCH 014/176] update mails mock --- addons/boa/tests/test_tasks.py | 47 +- admin_tests/meetings/__init__.py | 0 admin_tests/meetings/test_forms.py | 80 -- admin_tests/meetings/test_serializers.py | 23 - admin_tests/meetings/test_views.py | 195 ----- .../views/test_crossref_email_response.py | 48 +- ...est_draft_registration_contributor_list.py | 18 +- .../views/test_draft_registration_list.py | 35 +- .../test_institution_relationship_nodes.py | 73 +- .../views/test_node_contributors_list.py | 26 +- api_tests/nodes/views/test_node_forks_list.py | 48 +- .../test_node_relationship_institutions.py | 209 ++--- .../views/test_preprint_contributors_list.py | 41 +- ...est_collections_provider_moderator_list.py | 41 +- .../test_preprint_provider_moderator_list.py | 36 +- api_tests/providers/tasks/test_bulk_upload.py | 61 +- .../views/test_registration_detail.py | 11 +- .../test_node_request_institutional_access.py | 151 +--- .../requests/views/test_node_request_list.py | 14 +- .../views/test_preprint_request_list.py | 7 +- .../views/test_request_actions_create.py | 37 +- api_tests/users/views/test_user_claim.py | 37 +- api_tests/users/views/test_user_list.py | 41 +- .../test_user_message_institutional_access.py | 63 +- api_tests/users/views/test_user_settings.py | 41 +- .../users/views/test_user_settings_detail.py | 12 +- conftest.py | 17 + .../test_check_crossref_dois.py | 15 +- .../test_email_all_users.py | 22 +- osf_tests/test_archiver.py | 115 +-- osf_tests/test_collection.py | 29 +- osf_tests/test_collection_submission.py | 172 +--- osf_tests/test_institution.py | 21 +- osf_tests/test_merging_users.py | 9 +- osf_tests/test_node.py | 9 +- osf_tests/test_queued_mail.py | 40 +- ...t_registration_moderation_notifications.py | 219 +---- osf_tests/test_reviewable.py | 15 +- osf_tests/test_schema_responses.py | 137 ++- osf_tests/test_user.py | 16 +- .../test_deactivate_requested_accounts.py | 21 +- scripts/tests/test_send_queued_mails.py | 17 +- scripts/tests/test_triggered_mails.py | 3 +- tests/test_adding_contributor_views.py | 215 ++--- tests/test_auth.py | 47 +- tests/test_auth_views.py | 54 +- tests/test_conferences.py | 786 ------------------ tests/test_misc_views.py | 25 +- tests/test_notifications.py | 43 +- tests/test_preprints.py | 28 +- tests/test_registrations/test_embargoes.py | 10 +- tests/test_registrations/test_retractions.py | 26 +- tests/test_spam_mixin.py | 10 +- tests/test_user_profile_view.py | 24 +- tests/test_webtests.py | 67 +- website/mails/mails.py | 32 +- 56 files changed, 897 insertions(+), 2742 deletions(-) delete mode 100644 admin_tests/meetings/__init__.py delete mode 100644 admin_tests/meetings/test_forms.py delete mode 100644 admin_tests/meetings/test_serializers.py delete mode 100644 admin_tests/meetings/test_views.py delete mode 100644 tests/test_conferences.py diff --git a/addons/boa/tests/test_tasks.py b/addons/boa/tests/test_tasks.py index e3bbf7f9c76..b2dcd6d86bc 100644 --- a/addons/boa/tests/test_tasks.py +++ b/addons/boa/tests/test_tasks.py @@ -38,6 +38,9 @@ def setUp(self): self.output_file_name = 'fake_boa_script_results.txt' self.job_id = '1a2b3c4d5e6f7g8' + from conftest import start_mock_send_grid 
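# --- Editor's sketch (not part of the patch) ----------------------------------
# The conftest.py hunk this import relies on (+17 lines) is not shown in the
# series, so the exact implementation of start_mock_send_grid and the
# mock_send_grid fixture is an assumption. A minimal version consistent with
# how the tests use them might look roughly like this; the patch target
# 'website.mails.send_email_over_sendgrid' is a placeholder name, not a symbol
# confirmed by the patch.
from unittest import mock

import pytest

_SEND_GRID_TARGET = 'website.mails.send_email_over_sendgrid'  # placeholder target


def start_mock_send_grid(test_case):
    """Patch the outbound SendGrid call for a unittest-style TestCase.

    Registered with addCleanup so the patch is removed when the test ends.
    """
    patcher = mock.patch(_SEND_GRID_TARGET)
    mocked = patcher.start()
    test_case.addCleanup(patcher.stop)
    return mocked


@pytest.fixture()
def mock_send_grid():
    """Pytest counterpart, used via @pytest.mark.usefixtures('mock_send_grid')."""
    with mock.patch(_SEND_GRID_TARGET) as mocked:
        yield mocked
# -------------------------------------------------------------------------------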
+ self.mock_send_grid = start_mock_send_grid(self) + def tearDown(self): super().tearDown() @@ -52,9 +55,10 @@ def test_boa_error_code(self): assert BoaErrorCode.FILE_TOO_LARGE_ERROR == 6 assert BoaErrorCode.JOB_TIME_OUT_ERROR == 7 + @mock.patch('website.mails.settings.USE_EMAIL', True) + @mock.patch('website.mails.settings.USE_CELERY', False) def test_handle_boa_error(self): - with mock.patch('addons.boa.tasks.execute_email_send', return_value=None) as mock_send_mail, \ - mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \ + with mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \ mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error: return_value = handle_boa_error( self.error_message, @@ -68,24 +72,7 @@ def test_handle_boa_error(self): output_file_name=self.output_file_name, job_id=self.job_id ) - mock_send_mail.assert_called_with( - to_addr=self.user_username, - mail=ADDONS_BOA_JOB_FAILURE, - fullname=self.user_fullname, - code=BoaErrorCode.UNKNOWN, - message=self.error_message, - query_file_name=self.query_file_name, - file_size=self.file_size, - max_file_size=boa_settings.MAX_SUBMISSION_SIZE, - query_file_full_path=self.file_full_path, - output_file_name=self.output_file_name, - job_id=self.job_id, - max_job_wait_hours=self.max_job_wait_hours, - project_url=self.project_url, - boa_job_list_url=boa_settings.BOA_JOB_LIST_URL, - boa_support_email=boa_settings.BOA_SUPPORT_EMAIL, - osf_support_email=osf_settings.OSF_SUPPORT_EMAIL, - ) + self.mock_send_grid.assert_called() mock_sentry_log_message.assert_called_with(self.error_message, skip_session=True) mock_logger_error.assert_called_with(self.error_message) assert return_value == BoaErrorCode.UNKNOWN @@ -167,9 +154,14 @@ def setUp(self): boa_settings.REFRESH_JOB_INTERVAL = DEFAULT_REFRESH_JOB_INTERVAL boa_settings.MAX_JOB_WAITING_TIME = DEFAULT_MAX_JOB_WAITING_TIME + from conftest import start_mock_send_grid + self.mock_send_grid = start_mock_send_grid(self) + def tearDown(self): super().tearDown() + @mock.patch('website.mails.settings.USE_EMAIL', True) + @mock.patch('website.mails.settings.USE_CELERY', False) async def test_submit_success(self): with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \ mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \ @@ -179,7 +171,6 @@ async def test_submit_success(self): mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \ mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None) as mock_async_sleep, \ - mock.patch('addons.boa.tasks.execute_email_send', return_value=None) as mock_send_mail, \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: return_value = await submit_to_boa_async( self.host, @@ -199,19 +190,7 @@ async def test_submit_success(self): assert self.mock_job.refresh.call_count == 4 assert mock_async_sleep.call_count == 4 mock_close.assert_called() - mock_send_mail.assert_called_with( - to_addr=self.user.username, - mail=ADDONS_BOA_JOB_COMPLETE, - fullname=self.user.fullname, - query_file_name=self.query_file_name, - query_file_full_path=self.file_full_path, - output_file_name=self.output_file_name, - job_id=self.mock_job.id, - project_url=self.project_url, - boa_job_list_url=boa_settings.BOA_JOB_LIST_URL, - 
boa_support_email=boa_settings.BOA_SUPPORT_EMAIL, - osf_support_email=osf_settings.OSF_SUPPORT_EMAIL, - ) + self.mock_send_grid.assert_called() mock_handle_boa_error.assert_not_called() async def test_download_error(self): diff --git a/admin_tests/meetings/__init__.py b/admin_tests/meetings/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/admin_tests/meetings/test_forms.py b/admin_tests/meetings/test_forms.py deleted file mode 100644 index 0417ffeeb8c..00000000000 --- a/admin_tests/meetings/test_forms.py +++ /dev/null @@ -1,80 +0,0 @@ -from tests.base import AdminTestCase -from osf_tests.factories import AuthUserFactory -from tests.test_conferences import ConferenceFactory - -from admin.meetings.forms import MeetingForm, MultiEmailField - -data = dict( - edit='False', - endpoint='short', - name='Much longer', - info_url='http://something.com', - logo_url='http://osf.io/eg634', - active='True', - admins='zzz@email.org', - public_projects='True', - poster='True', - talk='True', - submission1='poster', - submission2='talk', - submission1_plural='posters', - submission2_plural='talks', - meeting_title_type='Of course', - add_submission='No more', - mail_subject='Awesome', - mail_message_body='Nothings', - mail_attachment='Again', - homepage_link_text='Need to add to tests', -) - - -class TestMultiEmailField(AdminTestCase): - def test_to_python_nothing(self): - field = MultiEmailField() - res = field.to_python('') - assert res == [] - - def test_to_python_one(self): - field = MultiEmailField() - res = field.to_python('aaa@email.org') - assert res == ['aaa@email.org'] - - def test_to_python_more(self): - field = MultiEmailField() - res = field.to_python('aaa@email.org, bbb@email.org, ccc@email.org') - assert res == ['aaa@email.org', 'bbb@email.org', 'ccc@email.org'] - - -class TestMeetingForm(AdminTestCase): - def setUp(self): - super().setUp() - self.user = AuthUserFactory() - - def test_clean_admins_raise(self): - form = MeetingForm(data=data) - assert not form.is_valid() - assert 'admins' in form.errors - assert 'zzz@email.org' in form.errors['admins'][0] - assert 'does not have an OSF account' in form.errors['admins'][0] - - def test_clean_admins_okay(self): - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.values_list('address', flat=True).first()}) - form = MeetingForm(data=mod_data) - assert form.is_valid() - - def test_clean_endpoint_raise_not_exist(self): - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.values_list('address', flat=True).first(), 'edit': 'True'}) - form = MeetingForm(data=mod_data) - assert 'endpoint' in form.errors - assert 'Meeting not found with this endpoint to update' == form.errors['endpoint'][0] - - def test_clean_endpoint_raise_exists(self): - conf = ConferenceFactory() - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.values_list('address', flat=True).first(), - 'endpoint': conf.endpoint}) - form = MeetingForm(data=mod_data) - assert 'endpoint' in form.errors - assert 'A meeting with this endpoint exists already.' 
== form.errors['endpoint'][0] diff --git a/admin_tests/meetings/test_serializers.py b/admin_tests/meetings/test_serializers.py deleted file mode 100644 index 93fd7f6ac7c..00000000000 --- a/admin_tests/meetings/test_serializers.py +++ /dev/null @@ -1,23 +0,0 @@ -from tests.base import AdminTestCase -from tests.test_conferences import ConferenceFactory - -from admin.meetings.serializers import serialize_meeting - - -class TestsSerializeMeeting(AdminTestCase): - def setUp(self): - super().setUp() - self.conf = ConferenceFactory() - - def test_serialize(self): - res = serialize_meeting(self.conf) - assert isinstance(res, dict) - assert res['endpoint'] == self.conf.endpoint - assert res['name'] == self.conf.name - assert res['info_url'] == self.conf.info_url - assert res['logo_url'] == self.conf.logo_url - assert res['active'] == self.conf.active - assert res['public_projects'] == self.conf.public_projects - assert res['poster'] == self.conf.poster - assert res['talk'] == self.conf.talk - assert res['num_submissions'] == self.conf.valid_submissions.count() diff --git a/admin_tests/meetings/test_views.py b/admin_tests/meetings/test_views.py deleted file mode 100644 index bca7adbc14b..00000000000 --- a/admin_tests/meetings/test_views.py +++ /dev/null @@ -1,195 +0,0 @@ -import pytest -from django.test import RequestFactory -from django.http import Http404 -from django.urls import reverse -from django.contrib.auth.models import Permission -from django.core.exceptions import PermissionDenied - -from tests.base import AdminTestCase -from osf_tests.factories import AuthUserFactory -from tests.test_conferences import ConferenceFactory -from osf.models.conference import Conference, DEFAULT_FIELD_NAMES - -from admin_tests.utilities import setup_form_view -from admin_tests.meetings.test_forms import data -from admin.meetings.views import ( - MeetingListView, - MeetingCreateFormView, - MeetingFormView, - get_custom_fields, - get_admin_users, -) -from admin.meetings.forms import MeetingForm - - -class TestMeetingListView(AdminTestCase): - def setUp(self): - super().setUp() - Conference.objects.all().delete() - ConferenceFactory() - ConferenceFactory() - ConferenceFactory() - - def test_get_queryset(self): - view = MeetingListView() - assert len(view.get_queryset()) == 3 - - def test_no_user_permissions_raises_error(self): - user = AuthUserFactory() - request = RequestFactory().get(reverse('meetings:list')) - request.user = user - - with pytest.raises(PermissionDenied): - MeetingListView.as_view()(request) - - def test_correct_view_permissions(self): - user = AuthUserFactory() - - view_permission = Permission.objects.get(codename='view_conference') - user.user_permissions.add(view_permission) - user.save() - - request = RequestFactory().get(reverse('meetings:list')) - request.user = user - - response = MeetingListView.as_view()(request) - assert response.status_code == 200 - - -class TestMeetingFormView(AdminTestCase): - def setUp(self): - super().setUp() - self.conf = ConferenceFactory() - self.user = AuthUserFactory() - self.request = RequestFactory().post('/fake_path') - self.view = MeetingFormView - mod_data = dict(data) - mod_data.update({ - 'edit': 'True', - 'endpoint': self.conf.endpoint, - 'admins': self.user.emails.first().address, - 'location': 'Timbuktu, Mali', - 'start date': 'Dec 11 2014', - 'end_date': 'Jan 12 2013' - }) - self.form = MeetingForm(data=mod_data) - self.form.is_valid() - - self.url = reverse('meetings:detail', kwargs={'endpoint': self.conf.endpoint}) - - def 
test_dispatch_raise_404(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint='meh') - with pytest.raises(Http404): - view.dispatch(self.request, endpoint='meh') - - def test_get_context(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint=self.conf.endpoint) - view.conf = self.conf - res = view.get_context_data() - assert isinstance(res, dict) - assert 'endpoint' in res - assert res['endpoint'] == self.conf.endpoint - - def test_get_initial(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint=self.conf.endpoint) - view.conf = self.conf - res = view.get_initial() - assert isinstance(res, dict) - assert 'endpoint' in res - assert 'submission2_plural' in res - - def test_form_valid(self): - view = setup_form_view(self.view(), self.request, self.form, - endpoint=self.conf.endpoint) - view.conf = self.conf - view.form_valid(self.form) - self.conf.reload() - assert self.conf.admins.all()[0].emails.first().address == self.user.emails.first().address - assert self.conf.location == self.form.cleaned_data['location'] - assert self.conf.start_date == self.form.cleaned_data['start_date'] - - def test_no_user_permissions_raises_error(self): - request = RequestFactory().get(self.url) - request.user = self.user - - with pytest.raises(PermissionDenied): - self.view.as_view()(request, endpoint=self.conf.endpoint) - - def test_correct_view_permissions(self): - - view_permission = Permission.objects.get(codename='change_conference') - self.user.user_permissions.add(view_permission) - self.user.save() - - request = RequestFactory().get(self.url) - request.user = self.user - - response = self.view.as_view()(request, endpoint=self.conf.endpoint) - assert response.status_code == 200 - - -class TestMeetingCreateFormView(AdminTestCase): - def setUp(self): - super().setUp() - Conference.objects.all().delete() - self.user = AuthUserFactory() - self.request = RequestFactory().post('/fake_path') - self.view = MeetingCreateFormView - mod_data = dict(data) - mod_data.update({'admins': self.user.emails.first().address}) - self.form = MeetingForm(data=mod_data) - self.form.is_valid() - - self.url = reverse('meetings:create') - - def test_get_initial(self): - self.view().get_initial() - assert not self.view().initial['edit'] - assert self.view.initial['submission1'] == DEFAULT_FIELD_NAMES['submission1'] - - def test_form_valid(self): - view = setup_form_view(self.view(), self.request, self.form) - view.form_valid(self.form) - assert Conference.objects.filter(endpoint=data['endpoint']).count() == 1 - - def test_no_user_permissions_raises_error(self): - request = RequestFactory().get(self.url) - request.user = self.user - - with pytest.raises(PermissionDenied): - self.view.as_view()(request) - - def test_correct_view_permissions(self): - change_permission = Permission.objects.get(codename='view_conference') - view_permission = Permission.objects.get(codename='change_conference') - self.user.user_permissions.add(view_permission) - self.user.user_permissions.add(change_permission) - self.user.save() - - request = RequestFactory().get(self.url) - request.user = self.user - - response = self.view.as_view()(request) - assert response.status_code == 200 - - -class TestMeetingMisc(AdminTestCase): - def test_get_custom_fields(self): - res1, res2 = get_custom_fields(data) - assert isinstance(res1, dict) - assert isinstance(res2, dict) - for key in res1.keys(): - assert 'field' not in key - - def test_get_admin_users(self): - user_1 = AuthUserFactory() 
- user_2 = AuthUserFactory() - user_3 = AuthUserFactory() - emails = [user_1.emails.first().address, user_2.emails.first().address, user_3.emails.first().address] - res = get_admin_users(emails) - assert user_1 in res - assert user_2 in res - assert user_3 in res diff --git a/api_tests/crossref/views/test_crossref_email_response.py b/api_tests/crossref/views/test_crossref_email_response.py index 4594ea5ff16..775a0045c06 100644 --- a/api_tests/crossref/views/test_crossref_email_response.py +++ b/api_tests/crossref/views/test_crossref_email_response.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest import hmac import hashlib @@ -11,6 +10,7 @@ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestCrossRefEmailResponse: def make_mailgun_payload(self, crossref_response): @@ -155,49 +155,47 @@ def test_wrong_request_context_raises_permission_error(self, app, url, error_xml assert response.status_code == 400 - def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, error_xml): + def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, error_xml, mock_send_grid): assert not preprint.get_identifier_value('doi') - with mock.patch('framework.auth.views.mails.execute_email_send') as mock_send_mail: - context_data = self.make_mailgun_payload(crossref_response=error_xml) - app.post(url, context_data) - assert mock_send_mail.called + context_data = self.make_mailgun_payload(crossref_response=error_xml) + app.post(url, context_data) + assert mock_send_grid.called assert not preprint.get_identifier_value('doi') - def test_success_response_sets_doi(self, app, url, preprint, success_xml): + def test_success_response_sets_doi(self, app, url, preprint, success_xml, mock_send_grid): assert not preprint.get_identifier_value('doi') - with mock.patch('framework.auth.views.mails.execute_email_send') as mock_send_mail: - context_data = self.make_mailgun_payload(crossref_response=success_xml) - app.post(url, context_data) + context_data = self.make_mailgun_payload(crossref_response=success_xml) + mock_send_grid.reset_mock() + app.post(url, context_data) preprint.reload() - assert not mock_send_mail.called + assert not mock_send_grid.called assert preprint.get_identifier_value('doi') assert preprint.preprint_doi_created - def test_update_success_response(self, app, preprint, url): + def test_update_success_response(self, app, preprint, url, mock_send_grid): initial_value = 'TempDOIValue' preprint.set_identifier_value(category='doi', value=initial_value) update_xml = self.update_success_xml(preprint) - with mock.patch('framework.auth.views.mails.execute_email_send') as mock_send_mail: - context_data = self.make_mailgun_payload(crossref_response=update_xml) - app.post(url, context_data) + context_data = self.make_mailgun_payload(crossref_response=update_xml) + mock_send_grid.reset_mock() + app.post(url, context_data) - assert not mock_send_mail.called + assert not mock_send_grid.called assert preprint.get_identifier_value(category='doi') != initial_value - def test_update_success_does_not_set_preprint_doi_created(self, app, preprint, url): + def test_update_success_does_not_set_preprint_doi_created(self, app, preprint, url, mock_send_grid): preprint.set_identifier_value(category='doi', value='test') preprint.preprint_doi_created = timezone.now() preprint.save() update_xml = self.update_success_xml(preprint) pre_created = preprint.preprint_doi_created - with mock.patch('framework.auth.views.mails.execute_email_send'): - context_data = 
self.make_mailgun_payload(crossref_response=update_xml) - app.post(url, context_data) + context_data = self.make_mailgun_payload(crossref_response=update_xml) + app.post(url, context_data) assert preprint.preprint_doi_created == pre_created @@ -214,14 +212,14 @@ def test_success_batch_response(self, app, url): for preprint in preprint_list: assert preprint.get_identifier_value('doi') == settings.DOI_FORMAT.format(prefix=provider.doi_prefix, guid=preprint._id) - def test_confirmation_marks_legacy_doi_as_deleted(self, app, url, preprint): + def test_confirmation_marks_legacy_doi_as_deleted(self, app, url, preprint, mock_send_grid): legacy_value = 'IAmALegacyDOI' preprint.set_identifier_value(category='legacy_doi', value=legacy_value) update_xml = self.update_success_xml(preprint) - with mock.patch('framework.auth.views.mails.execute_email_send') as mock_send_mail: - context_data = self.make_mailgun_payload(crossref_response=update_xml) - app.post(url, context_data) + context_data = self.make_mailgun_payload(crossref_response=update_xml) + mock_send_grid.reset_mock() + app.post(url, context_data) - assert not mock_send_mail.called + assert not mock_send_grid.called assert preprint.identifiers.get(category='legacy_doi').deleted diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py index bb59fca7d59..bb1fe4d7b04 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py @@ -208,16 +208,16 @@ def create_serializer(self): return DraftRegistrationContributorsCreateSerializer +@pytest.mark.usefixtures('mock_send_grid') class TestDraftContributorCreateEmail(DraftRegistrationCRUDTestCase, TestNodeContributorCreateEmail): @pytest.fixture() def url_project_contribs(self, project_public): # Overrides TestNodeContributorCreateEmail return f'/{API_BASE}draft_registrations/{project_public._id}/contributors/' - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_sends_email( - self, mock_mail, app, user, user_two, - url_project_contribs): + self, app, user, user_two, + url_project_contribs, mock_send_grid): # Overrides TestNodeContributorCreateEmail url = f'{url_project_contribs}?send_email=draft_registration' payload = { @@ -238,7 +238,7 @@ def test_add_contributor_sends_email( res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 # Overrides TestNodeContributorCreateEmail def test_add_contributor_signal_if_default( @@ -264,9 +264,8 @@ def test_add_contributor_signal_if_default( assert res.json['errors'][0]['detail'] == 'default is not a valid email preference.' 
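# --- Editor's sketch (not part of the patch) ----------------------------------
# Throughout this commit the tests stop patching mails.execute_email_send and
# instead inspect the shared SendGrid mock. The assertion convention is the
# same everywhere: check call_count, then look at the keyword arguments of the
# recorded call (the tests assume the mocked call receives to_addr/subject
# kwargs). A small helper illustrating the pattern; it is not defined anywhere
# in the patch itself.
def assert_email_sent(mock_send_grid, to_addr, subject=None):
    """Assert that exactly one email went out, to the expected recipient."""
    assert mock_send_grid.call_count == 1
    kwargs = mock_send_grid.call_args[1]  # kwargs of the recorded call
    assert kwargs['to_addr'] == to_addr
    if subject is not None:
        assert kwargs['subject'] == subject
# -------------------------------------------------------------------------------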
# Overrides TestNodeContributorCreateEmail - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_sends_email( - self, mock_mail, app, user, url_project_contribs): + self, mock_send_grid, app, user, url_project_contribs): url = f'{url_project_contribs}?send_email=draft_registration' payload = { 'data': { @@ -279,7 +278,7 @@ def test_add_unregistered_contributor_sends_email( } res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 # Overrides TestNodeContributorCreateEmail @mock.patch('website.project.signals.unreg_contributor_added.send') @@ -301,9 +300,8 @@ def test_add_unregistered_contributor_signal_if_default( assert 'draft_registration' == kwargs['email_template'] # Overrides TestNodeContributorCreateEmail - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_without_email_no_email( - self, mock_mail, app, user, url_project_contribs): + self, mock_send_grid, app, user, url_project_contribs): url = f'{url_project_contribs}?send_email=draft_registration' payload = { 'data': { @@ -318,7 +316,7 @@ def test_add_unregistered_contributor_without_email_no_email( res = app.post_json_api(url, payload, auth=user.auth) assert contributor_added in mock_signal.signals_sent() assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 class TestDraftContributorBulkCreate(DraftRegistrationCRUDTestCase, TestNodeContributorBulkCreate): diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index b2c23123df0..d19c6d994d5 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from framework.auth.core import Auth @@ -18,7 +17,7 @@ ) from osf.utils.permissions import READ, WRITE, ADMIN -from website import mails, settings +from website import settings @pytest.fixture(autouse=True) @@ -158,6 +157,7 @@ def test_draft_with_deleted_registered_node_shows_up_in_draft_list( assert data[0]['attributes']['registration_metadata'] == {} +@pytest.mark.usefixtures('mock_send_grid') class TestDraftRegistrationCreateWithNode(AbstractDraftRegistrationTestCase): @pytest.fixture() @@ -336,11 +336,11 @@ def test_logged_in_non_contributor_cannot_create_draft( ) assert res.status_code == 403 - def test_create_project_based_draft_does_not_email_initiator(self, app, user, url_draft_registrations, payload): - with mock.patch.object(mails, 'execute_email_send') as mock_send_mail: - app.post_json_api(f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, auth=user.auth) + def test_create_project_based_draft_does_not_email_initiator(self, app, user, url_draft_registrations, payload, mock_send_grid): + mock_send_grid.reset_mock() + app.post_json_api(f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, auth=user.auth) - assert not mock_send_mail.called + assert not mock_send_grid.called def test_affiliated_institutions_are_copied_from_node_no_institutions(self, app, user, url_draft_registrations, payload): """ @@ -402,6 +402,7 @@ def test_affiliated_institutions_are_copied_from_user(self, app, user, url_draft assert list(draft_registration.affiliated_institutions.all()) == 
list(user.get_affiliated_institutions()) +@pytest.mark.usefixtures('mock_send_grid') class TestDraftRegistrationCreateWithoutNode(AbstractDraftRegistrationTestCase): @pytest.fixture() def url_draft_registrations(self): @@ -428,23 +429,21 @@ def test_admin_can_create_draft( assert draft.creator == user assert draft.has_permission(user, ADMIN) is True - def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload): + def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload, mock_send_grid): # Intercepting the send_mail call from website.project.views.contributor.notify_added_contributor - with mock.patch.object(mails, 'execute_email_send') as mock_send_mail: - resp = app.post_json_api( - f'{url_draft_registrations}?embed=branched_from&embed=initiator', - payload, - auth=user.auth - ) - assert mock_send_mail.called + app.post_json_api( + f'{url_draft_registrations}?embed=branched_from&embed=initiator', + payload, + auth=user.auth + ) + assert mock_send_grid.called # Python 3.6 does not support mock.call_args.args/kwargs # Instead, mock.call_args[0] is positional args, mock.call_args[1] is kwargs # (note, this is compatible with later versions) - mock_send_kwargs = mock_send_mail.call_args[1] - assert mock_send_kwargs['mail'] == mails.CONTRIBUTOR_ADDED_DRAFT_REGISTRATION - assert mock_send_kwargs['user'] == user - assert mock_send_kwargs['node'] == DraftRegistration.load(resp.json['data']['id']) + mock_send_kwargs = mock_send_grid.call_args[1] + assert mock_send_kwargs['subject'] == 'You have a new registration draft.' + assert mock_send_kwargs['to_addr'] == user.email def test_create_draft_with_provider( self, app, user, url_draft_registrations, non_default_provider, payload_with_non_default_provider diff --git a/api_tests/institutions/views/test_institution_relationship_nodes.py b/api_tests/institutions/views/test_institution_relationship_nodes.py index 50f78d53c74..c62d760710d 100644 --- a/api_tests/institutions/views/test_institution_relationship_nodes.py +++ b/api_tests/institutions/views/test_institution_relationship_nodes.py @@ -1,5 +1,4 @@ import pytest -from unittest import mock from api.base.settings.defaults import API_BASE from osf_tests.factories import ( @@ -26,6 +25,7 @@ def make_registration_payload(*node_ids): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestInstitutionRelationshipNodes: @pytest.fixture() @@ -373,47 +373,44 @@ def test_add_non_node(self, app, user, institution, url_institution_nodes): assert res.status_code == 404 def test_email_sent_on_affiliation_addition(self, app, user, institution, node_without_institution, - url_institution_nodes): + url_institution_nodes, mock_send_grid): node_without_institution.add_contributor(user, permissions='admin') current_institution = InstitutionFactory() node_without_institution.affiliated_institutions.add(current_institution) - with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: - res = app.post_json_api( - url_institution_nodes, - { - 'data': [ - { - 'type': 'nodes', 'id': node_without_institution._id - } - ] - }, - auth=user.auth - ) - - assert res.status_code == 201 - mocked_send_mail.assert_called_once() - - def test_email_sent_on_affiliation_removal(self, app, admin, institution, node_public, url_institution_nodes): + res = app.post_json_api( + url_institution_nodes, + { + 'data': [ + { + 'type': 'nodes', 'id': node_without_institution._id + } + ] + }, + auth=user.auth + ) + + 
assert res.status_code == 201 + mock_send_grid.assert_called_once() + + def test_email_sent_on_affiliation_removal(self, app, admin, institution, node_public, url_institution_nodes, mock_send_grid): current_institution = InstitutionFactory() node_public.affiliated_institutions.add(current_institution) - with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: - res = app.delete_json_api( - url_institution_nodes, - { - 'data': [ - { - 'type': 'nodes', 'id': node_public._id - } - ] - }, - auth=admin.auth - ) - - # Assert response is successful - assert res.status_code == 204 - - call_args = mocked_send_mail.call_args[1] - assert call_args['user'] == admin - assert node_public == call_args['node'] + res = app.delete_json_api( + url_institution_nodes, + { + 'data': [ + { + 'type': 'nodes', 'id': node_public._id + } + ] + }, + auth=admin.auth + ) + + # Assert response is successful + assert res.status_code == 204 + + call_args = mock_send_grid.call_args[1] + assert call_args['to_addr'] == admin.email diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index 56401f685dc..81910a6ef55 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -1202,15 +1202,15 @@ def test_add_contributor_validation( @pytest.mark.django_db @pytest.mark.enable_bookmark_creation @pytest.mark.enable_enqueue_task +@pytest.mark.usefixtures('mock_send_grid') class TestNodeContributorCreateEmail(NodeCRUDTestCase): @pytest.fixture() def url_project_contribs(self, project_public): return f'/{API_BASE}nodes/{project_public._id}/contributors/' - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_no_email_if_false( - self, mock_mail, app, user, url_project_contribs + self, mock_send_grid, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=false' payload = { @@ -1221,11 +1221,10 @@ def test_add_contributor_no_email_if_false( } res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_sends_email( - self, mock_mail, app, user, user_two, url_project_contribs + self, mock_send_grid, app, user, user_two, url_project_contribs ): url = f'{url_project_contribs}?send_email=default' payload = { @@ -1240,7 +1239,7 @@ def test_add_contributor_sends_email( res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @mock.patch('website.project.signals.contributor_added.send') def test_add_contributor_signal_if_default( @@ -1281,9 +1280,8 @@ def test_add_contributor_signal_preprint_email_disallowed( == 'preprint is not a valid email preference.' 
) - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_sends_email( - self, mock_mail, app, user, url_project_contribs + self, mock_send_grid, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=default' payload = { @@ -1294,7 +1292,7 @@ def test_add_unregistered_contributor_sends_email( } res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @mock.patch('website.project.signals.unreg_contributor_added.send') def test_add_unregistered_contributor_signal_if_default( @@ -1329,9 +1327,8 @@ def test_add_unregistered_contributor_signal_preprint_email_disallowed( == 'preprint is not a valid email preference.' ) - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_invalid_send_email_param( - self, mock_mail, app, user, url_project_contribs + self, mock_send_grid, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=true' payload = { @@ -1345,11 +1342,10 @@ def test_add_contributor_invalid_send_email_param( assert ( res.json['errors'][0]['detail'] == 'true is not a valid email preference.' ) - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_without_email_no_email( - self, mock_mail, app, user, url_project_contribs + self, mock_send_grid, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=default' payload = { @@ -1365,7 +1361,7 @@ def test_add_unregistered_contributor_without_email_no_email( res = app.post_json_api(url, payload, auth=user.auth) assert contributor_added in mock_signal.signals_sent() assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.django_db diff --git a/api_tests/nodes/views/test_node_forks_list.py b/api_tests/nodes/views/test_node_forks_list.py index cbabc57351a..8fc9f9eb35b 100644 --- a/api_tests/nodes/views/test_node_forks_list.py +++ b/api_tests/nodes/views/test_node_forks_list.py @@ -11,7 +11,6 @@ ForkFactory ) from rest_framework import exceptions -from website import mails from osf.utils import permissions from api.nodes.serializers import NodeForksSerializer @@ -204,6 +203,7 @@ def test_forks_list_does_not_show_registrations_of_forks( @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestNodeForkCreate: @pytest.fixture() @@ -419,36 +419,26 @@ def test_read_only_contributor_can_fork_private_registration( def test_send_email_success( self, app, user, public_project_url, - fork_data_with_title, public_project): - - with mock.patch.object(mails, 'execute_email_send', return_value=None) as mock_send_mail: - res = app.post_json_api( - public_project_url, - fork_data_with_title, - auth=user.auth) - assert res.status_code == 201 - assert res.json['data']['id'] == public_project.forks.first()._id - mock_send_mail.assert_called_with( - user.email, - mails.FORK_COMPLETED, - title=public_project.title, - guid=res.json['data']['id'], - can_change_preferences=False) + fork_data_with_title, public_project, mock_send_grid): + + res = app.post_json_api( + public_project_url, + fork_data_with_title, + auth=user.auth) + assert res.status_code == 201 + assert res.json['data']['id'] == public_project.forks.first()._id + call_args = mock_send_grid.call_args[1] + assert call_args['to_addr'] == user.email + assert 
call_args['subject'] == 'Your fork has completed' def test_send_email_failed( self, app, user, public_project_url, - fork_data_with_title, public_project): + fork_data_with_title, public_project, mock_send_grid): with mock.patch.object(NodeForksSerializer, 'save', side_effect=Exception()): - with mock.patch.object(mails, 'execute_email_send', return_value=None) as mock_send_mail: - with pytest.raises(Exception): - app.post_json_api( - public_project_url, - fork_data_with_title, - auth=user.auth) - mock_send_mail.assert_called_with( - user.email, - mails.FORK_FAILED, - title=public_project.title, - guid=public_project._id, - can_change_preferences=False) + with pytest.raises(Exception): + app.post_json_api( + public_project_url, + fork_data_with_title, + auth=user.auth) + assert mock_send_grid.called diff --git a/api_tests/nodes/views/test_node_relationship_institutions.py b/api_tests/nodes/views/test_node_relationship_institutions.py index 476bf8841e7..3bf25dc5adf 100644 --- a/api_tests/nodes/views/test_node_relationship_institutions.py +++ b/api_tests/nodes/views/test_node_relationship_institutions.py @@ -1,5 +1,4 @@ import pytest -from unittest import mock from api.base.settings.defaults import API_BASE from osf_tests.factories import ( @@ -8,7 +7,6 @@ NodeFactory, ) from osf.utils import permissions -from website import mails @pytest.mark.django_db @@ -115,6 +113,7 @@ def create_payload(self, institutions): ] } +@pytest.mark.usefixtures('mock_send_grid') class TestNodeRelationshipInstitutions(RelationshipInstitutionsTestMixin): def test_node_with_no_permissions(self, app, unauthorized_user_with_affiliation, institution_one, node_institutions_url): @@ -203,72 +202,59 @@ def test_user_with_institution_and_permissions( assert institution_one in node.affiliated_institutions.all() assert institution_two in node.affiliated_institutions.all() - @mock.patch('website.mails.settings.USE_EMAIL', True) def test_user_with_institution_and_permissions_through_patch(self, app, user, institution_one, institution_two, - node, node_institutions_url): - with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: - res = app.patch_json_api( - node_institutions_url, - self.create_payload([institution_one, institution_two]), - auth=user.auth - ) - assert res.status_code == 200 - assert mocked_send_mail.call_count == 2 - - first_call_args = mocked_send_mail.call_args_list[0] - assert first_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) - - second_call_args = mocked_send_mail.call_args_list[1] - assert second_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) - - @mock.patch('website.mails.settings.USE_EMAIL', True) - def test_remove_institutions_with_affiliated_user(self, app, user, institution_one, node, node_institutions_url): + node, node_institutions_url, mock_send_grid): + + mock_send_grid.reset_mock() + res = app.patch_json_api( + node_institutions_url, + self.create_payload([institution_one, institution_two]), + auth=user.auth + ) + assert res.status_code == 200 + assert mock_send_grid.call_count == 2 + + first_call_args = mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == user.email + assert first_call_args['subject'] == 'Project Affiliation Changed' + + second_call_args = mock_send_grid.call_args_list[1][1] + assert second_call_args['to_addr'] == user.email + assert second_call_args['subject'] == 'Project Affiliation Changed' + + def 
test_remove_institutions_with_affiliated_user(self, app, user, institution_one, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: - res = app.put_json_api( - node_institutions_url, - { - 'data': [] - }, - auth=user.auth - ) - - mocked_send_mail.assert_called_with( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + mock_send_grid.reset_mock() + res = app.put_json_api( + node_institutions_url, + { + 'data': [] + }, + auth=user.auth + ) + + first_call_args = mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == user.email + assert first_call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 200 assert node.affiliated_institutions.count() == 0 - @mock.patch('website.mails.settings.USE_EMAIL', True) - def test_using_post_making_no_changes_returns_201(self, app, user, institution_one, node, node_institutions_url): + def test_using_post_making_no_changes_returns_201(self, app, user, institution_one, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: - res = app.post_json_api( - node_institutions_url, - self.create_payload([institution_one]), - auth=user.auth - ) - mocked_send_mail.assert_not_called() + mock_send_grid.reset_mock() + res = app.post_json_api( + node_institutions_url, + self.create_payload([institution_one]), + auth=user.auth + ) + mock_send_grid.assert_not_called() assert res.status_code == 201 assert institution_one in node.affiliated_institutions.all() @@ -289,87 +275,70 @@ def test_put_not_admin_but_affiliated(self, app, institution_one, node, node_ins assert res.status_code == 200 assert institution_one in node.affiliated_institutions.all() - @mock.patch('website.mails.settings.USE_EMAIL', True) def test_add_through_patch_one_inst_to_node_with_inst( - self, app, user, institution_one, institution_two, node, node_institutions_url): + self, app, user, institution_one, institution_two, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: - res = app.patch_json_api( - node_institutions_url, - self.create_payload([institution_one, institution_two]), - auth=user.auth - ) - assert mocked_send_mail.call_count == 1 - first_call_args = mocked_send_mail.call_args_list[0] - assert first_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + mock_send_grid.reset_mock() + res = app.patch_json_api( + node_institutions_url, + self.create_payload([institution_one, institution_two]), + auth=user.auth + ) + assert mock_send_grid.call_count == 1 + first_call_args = mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == user.email + assert first_call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 200 assert institution_one in node.affiliated_institutions.all() assert institution_two in node.affiliated_institutions.all() - 
@mock.patch('website.mails.settings.USE_EMAIL', True) def test_add_through_patch_one_inst_while_removing_other( - self, app, user, institution_one, institution_two, node, node_institutions_url): + self, app, user, institution_one, institution_two, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: - res = app.patch_json_api( - node_institutions_url, - self.create_payload([institution_two]), - auth=user.auth - ) - assert mocked_send_mail.call_count == 2 - first_call_args = mocked_send_mail.call_args_list[0] - assert first_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) - second_call_args = mocked_send_mail.call_args_list[1] - assert second_call_args == mock.call( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + mock_send_grid.reset_mock() + res = app.patch_json_api( + node_institutions_url, + self.create_payload([institution_two]), + auth=user.auth + ) + assert mock_send_grid.call_count == 2 + + first_call_args = mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == user.email + assert first_call_args['subject'] == 'Project Affiliation Changed' + + second_call_args = mock_send_grid.call_args_list[1][1] + assert second_call_args['to_addr'] == user.email + assert second_call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 200 assert institution_one not in node.affiliated_institutions.all() assert institution_two in node.affiliated_institutions.all() - @mock.patch('website.mails.settings.USE_EMAIL', True) def test_add_one_inst_with_post_to_node_with_inst( - self, app, user, institution_one, institution_two, node, node_institutions_url): + self, app, user, institution_one, institution_two, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: - res = app.post_json_api( - node_institutions_url, - self.create_payload([institution_two]), - auth=user.auth - ) - mocked_send_mail.assert_called_with( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + res = app.post_json_api( + node_institutions_url, + self.create_payload([institution_two]), + auth=user.auth + ) + call_args = mock_send_grid.call_args[1] + assert call_args['to_addr'] == user.email + assert call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 201 assert institution_one in node.affiliated_institutions.all() @@ -383,23 +352,19 @@ def test_delete_nothing(self, app, user, node_institutions_url): ) assert res.status_code == 204 - @mock.patch('website.mails.settings.USE_EMAIL', True) - def test_delete_existing_inst(self, app, user, institution_one, node, node_institutions_url): + def test_delete_existing_inst(self, app, user, institution_one, node, node_institutions_url, mock_send_grid): node.affiliated_institutions.add(institution_one) node.save() - with mock.patch('osf.models.mixins.mails.execute_email_send') as mocked_send_mail: - res = app.delete_json_api( - node_institutions_url, - 
self.create_payload([institution_one]), - auth=user.auth - ) - mocked_send_mail.assert_called_with( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - user=user, - node=node, - ) + res = app.delete_json_api( + node_institutions_url, + self.create_payload([institution_one]), + auth=user.auth + ) + + call_args = mock_send_grid.call_args[1] + assert call_args['to_addr'] == user.email + assert call_args['subject'] == 'Project Affiliation Changed' assert res.status_code == 204 assert institution_one not in node.affiliated_institutions.all() diff --git a/api_tests/preprints/views/test_preprint_contributors_list.py b/api_tests/preprints/views/test_preprint_contributors_list.py index 9c4fd2fa57b..6676b542b60 100644 --- a/api_tests/preprints/views/test_preprint_contributors_list.py +++ b/api_tests/preprints/views/test_preprint_contributors_list.py @@ -1345,15 +1345,15 @@ def test_add_contributor_validation(self, preprint_published, validate_data): @pytest.mark.django_db @pytest.mark.enable_enqueue_task +@pytest.mark.usefixtures('mock_send_grid') class TestPreprintContributorCreateEmail(NodeCRUDTestCase): @pytest.fixture() def url_preprint_contribs(self, preprint_published): return f'/{API_BASE}preprints/{preprint_published._id}/contributors/' - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_no_email_if_false( - self, mock_mail, app, user, url_preprint_contribs): + self, mock_send_grid, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=false' payload = { 'data': { @@ -1364,13 +1364,13 @@ def test_add_contributor_no_email_if_false( } } } + mock_send_grid.reset_mock() res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_needs_preprint_filter_to_send_email( - self, mock_mail, app, user, user_two, + self, mock_send_grid, app, user, user_two, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=default' payload = { @@ -1389,10 +1389,11 @@ def test_add_contributor_needs_preprint_filter_to_send_email( } } + mock_send_grid.reset_mock() res = app.post_json_api(url, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'default is not a valid email preference.' 
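# --- Editor's note (not part of the patch) -------------------------------------
# Several of the rewritten tests call mock_send_grid.reset_mock() before making
# the request: because the mock is shared for the whole test, emails triggered
# while fixtures are being built (adding contributors, publishing preprints,
# and so on) would otherwise inflate call_count. The pattern, shown here as an
# illustrative function rather than a test taken from the patch:
def example_reset_pattern(app, url, payload, user, mock_send_grid):
    mock_send_grid.reset_mock()                  # discard calls made during setup
    res = app.post_json_api(url, payload, auth=user.auth)
    assert res.status_code == 201
    assert mock_send_grid.call_count == 1        # only the request's email is counted
# -------------------------------------------------------------------------------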
- assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @mock.patch('website.project.signals.contributor_added.send') def test_add_contributor_signal_if_preprint( @@ -1419,9 +1420,8 @@ def test_add_contributor_signal_if_preprint( assert mock_send.call_count == 1 assert 'preprint' == kwargs['email_template'] - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_sends_email( - self, mock_mail, app, user, url_preprint_contribs): + self, mock_send_grid, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=preprint' payload = { 'data': { @@ -1432,9 +1432,11 @@ def test_add_unregistered_contributor_sends_email( } } } + + mock_send_grid.reset_mock() res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @mock.patch('website.project.signals.unreg_contributor_added.send') def test_add_unregistered_contributor_signal_if_preprint( @@ -1455,9 +1457,8 @@ def test_add_unregistered_contributor_signal_if_preprint( assert 'preprint' == kwargs['email_template'] assert mock_send.call_count == 1 - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_contributor_invalid_send_email_param( - self, mock_mail, app, user, url_preprint_contribs): + self, mock_send_grid, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=true' payload = { 'data': { @@ -1468,16 +1469,16 @@ def test_add_contributor_invalid_send_email_param( } } } + mock_send_grid.reset_mock() res = app.post_json_api( url, payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'true is not a valid email preference.' 
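# --- Editor's note (not part of the patch) -------------------------------------
# The "no email" cases in these tests pair the existing signal capture with the
# SendGrid mock: the contributor_added signal should still fire even though no
# mail goes out. A condensed illustration (import paths assumed to match the
# surrounding test modules):
from tests.utils import capture_signals              # assumed import path
from website.project.signals import contributor_added


def example_signal_without_email(app, url, payload, user, mock_send_grid):
    mock_send_grid.reset_mock()
    with capture_signals() as mock_signal:
        res = app.post_json_api(url, payload, auth=user.auth)
    assert contributor_added in mock_signal.signals_sent()
    assert res.status_code == 201
    assert mock_send_grid.call_count == 0            # signal fired, nothing mailed
# -------------------------------------------------------------------------------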
- assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') def test_add_unregistered_contributor_without_email_no_email( - self, mock_mail, app, user, url_preprint_contribs): + self, mock_send_grid, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=preprint' payload = { 'data': { @@ -1488,16 +1489,16 @@ def test_add_unregistered_contributor_without_email_no_email( } } + mock_send_grid.reset_mock() with capture_signals() as mock_signal: res = app.post_json_api(url, payload, auth=user.auth) assert contributor_added in mock_signal.signals_sent() assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('osf.models.preprint.update_or_enqueue_on_preprint_updated') def test_publishing_preprint_sends_emails_to_contributors( - self, mock_update, mock_mail, app, user, url_preprint_contribs, preprint_unpublished): + self, mock_update, mock_send_grid, app, user, url_preprint_contribs, preprint_unpublished): url = f'/{API_BASE}preprints/{preprint_unpublished._id}/' user_two = AuthUserFactory() preprint_unpublished.add_contributor(user_two, permissions=permissions.WRITE, save=True) @@ -1535,9 +1536,8 @@ def test_contributor_added_signal_not_specified( assert 'preprint' == kwargs['email_template'] assert mock_send.call_count == 1 - @mock.patch('framework.auth.views.mails.execute_email_send') def test_contributor_added_not_sent_if_unpublished( - self, mock_mail, app, user, preprint_unpublished): + self, mock_send_grid, app, user, preprint_unpublished): url = f'/{API_BASE}preprints/{preprint_unpublished._id}/contributors/?send_email=preprint' payload = { 'data': { @@ -1548,9 +1548,10 @@ def test_contributor_added_not_sent_if_unpublished( } } } + mock_send_grid.reset_mock() res = app.post_json_api(url, payload, auth=user.auth) assert res.status_code == 201 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.django_db diff --git a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py index abdd218fbb3..20d081e8709 100644 --- a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py +++ b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -90,17 +89,16 @@ def test_GET_admin_with_filter(self, app, url, nonmoderator, moderator, admin, p @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestPOSTCollectionsModeratorList: - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_POST_unauthorized(self, mock_mail, app, url, nonmoderator, moderator, provider): + def test_POST_unauthorized(self, mock_send_grid, app, url, nonmoderator, moderator, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post(url, payload, expect_errors=True) assert res.status_code == 401 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_POST_forbidden(self, mock_mail, app, url, nonmoderator, moderator, provider): + def test_POST_forbidden(self, mock_send_grid, app, url, nonmoderator, moderator, provider): payload = 
make_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post(url, payload, auth=nonmoderator.auth, expect_errors=True) @@ -109,58 +107,53 @@ def test_POST_forbidden(self, mock_mail, app, url, nonmoderator, moderator, prov res = app.post(url, payload, auth=moderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_POST_admin_success_existing_user(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_success_existing_user(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert res.json['data']['id'] == nonmoderator._id assert res.json['data']['attributes']['permission_group'] == 'moderator' - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_POST_admin_failure_existing_moderator(self, mock_mail, app, url, moderator, admin, provider): + def test_POST_admin_failure_existing_moderator(self, mock_send_grid, app, url, moderator, admin, provider): payload = make_payload(user_id=moderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_POST_admin_failure_unreg_moderator(self, mock_mail, app, url, moderator, nonmoderator, admin, provider): + def test_POST_admin_failure_unreg_moderator(self, mock_send_grid, app, url, moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Jalen Hurts', 'email': '1eagles@allbatman.org'} # test_user_with_no_moderator_admin_permissions payload = make_payload(permission_group='moderator', **unreg_user) res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 # test_user_with_moderator_admin_permissions payload = make_payload(permission_group='moderator', **unreg_user) res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - assert mock_mail.call_args[0][0] == unreg_user['email'] + assert mock_send_grid.call_count == 1 + assert mock_send_grid.call_args[1]['to_addr'] == unreg_user['email'] - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_POST_admin_failure_invalid_group(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_failure_invalid_group(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='citizen') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_POST_admin_success_email(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_success_email(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = 
make_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert len(res.json['data']['id']) == 5 assert res.json['data']['attributes']['permission_group'] == 'moderator' assert 'email' not in res.json['data']['attributes'] - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 def test_moderators_alphabetically(self, app, url, admin, moderator, provider): admin.fullname = 'Flecher Cox' diff --git a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py index 2ea44e3209e..8998d2a85ca 100644 --- a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py +++ b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -9,6 +8,7 @@ from osf.utils import permissions +@pytest.mark.usefixtures('mock_send_grid') class ProviderModeratorListTestClass: @pytest.fixture() @@ -68,8 +68,7 @@ def test_list_get_admin_with_filter(self, app, url, nonmoderator, moderator, adm assert res.json['data'][0]['id'] == admin._id assert res.json['data'][0]['attributes']['permission_group'] == permissions.ADMIN - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_list_post_unauthorized(self, mock_mail, app, url, nonmoderator, moderator, provider): + def test_list_post_unauthorized(self, mock_send_grid, app, url, nonmoderator, moderator, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post(url, payload, expect_errors=True) assert res.status_code == 401 @@ -80,58 +79,53 @@ def test_list_post_unauthorized(self, mock_mail, app, url, nonmoderator, moderat res = app.post(url, payload, auth=moderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_list_post_admin_success_existing_user(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_success_existing_user(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert res.json['data']['id'] == nonmoderator._id assert res.json['data']['attributes']['permission_group'] == 'moderator' - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_list_post_admin_failure_existing_moderator(self, mock_mail, app, url, moderator, admin, provider): + def test_list_post_admin_failure_existing_moderator(self, mock_send_grid, app, url, moderator, admin, provider): payload = self.create_payload(user_id=moderator._id, permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_list_post_admin_failure_unreg_moderator(self, mock_mail, app, url, moderator, nonmoderator, admin, provider): + def 
test_list_post_admin_failure_unreg_moderator(self, mock_send_grid, app, url, moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Son Goku', 'email': 'goku@dragonball.org'} # test_user_with_no_moderator_admin_permissions payload = self.create_payload(permission_group='moderator', **unreg_user) res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 # test_user_with_moderator_admin_permissions payload = self.create_payload(permission_group='moderator', **unreg_user) res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - assert mock_mail.call_args[0][0] == unreg_user['email'] + assert mock_send_grid.call_count == 1 + assert mock_send_grid.call_args[1]['to_addr'] == unreg_user['email'] - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_list_post_admin_failure_invalid_group(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_failure_invalid_group(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='citizen') res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_list_post_admin_success_email(self, mock_mail, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_success_email(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = self.create_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert len(res.json['data']['id']) == 5 assert res.json['data']['attributes']['permission_group'] == 'moderator' assert 'email' not in res.json['data']['attributes'] - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 def test_list_moderators_alphabetically(self, app, url, admin, moderator, provider): admin.fullname = 'Alice Alisdottir' diff --git a/api_tests/providers/tasks/test_bulk_upload.py b/api_tests/providers/tasks/test_bulk_upload.py index 99942a74e92..221861ea313 100644 --- a/api_tests/providers/tasks/test_bulk_upload.py +++ b/api_tests/providers/tasks/test_bulk_upload.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest import uuid @@ -8,13 +7,10 @@ from osf.models import RegistrationBulkUploadJob, RegistrationBulkUploadRow, RegistrationProvider, RegistrationSchema from osf.models.registration_bulk_upload_job import JobState from osf.models.registration_bulk_upload_row import RegistrationBulkUploadContributors -from osf.registrations.utils import get_registration_provider_submissions_url from osf.utils.permissions import ADMIN, READ, WRITE from osf_tests.factories import InstitutionFactory, SubjectFactory, UserFactory -from website import mails, settings - class TestRegistrationBulkUploadContributors: @@ -67,6 +63,7 @@ def test_error_message_default(self): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestBulkUploadTasks: @pytest.fixture() @@ -320,9 +317,7 @@ def test_bulk_creation_dry_run(self, registration_row_1, registration_row_2, upl assert upload_job_done_full.state == JobState.PICKED_UP 
assert not upload_job_done_full.email_sent - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) - def test_bulk_creation_done_full(self, mock_send_mail, registration_row_1, registration_row_2, + def test_bulk_creation_done_full(self, mock_send_grid, registration_row_1, registration_row_2, upload_job_done_full, provider, initiator, read_contributor, write_contributor): bulk_create_registrations(upload_job_done_full.id, dry_run=False) @@ -340,18 +335,9 @@ def test_bulk_creation_done_full(self, mock_send_mail, registration_row_1, regis assert row.draft_registration.contributor_set.get(user=write_contributor).permission == WRITE assert row.draft_registration.contributor_set.get(user=read_contributor).permission == READ - mock_send_mail.assert_called_with( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_SUCCESS_ALL, - fullname=initiator.fullname, - auto_approval=False, - count=2, - pending_submissions_url=get_registration_provider_submissions_url(provider), - ) - - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) - def test_bulk_creation_done_partial(self, mock_send_mail, registration_row_3, + mock_send_grid.assert_called() + + def test_bulk_creation_done_partial(self, mock_send_grid, registration_row_3, registration_row_invalid_extra_bib_1, upload_job_done_partial, provider, initiator, read_contributor, write_contributor): @@ -369,26 +355,9 @@ def test_bulk_creation_done_partial(self, mock_send_mail, registration_row_3, assert registration_row_3.draft_registration.contributor_set.get(user=write_contributor).permission == WRITE assert registration_row_3.draft_registration.contributor_set.get(user=read_contributor).permission == READ - mock_send_mail.assert_called_with( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL, - fullname=initiator.fullname, - auto_approval=False, - approval_errors=[], - draft_errors=[ - 'Title: Test title Invalid - Extra Bibliographic Contributor, External ID: 90-=ijkl, ' - 'Error: Bibliographic contributors must be one of admin, read-only or read-write' - ], - total=2, - successes=1, - failures=1, - pending_submissions_url=get_registration_provider_submissions_url(provider), - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) - - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) - def test_bulk_creation_done_error(self, mock_send_mail, registration_row_invalid_extra_bib_2, + mock_send_grid.assert_called() + + def test_bulk_creation_done_error(self, mock_send_grid, registration_row_invalid_extra_bib_2, registration_row_invalid_affiliation, upload_job_done_error, provider, initiator, read_contributor, write_contributor, institution): @@ -398,16 +367,4 @@ def test_bulk_creation_done_error(self, mock_send_mail, registration_row_invalid assert upload_job_done_error.email_sent assert len(RegistrationBulkUploadRow.objects.filter(upload__id=upload_job_done_error.id)) == 0 - mock_send_mail.assert_called_with( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_FAILURE_ALL, - fullname=initiator.fullname, - draft_errors=[ - 'Title: Test title Invalid - Extra Bibliographic Contributor, External ID: 90-=ijkl, ' - 'Error: Bibliographic contributors must be one of admin, read-only or 
read-write', - f'Title: Test title Invalid - Unauthorized Affiliation, External ID: mnopqrst, ' - f'Error: Initiator [{initiator._id}] is not affiliated with institution [{institution._id}]', - ], - count=2, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) + mock_send_grid.assert_called() diff --git a/api_tests/registrations/views/test_registration_detail.py b/api_tests/registrations/views/test_registration_detail.py index 2cede11165c..086aef73798 100644 --- a/api_tests/registrations/views/test_registration_detail.py +++ b/api_tests/registrations/views/test_registration_detail.py @@ -686,6 +686,7 @@ def test_read_write_contributor_can_edit_writeable_fields( @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestRegistrationWithdrawal(TestRegistrationUpdateTestCase): @pytest.fixture @@ -744,15 +745,14 @@ def test_initiate_withdraw_registration_fails( res = app.put_json_api(public_url, public_payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 - @mock.patch('website.mails.execute_email_send') - def test_initiate_withdrawal_success(self, mock_send_mail, app, user, public_registration, public_url, public_payload): + def test_initiate_withdrawal_success(self, mock_send_grid, app, user, public_registration, public_url, public_payload): res = app.put_json_api(public_url, public_payload, auth=user.auth) assert res.status_code == 200 assert res.json['data']['attributes']['pending_withdrawal'] is True public_registration.refresh_from_db() assert public_registration.is_pending_retraction assert public_registration.registered_from.logs.first().action == 'retraction_initiated' - assert mock_send_mail.called + assert mock_send_grid.called def test_initiate_withdrawal_with_embargo_ends_embargo( self, app, user, public_project, public_registration, public_url, public_payload): @@ -775,9 +775,8 @@ def test_initiate_withdrawal_with_embargo_ends_embargo( assert public_registration.is_pending_retraction assert not public_registration.is_pending_embargo - @mock.patch('website.mails.execute_email_send') def test_withdraw_request_does_not_send_email_to_unregistered_admins( - self, mock_send_mail, app, user, public_registration, public_url, public_payload): + self, mock_send_grid, app, user, public_registration, public_url, public_payload): unreg = UnregUserFactory() with disconnected_from_listeners(contributor_added): public_registration.add_unregistered_contributor( @@ -794,7 +793,7 @@ def test_withdraw_request_does_not_send_email_to_unregistered_admins( # Only the creator gets an email; the unreg user does not get emailed assert public_registration._contributors.count() == 2 - assert mock_send_mail.call_count == 1 + assert mock_send_grid.call_count == 3 @pytest.mark.django_db diff --git a/api_tests/requests/views/test_node_request_institutional_access.py b/api_tests/requests/views/test_node_request_institutional_access.py index 13551df4647..d868739e9bd 100644 --- a/api_tests/requests/views/test_node_request_institutional_access.py +++ b/api_tests/requests/views/test_node_request_institutional_access.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -6,12 +5,11 @@ from osf_tests.factories import NodeFactory, InstitutionFactory, AuthUserFactory from osf.utils.workflows import DefaultStates, NodeRequestTypes -from website import language -from website.mails import NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST from framework.auth import Auth @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') 
class TestNodeRequestListInstitutionalAccess(NodeRequestTestMixin): @pytest.fixture() @@ -208,85 +206,37 @@ def test_institutional_admin_unauth_institution(self, app, project, institution_ assert res.status_code == 403 assert 'Institutional request access is not enabled.' in res.json['errors'][0]['detail'] - @mock.patch('api.requests.serializers.execute_email_send') - @mock.patch('osf.utils.machines.mails.execute_email_send') - def test_email_send_institutional_request_specific_email( - self, - mock_send_mail_machines, - mock_send_mail_serializers, - user_with_affiliation, - app, - project, - url, - create_payload, - institutional_admin, - institution - ): - """ - Test that the institutional request triggers email notifications to appropriate recipients. - """ - # Set up mock behaviors - project.is_public = True - project.save() - - # Perform the action - res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) - - # Ensure response is successful - assert res.status_code == 201 - - assert mock_send_mail_serializers.call_count == 1 - assert mock_send_mail_machines.call_count == 0 - - # Check calls for osf.utils.machines.mails.send_mail - mock_send_mail_serializers.assert_called_once_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) - - @mock.patch('api.requests.serializers.execute_email_send') - def test_email_not_sent_without_recipient(self, mock_mail, app, project, institutional_admin, url, + def test_email_not_sent_without_recipient(self, mock_send_grid, app, project, institutional_admin, url, create_payload, institution): """ Test that an email is not sent when no recipient is listed when an institutional access request is made, but the request is still made anyway without email. """ del create_payload['data']['relationships']['message_recipient'] + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 # Check that an email is sent - assert not mock_mail.called + assert not mock_send_grid.called - @mock.patch('api.requests.serializers.execute_email_send') - def test_email_not_sent_outside_institution(self, mock_mail, app, project, institutional_admin, url, + def test_email_not_sent_outside_institution(self, mock_send_grid, app, project, institutional_admin, url, create_payload, user_without_affiliation, institution): """ Test that you are prevented from requesting a user with the correct institutional affiliation. """ create_payload['data']['relationships']['message_recipient']['data']['id'] = user_without_affiliation._id + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth, expect_errors=True) assert res.status_code == 403 assert f'User {user_without_affiliation._id} is not affiliated with the institution.' 
in res.json['errors'][0]['detail'] # Check that an email is sent - assert not mock_mail.called + assert not mock_send_grid.called - @mock.patch('api.requests.serializers.execute_email_send') def test_email_sent_on_creation( self, - mock_mail, + mock_send_grid, app, project, institutional_admin, @@ -298,31 +248,15 @@ def test_email_sent_on_creation( """ Test that an email is sent to the appropriate recipients when an institutional access request is made. """ + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + assert mock_send_grid.call_count == 1 - @mock.patch('api.requests.serializers.execute_email_send') def test_bcc_institutional_admin( self, - mock_mail, + mock_send_grid, app, project, institutional_admin, @@ -335,32 +269,15 @@ def test_bcc_institutional_admin( Ensure BCC option works as expected, sending messages to sender giving them a copy for themselves. """ create_payload['data']['attributes']['bcc_sender'] = True - + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=[institutional_admin.username], - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + assert mock_send_grid.call_count == 1 - @mock.patch('api.requests.serializers.execute_email_send') def test_reply_to_institutional_admin( self, - mock_mail, + mock_send_grid, app, project, institutional_admin, @@ -373,27 +290,11 @@ def test_reply_to_institutional_admin( Ensure reply-to option works as expected, allowing a reply to header be added to the email. 
""" create_payload['data']['attributes']['reply_to'] = True - + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 - - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=institutional_admin.username, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': create_payload['data']['attributes']['comment'], - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + assert mock_send_grid.call_count == 1 def test_access_requests_disabled_raises_permission_denied( self, app, node_with_disabled_access_requests, user_with_affiliation, institutional_admin, create_payload @@ -410,10 +311,9 @@ def test_access_requests_disabled_raises_permission_denied( assert res.status_code == 403 assert f"{node_with_disabled_access_requests._id} does not have Access Requests enabled" in res.json['errors'][0]['detail'] - @mock.patch('api.requests.serializers.execute_email_send') def test_placeholder_text_when_comment_is_empty( self, - mock_mail, + mock_send_grid, app, project, institutional_admin, @@ -427,24 +327,11 @@ def test_placeholder_text_when_comment_is_empty( """ # Test with empty comment create_payload['data']['attributes']['comment'] = '' + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - mock_mail.assert_called_with( - to_addr=user_with_affiliation.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - bcc_addr=None, - reply_to=None, - **{ - 'sender': institutional_admin, - 'recipient': user_with_affiliation, - 'comment': language.EMPTY_REQUEST_INSTITUTIONAL_ACCESS_REQUEST_TEXT, - 'institution': institution, - 'osf_url': mock.ANY, - 'node': project, - } - ) + mock_send_grid.assert_called() def test_requester_can_resubmit(self, app, project, institutional_admin, url, create_payload): """ diff --git a/api_tests/requests/views/test_node_request_list.py b/api_tests/requests/views/test_node_request_list.py index 4396d524f6e..41ee66747d4 100644 --- a/api_tests/requests/views/test_node_request_list.py +++ b/api_tests/requests/views/test_node_request_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -9,6 +8,7 @@ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestNodeRequestListCreate(NodeRequestTestMixin): @pytest.fixture() def url(self, project): @@ -80,25 +80,25 @@ def test_requests_disabled_list(self, app, url, create_payload, project, admin): res = app.get(url, create_payload, auth=admin.auth, expect_errors=True) assert res.status_code == 403 - @mock.patch('website.mails.mails.execute_email_send') - def test_email_sent_to_all_admins_on_submit(self, mock_mail, app, project, noncontrib, url, create_payload, second_admin): + def test_email_sent_to_all_admins_on_submit(self, mock_send_grid, app, project, noncontrib, url, create_payload, second_admin): project.is_public = True project.save() + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=noncontrib.auth) assert res.status_code == 201 - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 - @mock.patch('website.mails.mails.execute_email_send') - def test_email_not_sent_to_parent_admins_on_submit(self, 
mock_mail, app, project, noncontrib, url, create_payload, second_admin): + def test_email_not_sent_to_parent_admins_on_submit(self, mock_send_grid, app, project, noncontrib, url, create_payload, second_admin): component = NodeFactory(parent=project, creator=second_admin) component.is_public = True project.save() url = f'/{API_BASE}nodes/{component._id}/requests/' + mock_send_grid.reset_mock() res = app.post_json_api(url, create_payload, auth=noncontrib.auth) assert res.status_code == 201 assert component.parent_admin_contributors.count() == 1 assert component.contributors.count() == 1 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 def test_request_followed_by_added_as_contrib(elf, app, project, noncontrib, admin, url, create_payload): res = app.post_json_api(url, create_payload, auth=noncontrib.auth) diff --git a/api_tests/requests/views/test_preprint_request_list.py b/api_tests/requests/views/test_preprint_request_list.py index 3ff21c73ec1..72e16862f7a 100644 --- a/api_tests/requests/views/test_preprint_request_list.py +++ b/api_tests/requests/views/test_preprint_request_list.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -6,6 +5,7 @@ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestPreprintRequestListCreate(PreprintRequestTestMixin): def url(self, preprint): return f'/{API_BASE}preprints/{preprint._id}/requests/' @@ -65,8 +65,7 @@ def test_requester_cannot_submit_again(self, app, admin, create_payload, pre_mod assert res.json['errors'][0]['detail'] == 'Users may not have more than one withdrawal request per preprint.' @pytest.mark.skip('TODO: IN-284 -- add emails') - @mock.patch('website.reviews.listeners.mails.execute_email_send') - def test_email_sent_to_moderators_on_submit(self, mock_mail, app, admin, create_payload, moderator, post_mod_preprint): + def test_email_sent_to_moderators_on_submit(self, mock_send_grid, app, admin, create_payload, moderator, post_mod_preprint): res = app.post_json_api(self.url(post_mod_preprint), create_payload, auth=admin.auth) assert res.status_code == 201 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 diff --git a/api_tests/requests/views/test_request_actions_create.py b/api_tests/requests/views/test_request_actions_create.py index 05404e5c0bb..30e579d3ab3 100644 --- a/api_tests/requests/views/test_request_actions_create.py +++ b/api_tests/requests/views/test_request_actions_create.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.base.settings.defaults import API_BASE @@ -8,6 +7,7 @@ @pytest.mark.django_db @pytest.mark.enable_enqueue_task +@pytest.mark.usefixtures('mock_send_grid') class TestCreateNodeRequestAction(NodeRequestTestMixin): @pytest.fixture() def url(self, node_request): @@ -190,8 +190,8 @@ def test_rejects_fail_with_requests_disabled(self, app, admin, url, node_request assert initial_state == node_request.machine_state assert node_request.creator not in node_request.target.contributors - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_email_sent_on_approve(self, mock_mail, app, admin, url, node_request): + def test_email_sent_on_approve(self, mock_send_grid, app, admin, url, node_request): + mock_send_grid.reset_mock() initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors payload = self.create_payload(node_request._id, trigger='accept') @@ -200,10 +200,10 @@ def 
test_email_sent_on_approve(self, mock_mail, app, admin, url, node_request): node_request.reload() assert initial_state != node_request.machine_state assert node_request.creator in node_request.target.contributors - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('website.mails.mails.execute_email_send') - def test_email_sent_on_reject(self, mock_mail, app, admin, url, node_request): + def test_email_sent_on_reject(self, mock_send_grid, app, admin, url, node_request): + mock_send_grid.reset_mock() initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors payload = self.create_payload(node_request._id, trigger='reject') @@ -212,10 +212,10 @@ def test_email_sent_on_reject(self, mock_mail, app, admin, url, node_request): node_request.reload() assert initial_state != node_request.machine_state assert node_request.creator not in node_request.target.contributors - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('website.mails.mails.execute_email_send') - def test_email_not_sent_on_reject(self, mock_mail, app, requester, url, node_request): + def test_email_not_sent_on_reject(self, mock_send_grid, app, requester, url, node_request): + mock_send_grid.reset_mock() initial_state = node_request.machine_state initial_comment = node_request.comment payload = self.create_payload(node_request._id, trigger='edit_comment', comment='ASDFG') @@ -224,7 +224,7 @@ def test_email_not_sent_on_reject(self, mock_mail, app, requester, url, node_req node_request.reload() assert initial_state == node_request.machine_state assert initial_comment != node_request.comment - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 def test_set_permissions_on_approve(self, app, admin, url, node_request): assert node_request.creator not in node_request.target.contributors @@ -255,6 +255,7 @@ def test_accept_request_defaults_to_read_and_visible(self, app, admin, url, node @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestCreatePreprintRequestAction(PreprintRequestTestMixin): @pytest.fixture() def url(self, pre_request, post_request, none_request): @@ -384,8 +385,8 @@ def test_write_contrib_and_noncontrib_cannot_edit_comment(self, app, write_contr assert initial_state == request.machine_state assert initial_comment == request.comment - @mock.patch('website.reviews.listeners.mails.execute_email_send') - def test_email_sent_on_approve(self, mock_mail, app, moderator, url, pre_request, post_request): + def test_email_sent_on_approve(self, mock_send_grid, app, moderator, url, pre_request, post_request): + mock_send_grid.reset_mock() for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted @@ -397,11 +398,10 @@ def test_email_sent_on_approve(self, mock_mail, app, moderator, url, pre_request assert initial_state != request.machine_state assert request.target.is_retracted # There are two preprints withdrawn and each preprint have 2 contributors. So 4 emails are sent in total. 
- assert mock_mail.call_count == 4 + assert mock_send_grid.call_count == 4 @pytest.mark.skip('TODO: IN-331 -- add emails') - @mock.patch('website.reviews.listeners.mails.execute_email_send') - def test_email_sent_on_reject(self, mock_mail, app, moderator, url, pre_request, post_request): + def test_email_sent_on_reject(self, mock_send_grid, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted @@ -411,11 +411,10 @@ def test_email_sent_on_reject(self, mock_mail, app, moderator, url, pre_request, request.reload() assert initial_state != request.machine_state assert not request.target.is_retracted - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 @pytest.mark.skip('TODO: IN-284/331 -- add emails') - @mock.patch('website.reviews.listeners.mails.execute_email_send') - def test_email_not_sent_on_edit_comment(self, mock_mail, app, moderator, url, pre_request, post_request): + def test_email_not_sent_on_edit_comment(self, mock_send_grid, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted @@ -425,7 +424,7 @@ def test_email_not_sent_on_edit_comment(self, mock_mail, app, moderator, url, pr request.reload() assert initial_state != request.machine_state assert not request.target.is_retracted - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 def test_auto_approve(self, app, auto_withdrawable_pre_mod_preprint, auto_approved_pre_request): assert auto_withdrawable_pre_mod_preprint.is_retracted diff --git a/api_tests/users/views/test_user_claim.py b/api_tests/users/views/test_user_claim.py index fbe273872f4..0e265021c5c 100644 --- a/api_tests/users/views/test_user_claim.py +++ b/api_tests/users/views/test_user_claim.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from django.utils import timezone @@ -13,13 +12,9 @@ ) @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestClaimUser: - @pytest.fixture - def mock_mail(self): - with mock.patch('website.project.views.contributor.mails.execute_email_send') as patch: - yield patch - @pytest.fixture() def referrer(self): return AuthUserFactory() @@ -121,37 +116,41 @@ def test_claim_unauth_failure(self, app, url, unreg_user, project, wrong_preprin ) assert res.status_code == 401 - def test_claim_unauth_success_with_original_email(self, app, url, project, unreg_user, mock_mail): + def test_claim_unauth_success_with_original_email(self, app, url, project, unreg_user, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(email='david@david.son', id=project._id), ) assert res.status_code == 204 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - def test_claim_unauth_success_with_claimer_email(self, app, url, unreg_user, project, claimer, mock_mail): + def test_claim_unauth_success_with_claimer_email(self, app, url, unreg_user, project, claimer, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(email=claimer.username, id=project._id) ) assert res.status_code == 204 - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 - def test_claim_unauth_success_with_unknown_email(self, app, url, project, unreg_user, mock_mail): + def test_claim_unauth_success_with_unknown_email(self, app, url, project, 
unreg_user, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(email='asdf@fdsa.com', id=project._id), ) assert res.status_code == 204 - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 - def test_claim_unauth_success_with_preprint_id(self, app, url, preprint, unreg_user, mock_mail): + def test_claim_unauth_success_with_preprint_id(self, app, url, preprint, unreg_user, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(email='david@david.son', id=preprint._id), ) assert res.status_code == 204 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 def test_claim_auth_failure(self, app, url, claimer, wrong_preprint, project, unreg_user, referrer): _url = url.format(unreg_user._id) @@ -210,9 +209,10 @@ def test_claim_auth_failure(self, app, url, claimer, wrong_preprint, project, un ) assert res.status_code == 403 - def test_claim_auth_throttle_error(self, app, url, claimer, unreg_user, project, mock_mail): + def test_claim_auth_throttle_error(self, app, url, claimer, unreg_user, project, mock_send_grid): unreg_user.unclaimed_records[project._id]['last_sent'] = timezone.now() unreg_user.save() + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(id=project._id), @@ -221,13 +221,14 @@ def test_claim_auth_throttle_error(self, app, url, claimer, unreg_user, project, ) assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'User account can only be claimed with an existing user once every 24 hours' - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - def test_claim_auth_success(self, app, url, claimer, unreg_user, project, mock_mail): + def test_claim_auth_success(self, app, url, claimer, unreg_user, project, mock_send_grid): + mock_send_grid.reset_mock() res = app.post_json_api( url.format(unreg_user._id), self.payload(id=project._id), auth=claimer.auth ) assert res.status_code == 204 - assert mock_mail.call_count == 2 + assert mock_send_grid.call_count == 2 diff --git a/api_tests/users/views/test_user_list.py b/api_tests/users/views/test_user_list.py index 04b2f644171..32cc69758d4 100644 --- a/api_tests/users/views/test_user_list.py +++ b/api_tests/users/views/test_user_list.py @@ -246,6 +246,7 @@ def test_users_list_filter_multiple_fields_with_bad_filter( @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUsersCreate: @pytest.fixture() @@ -277,9 +278,8 @@ def tearDown(self, app): app.reset() # clears cookies OSFUser.remove() - @mock.patch('framework.auth.views.mails.execute_email_send') def test_logged_in_user_with_basic_auth_cannot_create_other_user_or_send_mail( - self, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_send_grid, app, user, email_unconfirmed, data, url_base): assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 res = app.post_json_api( f'{url_base}?send_email=true', @@ -290,11 +290,10 @@ def test_logged_in_user_with_basic_auth_cannot_create_other_user_or_send_mail( assert res.status_code == 403 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') def test_logged_out_user_cannot_create_other_user_or_send_mail( - self, mock_mail, app, email_unconfirmed, data, url_base): + self, mock_send_grid, app, 
email_unconfirmed, data, url_base): assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 res = app.post_json_api( f'{url_base}?send_email=true', @@ -304,12 +303,11 @@ def test_logged_out_user_cannot_create_other_user_or_send_mail( assert res.status_code == 401 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.execute_email_send') def test_cookied_requests_can_create_and_email( - self, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_send_grid, app, user, email_unconfirmed, data, url_base): # NOTE: skipped tests are not tested during session refactor, only updated to fix import session = SessionStore() session['auth_user_id'] = user._id @@ -324,17 +322,16 @@ def test_cookied_requests_can_create_and_email( ) assert res.status_code == 201 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( not settings.DEV_MODE, 'DEV_MODE disabled, osf.users.create unavailable') def test_properly_scoped_token_can_create_and_send_email( - self, mock_auth, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Authorized Token', @@ -364,17 +361,16 @@ def test_properly_scoped_token_can_create_and_send_email( assert res.status_code == 201 assert res.json['data']['attributes']['username'] == email_unconfirmed assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( not settings.DEV_MODE, 'DEV_MODE disabled, osf.users.create unavailable') def test_properly_scoped_token_does_not_send_email_without_kwarg( - self, mock_auth, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Authorized Token', @@ -406,17 +402,16 @@ def test_properly_scoped_token_does_not_send_email_without_kwarg( assert res.status_code == 201 assert res.json['data']['attributes']['username'] == email_unconfirmed assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( not settings.DEV_MODE, 'DEV_MODE disabled, osf.users.create unavailable') def test_properly_scoped_token_can_create_without_username_but_not_send_email( - self, mock_auth, mock_mail, app, user, data, url_base): + self, mock_auth, mock_send_grid, app, user, 
data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Authorized Token', @@ -452,12 +447,11 @@ def test_properly_scoped_token_can_create_without_username_but_not_send_email( except ValueError: raise AssertionError('Username is not a valid UUID') assert OSFUser.objects.filter(fullname='No Email').count() == 1 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') def test_improperly_scoped_token_can_not_create_or_email( - self, mock_auth, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Unauthorized Token', @@ -489,17 +483,16 @@ def test_improperly_scoped_token_can_not_create_or_email( assert res.status_code == 403 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 @pytest.mark.skip # failing locally post converision - @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') # TODO: Remove when available outside of DEV_MODE @unittest.skipIf( not settings.DEV_MODE, 'DEV_MODE disabled, osf.admin unavailable') def test_admin_scoped_token_can_create_and_send_email( - self, mock_auth, mock_mail, app, user, email_unconfirmed, data, url_base): + self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Admin Token', @@ -529,4 +522,4 @@ def test_admin_scoped_token_can_create_and_send_email( assert res.status_code == 201 assert res.json['data']['attributes']['username'] == email_unconfirmed assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 diff --git a/api_tests/users/views/test_user_message_institutional_access.py b/api_tests/users/views/test_user_message_institutional_access.py index ef507cacbb3..2f60c4ae726 100644 --- a/api_tests/users/views/test_user_message_institutional_access.py +++ b/api_tests/users/views/test_user_message_institutional_access.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from osf.models.user_message import MessageTypes, UserMessage from api.base.settings.defaults import API_BASE @@ -6,11 +5,11 @@ AuthUserFactory, InstitutionFactory ) -from website.mails import USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST from webtest import AppError @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUserMessageInstitutionalAccess: """ Tests for `UserMessage`. @@ -85,12 +84,10 @@ def payload(self, institution, user): } } - @mock.patch('osf.models.user_message.execute_email_send') - def test_institutional_admin_can_create_message(self, mock_send_mail, app, institutional_admin, institution, url_with_affiliation, payload): + def test_institutional_admin_can_create_message(self, mock_send_grid, app, institutional_admin, institution, url_with_affiliation, payload): """ Ensure an institutional admin can create a `UserMessage` with a `message` and `institution`. 
""" - mock_send_mail.return_value = mock.MagicMock() res = app.post_json_api( url_with_affiliation, @@ -105,19 +102,16 @@ def test_institutional_admin_can_create_message(self, mock_send_mail, app, insti assert user_message.message_text == payload['data']['attributes']['message_text'] assert user_message.institution == institution - mock_send_mail.assert_called_once() - assert mock_send_mail.call_args[1]['to_addr'] == user_message.recipient.username - assert 'Requesting user access for collaboration' in mock_send_mail.call_args[1]['message_text'] + mock_send_grid.assert_called_once() + assert mock_send_grid.call_args[1]['to_addr'] == user_message.recipient.username assert user_message._id == data['id'] - @mock.patch('osf.models.user_message.execute_email_send') - def test_institutional_admin_can_not_create_message(self, mock_send_mail, app, institutional_admin_on_institution_without_access, + def test_institutional_admin_can_not_create_message(self, mock_send_grid, app, institutional_admin_on_institution_without_access, institution_without_access, url_with_affiliation_on_institution_without_access, payload): """ Ensure an institutional admin cannot create a `UserMessage` with a `message` and `institution` witch has 'institutional_request_access_enabled' as False """ - mock_send_mail.return_value = mock.MagicMock() # Use pytest.raises to explicitly expect the 403 error with pytest.raises(AppError) as exc_info: @@ -197,10 +191,9 @@ def test_admin_cannot_message_user_outside_institution( assert ('Cannot send to a recipient that is not affiliated with the provided institution.' in res.json['errors'][0]['detail']['user']) - @mock.patch('osf.models.user_message.execute_email_send') def test_cc_institutional_admin( self, - mock_send_mail, + mock_send_grid, app, institutional_admin, institution, @@ -211,7 +204,6 @@ def test_cc_institutional_admin( """ Ensure CC option works as expected, sending messages to all institutional admins except the sender. """ - mock_send_mail.return_value = mock.MagicMock() # Enable CC in the payload payload['data']['attributes']['bcc_sender'] = True @@ -227,20 +219,9 @@ def test_cc_institutional_admin( assert user_message.is_sender_BCCed # Two emails are sent during the CC but this is how the mock works `send_email` is called once. - mock_send_mail.assert_called_once_with( - to_addr=user_with_affiliation.username, - bcc_addr=[institutional_admin.username], - reply_to=None, - message_text='Requesting user access for collaboration', - mail=USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - sender=institutional_admin, - recipient=user_with_affiliation, - institution=institution, - ) + assert mock_send_grid.call_args[1]['to_addr'] == user_with_affiliation.username - @mock.patch('osf.models.user_message.execute_email_send') - def test_cc_field_defaults_to_false(self, mock_send_mail, app, institutional_admin, url_with_affiliation, user_with_affiliation, institution, payload): + def test_cc_field_defaults_to_false(self, mock_send_grid, app, institutional_admin, url_with_affiliation, user_with_affiliation, institution, payload): """ Ensure the `cc` field defaults to `false` when not provided in the payload. 
""" @@ -249,20 +230,10 @@ def test_cc_field_defaults_to_false(self, mock_send_mail, app, institutional_adm user_message = UserMessage.objects.get(sender=institutional_admin) assert user_message.message_text == payload['data']['attributes']['message_text'] - mock_send_mail.assert_called_once_with( - to_addr=user_with_affiliation.username, - bcc_addr=None, - reply_to=None, - message_text='Requesting user access for collaboration', - mail=USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - sender=institutional_admin, - recipient=user_with_affiliation, - institution=institution, - ) - @mock.patch('osf.models.user_message.execute_email_send') - def test_reply_to_header_set(self, mock_send_mail, app, institutional_admin, user_with_affiliation, institution, url_with_affiliation, payload): + assert mock_send_grid.call_args[1]['to_addr'] == user_with_affiliation.username + + def test_reply_to_header_set(self, mock_send_grid, app, institutional_admin, user_with_affiliation, institution, url_with_affiliation, payload): """ Ensure that the 'Reply-To' header is correctly set to the sender's email address. """ @@ -275,14 +246,4 @@ def test_reply_to_header_set(self, mock_send_mail, app, institutional_admin, use ) assert res.status_code == 201 - mock_send_mail.assert_called_once_with( - to_addr=user_with_affiliation.username, - bcc_addr=None, - reply_to=institutional_admin.username, - message_text='Requesting user access for collaboration', - mail=USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST, - user=user_with_affiliation, - sender=institutional_admin, - recipient=user_with_affiliation, - institution=institution, - ) + assert mock_send_grid.call_args[1]['to_addr'] == user_with_affiliation.username diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index 7e7f6284edb..ec60c1f4c3d 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -12,7 +12,6 @@ from django.middleware import csrf from osf.models import Email, NotableDomain from framework.auth.views import auth_email_logout -from website import mails, settings @pytest.fixture() def user_one(): @@ -29,6 +28,7 @@ def unconfirmed_address(): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUserRequestExport: @pytest.fixture() @@ -48,8 +48,7 @@ def test_get(self, app, user_one, url): res = app.get(url, auth=user_one.auth, expect_errors=True) assert res.status_code == 405 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_post(self, mock_mail, app, user_one, user_two, url, payload): + def test_post(self, mock_send_grid, app, user_one, user_two, url, payload): # Logged out res = app.post_json_api(url, payload, expect_errors=True) assert res.status_code == 401 @@ -64,20 +63,18 @@ def test_post(self, mock_mail, app, user_one, user_two, url, payload): assert res.status_code == 204 user_one.reload() assert user_one.email_last_sent is not None - assert mock_mail.call_count == 1 + assert mock_send_grid.call_count == 1 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_post_invalid_type(self, mock_mail, app, user_one, url, payload): + def test_post_invalid_type(self, mock_send_grid, app, user_one, url, payload): assert user_one.email_last_sent is None payload['data']['type'] = 'Invalid Type' res = app.post_json_api(url, payload, auth=user_one.auth, expect_errors=True) assert res.status_code == 409 user_one.reload() assert user_one.email_last_sent is None - assert 
mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_exceed_throttle(self, mock_mail, app, user_one, url, payload): + def test_exceed_throttle(self, app, user_one, url, payload): assert user_one.email_last_sent is None res = app.post_json_api(url, payload, auth=user_one.auth) assert res.status_code == 204 @@ -171,6 +168,7 @@ def test_multiple_errors(self, app, user_one, url, payload): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestResetPassword: @pytest.fixture() @@ -189,27 +187,20 @@ def url(self): def csrf_token(self): return csrf._mask_cipher_secret(csrf._get_new_csrf_string()) - def test_get(self, app, url, user_one): + def test_get(self, mock_send_grid, app, url, user_one): encoded_email = urllib.parse.quote(user_one.email) url = f'{url}?email={encoded_email}' - with mock.patch.object(mails, 'execute_email_send', return_value=None) as mock_send_mail: - res = app.get(url) - assert res.status_code == 200 + res = app.get(url) + assert res.status_code == 200 - user_one.reload() - mock_send_mail.assert_called_with( - to_addr=user_one.username, - mail=mails.FORGOT_PASSWORD, - reset_link=f'{settings.RESET_PASSWORD_URL}{user_one._id}/{user_one.verification_key_v2['token']}/', - can_change_preferences=False, - ) + user_one.reload() + assert mock_send_grid.call_args[1]['to_addr'] == user_one.username - def test_get_invalid_email(self, app, url): + def test_get_invalid_email(self, mock_send_grid, app, url): url = f'{url}?email={'invalid_email'}' - with mock.patch.object(mails, 'execute_email_send', return_value=None) as mock_send_mail: - res = app.get(url) - assert res.status_code == 200 - assert not mock_send_mail.called + res = app.get(url) + assert res.status_code == 200 + assert not mock_send_grid.called def test_post(self, app, url, user_one, csrf_token): app.set_cookie(CSRF_COOKIE_NAME, csrf_token) diff --git a/api_tests/users/views/test_user_settings_detail.py b/api_tests/users/views/test_user_settings_detail.py index c26a4628d56..cc02e6ae145 100644 --- a/api_tests/users/views/test_user_settings_detail.py +++ b/api_tests/users/views/test_user_settings_detail.py @@ -227,6 +227,7 @@ def test_unauthorized_patch_403(self, app, url, payload, user_two): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUpdateRequestedDeactivation: @pytest.fixture() @@ -241,8 +242,7 @@ def payload(self, user_one): } } - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_patch_requested_deactivation(self, mock_mail, app, user_one, user_two, url, payload): + def test_patch_requested_deactivation(self, app, user_one, user_two, url, payload): # Logged out res = app.patch_json_api(url, payload, expect_errors=True) assert res.status_code == 401 @@ -271,18 +271,16 @@ def test_patch_requested_deactivation(self, mock_mail, app, user_one, user_two, user_one.reload() assert user_one.requested_deactivation is False - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_patch_invalid_type(self, mock_mail, app, user_one, url, payload): + def test_patch_invalid_type(self, mock_send_grid, app, user_one, url, payload): assert user_one.email_last_sent is None payload['data']['type'] = 'Invalid Type' res = app.patch_json_api(url, payload, auth=user_one.auth, expect_errors=True) assert res.status_code == 409 user_one.reload() assert user_one.email_last_sent is None - assert mock_mail.call_count == 0 + assert mock_send_grid.call_count == 0 - 
@mock.patch('framework.auth.views.mails.execute_email_send') - def test_exceed_throttle(self, mock_mail, app, user_one, url, payload): + def test_exceed_throttle(self, app, user_one, url, payload): assert user_one.email_last_sent is None res = app.patch_json_api(url, payload, auth=user_one.auth) assert res.status_code == 200 diff --git a/conftest.py b/conftest.py index 6f870093ed4..2270f7e7d16 100644 --- a/conftest.py +++ b/conftest.py @@ -357,3 +357,20 @@ def helpful_thing(self): ``` """ yield from rolledback_transaction('function_transaction') + + +@pytest.fixture() +def mock_send_grid(): + with mock.patch.object(website_settings, 'USE_EMAIL', True): + with mock.patch.object(website_settings, 'USE_CELERY', False): + with mock.patch('framework.email.tasks.send_email') as mock_sendgrid: + mock_sendgrid.return_value = True + yield mock_sendgrid + + +def start_mock_send_grid(test_case): + patcher = mock.patch('framework.email.tasks.send_email') + mocked_send = patcher.start() + test_case.addCleanup(patcher.stop) + mocked_send.return_value = True + return mocked_send diff --git a/osf_tests/management_commands/test_check_crossref_dois.py b/osf_tests/management_commands/test_check_crossref_dois.py index df7410c2c21..993c7e6731e 100644 --- a/osf_tests/management_commands/test_check_crossref_dois.py +++ b/osf_tests/management_commands/test_check_crossref_dois.py @@ -8,12 +8,13 @@ from osf_tests.factories import PreprintFactory -from website import settings, mails +from website import settings from osf.management.commands.check_crossref_dois import check_crossref_dois, report_stuck_dois @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestCheckCrossrefDOIs: @pytest.fixture() @@ -60,15 +61,7 @@ def test_check_crossref_dois(self, crossref_response, stuck_preprint, preprint): assert stuck_preprint.identifiers.count() == 1 assert stuck_preprint.identifiers.first().value == doi - @mock.patch('website.mails.execute_email_send') - def test_report_stuck_dois(self, mock_email, stuck_preprint): + def test_report_stuck_dois(self, mock_send_grid, stuck_preprint): report_stuck_dois(dry_run=False) - guid = stuck_preprint.guids.first()._id - mock_email.assert_called_with( - guids=guid, - time_since_published=2, - mail=mails.CROSSREF_DOIS_PENDING, - pending_doi_count=1, - to_addr=settings.OSF_SUPPORT_EMAIL - ) + mock_send_grid.assert_called() diff --git a/osf_tests/management_commands/test_email_all_users.py b/osf_tests/management_commands/test_email_all_users.py index f9477cf93de..14df656ee52 100644 --- a/osf_tests/management_commands/test_email_all_users.py +++ b/osf_tests/management_commands/test_email_all_users.py @@ -1,13 +1,12 @@ -from unittest import mock import pytest from django.utils import timezone from osf_tests.factories import UserFactory -from website import mails from osf.management.commands.email_all_users import email_all_users +@pytest.mark.usefixtures('mock_send_grid') class TestEmailAllUsers: @pytest.fixture() @@ -42,32 +41,25 @@ def unregistered_user(self): return UserFactory(is_registered=False) @pytest.mark.django_db - @mock.patch('website.mails.execute_email_send') - def test_email_all_users_dry(self, mock_email, superuser): + def test_email_all_users_dry(self, mock_send_grid, superuser): email_all_users('TOU_NOTIF', dry_run=True) - mock_email.assert_called_with( - to_addr=superuser.email, - mail=mails.TOU_NOTIF, - given_name=superuser.given_name - ) + mock_send_grid.assert_called() @pytest.mark.django_db - @mock.patch('website.mails.execute_email_send') def 
test_dont_email_inactive_users( - self, mock_email, deleted_user, inactive_user, unconfirmed_user, unregistered_user): + self, mock_send_grid, deleted_user, inactive_user, unconfirmed_user, unregistered_user): email_all_users('TOU_NOTIF') - mock_email.assert_not_called() + mock_send_grid.assert_not_called() @pytest.mark.django_db - @mock.patch('website.mails.execute_email_send') - def test_email_all_users_offset(self, mock_email, user, user2): + def test_email_all_users_offset(self, mock_send_grid, user, user2): email_all_users('TOU_NOTIF', offset=1, start_id=0) email_all_users('TOU_NOTIF', offset=1, start_id=1) email_all_users('TOU_NOTIF', offset=1, start_id=2) - assert mock_email.call_count == 2 + assert mock_send_grid.call_count == 2 diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index eb8b9d6f6d9..bffffb75f05 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -7,7 +7,6 @@ from unittest import mock from django.utils import timezone from django.db import IntegrityError -from unittest.mock import call import pytest from framework.auth import Auth @@ -34,6 +33,7 @@ from tests.base import OsfTestCase, fake from tests import utils as test_utils from tests.utils import unique as _unique +from conftest import start_mock_send_grid pytestmark = pytest.mark.django_db @@ -715,10 +715,15 @@ def test_archive_success_same_file_in_component(self): assert child_reg._id in question['extra'][0]['viewUrl'] +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverUtils(ArchiverTestCase): - @mock.patch('website.mails.execute_email_send') - def test_handle_archive_fail(self, mock_send_mail): + def setUp(self): + super().setUp() + self.mock_send_grid = start_mock_send_grid(self) + + def test_handle_archive_fail(self): archiver_utils.handle_archive_fail( ARCHIVER_NETWORK_ERROR, self.src, @@ -726,13 +731,11 @@ def test_handle_archive_fail(self, mock_send_mail): self.user, {} ) - assert mock_send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 self.dst.reload() assert self.dst.is_deleted - @mock.patch('website.mails.execute_email_send') - def test_handle_archive_fail_copy(self, mock_send_mail): - url = settings.INTERNAL_DOMAIN + self.src._id + def test_handle_archive_fail_copy(self): archiver_utils.handle_archive_fail( ARCHIVER_NETWORK_ERROR, self.src, @@ -740,31 +743,9 @@ def test_handle_archive_fail_copy(self, mock_send_mail): self.user, {} ) - args_user = dict( - to_addr=self.user.username, - user=self.user, - src=self.src, - mail=mails.ARCHIVE_COPY_ERROR_USER, - results={}, - can_change_preferences=False, - ) - args_desk = dict( - to_addr=settings.OSF_SUPPORT_EMAIL, - user=self.user, - src=self.src, - mail=mails.ARCHIVE_COPY_ERROR_DESK, - results={}, - can_change_preferences=False, - url=url, - ) - mock_send_mail.assert_has_calls([ - call(**args_user), - call(**args_desk), - ], any_order=True) - - @mock.patch('website.mails.execute_email_send') - def test_handle_archive_fail_size(self, mock_send_mail): - url = settings.INTERNAL_DOMAIN + self.src._id + assert self.mock_send_grid.call_count == 2 + + def test_handle_archive_fail_size(self): archiver_utils.handle_archive_fail( ARCHIVER_SIZE_EXCEEDED, self.src, @@ -772,26 +753,7 @@ def test_handle_archive_fail_size(self, mock_send_mail): self.user, {} ) - args_user = dict( - to_addr=self.user.username, - user=self.user, - src=self.src, - mail=mails.ARCHIVE_SIZE_EXCEEDED_USER, - can_change_preferences=False, - ) - args_desk = 
dict( - to_addr=settings.OSF_SUPPORT_EMAIL, - user=self.user, - src=self.src, - mail=mails.ARCHIVE_SIZE_EXCEEDED_DESK, - stat_result={}, - can_change_preferences=False, - url=url, - ) - mock_send_mail.assert_has_calls([ - call(**args_user), - call(**args_desk), - ], any_order=True) + assert self.mock_send_grid.call_count == 2 def test_aggregate_file_tree_metadata(self): a_stat_result = archiver_utils.aggregate_file_tree_metadata('dropbox', FILE_TREE, self.user) @@ -878,9 +840,14 @@ def test_get_file_map_memoization(self): archiver_utils.get_file_map(node) assert mock_get_file_tree.call_count == call_count - +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverListeners(ArchiverTestCase): + def setUp(self): + super().setUp() + self.mock_send_grid = start_mock_send_grid(self) + @mock.patch('website.archiver.tasks.archive') @mock.patch('website.archiver.utils.before_archive') def test_after_register(self, mock_before_archive, mock_archive): @@ -931,24 +898,21 @@ def test_archive_callback_pending(self, mock_delay): ARCHIVER_SUCCESS ) self.dst.archive_job.save() - with mock.patch('website.mails.execute_email_send') as mock_send: - with mock.patch('website.archiver.utils.handle_archive_fail') as mock_fail: - listeners.archive_callback(self.dst) - assert not mock_send.called + with mock.patch('website.archiver.utils.handle_archive_fail') as mock_fail: + listeners.archive_callback(self.dst) + assert not self.mock_send_grid.called assert not mock_fail.called assert mock_delay.called - @mock.patch('website.mails.execute_email_send') @mock.patch('website.archiver.tasks.archive_success.delay') - def test_archive_callback_done_success(self, mock_send, mock_archive_success): + def test_archive_callback_done_success(self, mock_archive_success): self.dst.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) self.dst.archive_job.save() listeners.archive_callback(self.dst) - assert mock_send.call_count == 1 + assert self.mock_send_grid.call_count == 0 - @mock.patch('website.mails.execute_email_send') @mock.patch('website.archiver.tasks.archive_success.delay') - def test_archive_callback_done_embargoed(self, mock_send, mock_archive_success): + def test_archive_callback_done_embargoed(self, mock_archive_success): end_date = timezone.now() + datetime.timedelta(days=30) self.dst.archive_job.meta = { 'embargo_urls': { @@ -960,7 +924,7 @@ def test_archive_callback_done_embargoed(self, mock_send, mock_archive_success): self.dst.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) self.dst.save() listeners.archive_callback(self.dst) - assert mock_send.call_count == 1 + assert self.mock_send_grid.call_count == 0 def test_archive_callback_done_errors(self): self.dst.archive_job.update_target('osfstorage', ARCHIVER_FAILURE) @@ -1037,9 +1001,8 @@ def test_archive_tree_finished_false_for_partial_archive(self): rsibling.save() assert not reg.archive_job.archive_tree_finished() - @mock.patch('website.mails.execute_email_send') @mock.patch('website.archiver.tasks.archive_success.delay') - def test_archive_callback_on_tree_sends_only_one_email(self, mock_send_success, mock_arhive_success): + def test_archive_callback_on_tree_sends_only_one_email(self, mock_arhive_success): proj = factories.NodeFactory() child = factories.NodeFactory(parent=proj) factories.NodeFactory(parent=child) @@ -1053,16 +1016,15 @@ def test_archive_callback_on_tree_sends_only_one_email(self, mock_send_success, rchild.archive_job.update_target('osfstorage', 
ARCHIVER_SUCCESS) rchild.save() listeners.archive_callback(rchild) - assert not mock_send_success.called + assert not self.mock_send_grid.called reg.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) reg.save() listeners.archive_callback(reg) - assert not mock_send_success.called + assert not self.mock_send_grid.called rchild2.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) rchild2.save() listeners.archive_callback(rchild2) - assert mock_send_success.call_count == 1 - assert mock_send_success.called + assert not self.mock_send_grid.called class TestArchiverScripts(ArchiverTestCase): @@ -1110,8 +1072,14 @@ def test_find_failed_registrations(self): assert pk not in failed +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverBehavior(OsfTestCase): + def setUp(self): + super().setUp() + self.mock_send_grid = start_mock_send_grid(self) + @mock.patch('osf.models.AbstractNode.update_search') def test_archiving_registrations_not_added_to_search_before_archival(self, mock_update_search): proj = factories.ProjectFactory() @@ -1120,9 +1088,8 @@ def test_archiving_registrations_not_added_to_search_before_archival(self, mock_ assert not mock_update_search.called @mock.patch('osf.models.AbstractNode.update_search') - @mock.patch('website.mails.execute_email_send') @mock.patch('website.archiver.tasks.archive_success.delay') - def test_archiving_nodes_added_to_search_on_archive_success_if_public(self, mock_update_search, mock_send, mock_archive_success): + def test_archiving_nodes_added_to_search_on_archive_success_if_public(self, mock_update_search, mock_archive_success): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj) reg.save() @@ -1135,8 +1102,7 @@ def test_archiving_nodes_added_to_search_on_archive_success_if_public(self, mock @pytest.mark.enable_search @mock.patch('website.search.elastic_search.delete_doc') - @mock.patch('website.mails.execute_email_send') - def test_archiving_nodes_not_added_to_search_on_archive_failure(self, mock_send, mock_delete_index_node): + def test_archiving_nodes_not_added_to_search_on_archive_failure(self, mock_delete_index_node): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj, archive=True) reg.save() @@ -1148,8 +1114,7 @@ def test_archiving_nodes_not_added_to_search_on_archive_failure(self, mock_send, assert mock_delete_index_node.called @mock.patch('osf.models.AbstractNode.update_search') - @mock.patch('website.mails.execute_email_send') - def test_archiving_nodes_not_added_to_search_on_archive_incomplete(self, mock_send, mock_update_search): + def test_archiving_nodes_not_added_to_search_on_archive_incomplete(self, mock_update_search): proj = factories.ProjectFactory() reg = factories.RegistrationFactory(project=proj) reg.save() diff --git a/osf_tests/test_collection.py b/osf_tests/test_collection.py index 3a8cf880520..c28dea3eb99 100644 --- a/osf_tests/test_collection.py +++ b/osf_tests/test_collection.py @@ -16,8 +16,6 @@ CollectionProviderFactory ) from osf.utils.workflows import CollectionSubmissionStates -from website.mails import mails -from osf.models.collection_submission import mails as collection_submission_mail pytestmark = pytest.mark.django_db @@ -73,6 +71,7 @@ def test_can_remove_root_folder_structure_without_cascading(self, user, auth): @pytest.mark.enable_bookmark_creation +@pytest.mark.usefixtures('mock_send_grid') class TestImplicitRemoval: @pytest.fixture @@ -127,32 +126,22 @@ def 
test_node_removed_from_collection_on_privacy_change(self, auth, collected_no assert associated_collections.filter(collection=bookmark_collection).exists() @mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits - def test_node_removed_from_collection_on_privacy_change_notify(self, auth, provider_collected_node, bookmark_collection): + def test_node_removed_from_collection_on_privacy_change_notify(self, auth, provider_collected_node, bookmark_collection, mock_send_grid): associated_collections = provider_collected_node.guids.first().collectionsubmission_set assert associated_collections.count() == 3 - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - provider_collected_node.set_privacy('private', auth=auth) - assert mock_send.called - assert len(mock_send.call_args_list) == 1 - email1 = mock_send.call_args_list[0] - _, email1_kwargs = email1 - assert {email1_kwargs['node'].id} == {provider_collected_node.id} - expected_mail = mails.COLLECTION_SUBMISSION_REMOVED_PRIVATE(associated_collections.last().collection, provider_collected_node) - assert {email1_kwargs['mail'].tpl_prefix} == {expected_mail.tpl_prefix} + mock_send_grid.reset_mock() + provider_collected_node.set_privacy('private', auth=auth) + assert mock_send_grid.called + assert len(mock_send_grid.call_args_list) == 1 @mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits - def test_node_removed_from_collection_on_privacy_change_no_provider(self, auth, collected_node, bookmark_collection): + def test_node_removed_from_collection_on_privacy_change_no_provider(self, auth, collected_node, bookmark_collection, mock_send_grid): associated_collections = collected_node.guids.first().collectionsubmission_set assert associated_collections.count() == 3 - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - collected_node.set_privacy('private', auth=auth) - assert not mock_send.called + collected_node.set_privacy('private', auth=auth) + assert not mock_send_grid.called def test_node_removed_from_collection_on_delete(self, collected_node, bookmark_collection, auth): associated_collections = collected_node.guids.first().collectionsubmission_set diff --git a/osf_tests/test_collection_submission.py b/osf_tests/test_collection_submission.py index fe99a65f751..2ff2b279a6b 100644 --- a/osf_tests/test_collection_submission.py +++ b/osf_tests/test_collection_submission.py @@ -13,9 +13,6 @@ from osf.utils.workflows import CollectionSubmissionStates from framework.exceptions import PermissionsError from api_tests.utils import UserRoles -from website.mails import mails -from osf_tests.utils import assert_notification_correctness -from osf.models.collection_submission import mails as collection_submission_mail from osf.management.commands.populate_collection_provider_notification_subscriptions import populate_collection_provider_notification_subscriptions from django.utils import timezone @@ -147,6 +144,7 @@ def configure_test_auth(node, user_role, provider=None): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestModeratedCollectionSubmission: MOCK_NOW = timezone.now() @@ -161,23 +159,15 @@ def test_submit(self, 
moderated_collection_submission): # .submit on post_save assert moderated_collection_submission.state == CollectionSubmissionStates.PENDING - def test_notify_contributors_pending(self, node, moderated_collection): - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - collection_submission = CollectionSubmission( - guid=node.guids.first(), - collection=moderated_collection, - creator=node.creator, - ) - collection_submission.save() - assert mock_send.called - assert collection_submission.state == CollectionSubmissionStates.PENDING - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_SUBMITTED(collection_submission.creator, node), - {user.username for user in node.contributors.all()} + def test_notify_contributors_pending(self, node, moderated_collection, mock_send_grid): + collection_submission = CollectionSubmission( + guid=node.guids.first(), + collection=moderated_collection, + creator=node.creator, ) + collection_submission.save() + assert mock_send_grid.called + assert collection_submission.state == CollectionSubmissionStates.PENDING def test_notify_moderators_pending(self, node, moderated_collection): from website.notifications import emails @@ -216,19 +206,11 @@ def test_accept_success(self, node, moderated_collection_submission): moderated_collection_submission.accept(user=moderator, comment='Test Comment') assert moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - def test_notify_moderated_accepted(self, node, moderated_collection_submission): + def test_notify_moderated_accepted(self, node, moderated_collection_submission, mock_send_grid): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - moderated_collection_submission.accept(user=moderator, comment='Test Comment') - assert mock_send.called + moderated_collection_submission.accept(user=moderator, comment='Test Comment') + assert mock_send_grid.called assert moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_ACCEPTED(moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) @pytest.mark.parametrize('user_role', [UserRoles.UNAUTHENTICATED, UserRoles.NONCONTRIB]) def test_reject_fails(self, node, user_role, moderated_collection_submission): @@ -242,20 +224,12 @@ def test_reject_success(self, node, moderated_collection_submission): moderated_collection_submission.reject(user=moderator, comment='Test Comment') assert moderated_collection_submission.state == CollectionSubmissionStates.REJECTED - def test_notify_moderated_rejected(self, node, moderated_collection_submission): + def test_notify_moderated_rejected(self, node, moderated_collection_submission, mock_send_grid): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - moderated_collection_submission.reject(user=moderator, comment='Test Comment') - assert mock_send.called - assert moderated_collection_submission.state == 
CollectionSubmissionStates.REJECTED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REJECTED(moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + moderated_collection_submission.reject(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert moderated_collection_submission.state == CollectionSubmissionStates.REJECTED @pytest.mark.parametrize('user_role', UserRoles.excluding(*[UserRoles.ADMIN_USER, UserRoles.MODERATOR])) def test_remove_fails(self, node, user_role, moderated_collection_submission): @@ -274,37 +248,21 @@ def test_remove_success(self, node, user_role, moderated_collection_submission): moderated_collection_submission.remove(user=user, comment='Test Comment') assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_moderator(self, node, moderated_collection_submission): + def test_notify_moderated_removed_moderator(self, node, moderated_collection_submission, mock_send_grid): moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_MODERATOR(moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_admin(self, node, moderated_collection_submission): + def test_notify_moderated_removed_admin(self, node, moderated_collection_submission, mock_send_grid): moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_ADMIN(moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED def test_resubmit_success(self, node, moderated_collection_submission): user = configure_test_auth(node, UserRoles.ADMIN_USER) @@ -340,6 +298,7 @@ def test_cancel_succeeds(self, node, moderated_collection_submission): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUnmoderatedCollectionSubmission: def test_moderated_submit(self, unmoderated_collection_submission): @@ -377,21 +336,13 
@@ def test_remove_success(self, user_role, node, unmoderated_collection_submission unmoderated_collection_submission.remove(user=user, comment='Test Comment') assert unmoderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_admin(self, node, unmoderated_collection_submission): + def test_notify_moderated_removed_admin(self, node, unmoderated_collection_submission, mock_send_grid): unmoderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - unmoderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert unmoderated_collection_submission.state == CollectionSubmissionStates.REMOVED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_ADMIN(unmoderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + unmoderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert unmoderated_collection_submission.state == CollectionSubmissionStates.REMOVED def test_resubmit_success(self, node, unmoderated_collection_submission): user = configure_test_auth(node, UserRoles.ADMIN_USER) @@ -427,6 +378,7 @@ def test_cancel_succeeds(self, node, unmoderated_collection_submission): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestHybridModeratedCollectionSubmission: @pytest.mark.parametrize('user_role', UserRoles.excluding(UserRoles.MODERATOR)) @@ -482,20 +434,12 @@ def test_accept_success(self, node, hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - def test_notify_moderated_accepted(self, node, hybrid_moderated_collection_submission): + def test_notify_moderated_accepted(self, node, hybrid_moderated_collection_submission, mock_send_grid): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') - assert mock_send.called - assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_ACCEPTED(hybrid_moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED @pytest.mark.parametrize('user_role', [UserRoles.UNAUTHENTICATED, UserRoles.NONCONTRIB]) def test_reject_fails(self, node, user_role, hybrid_moderated_collection_submission): @@ -509,20 +453,12 @@ def test_reject_success(self, node, hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') assert 
hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REJECTED - def test_notify_moderated_rejected(self, node, hybrid_moderated_collection_submission): + def test_notify_moderated_rejected(self, node, hybrid_moderated_collection_submission, mock_send_grid): moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') - assert mock_send.called - assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REJECTED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REJECTED(hybrid_moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REJECTED @pytest.mark.parametrize('user_role', UserRoles.excluding(*[UserRoles.ADMIN_USER, UserRoles.MODERATOR])) def test_remove_fails(self, node, user_role, hybrid_moderated_collection_submission): @@ -541,37 +477,21 @@ def test_remove_success(self, node, user_role, hybrid_moderated_collection_submi hybrid_moderated_collection_submission.remove(user=user, comment='Test Comment') assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_moderator(self, node, hybrid_moderated_collection_submission): + def test_notify_moderated_removed_moderator(self, node, hybrid_moderated_collection_submission, mock_send_grid): hybrid_moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.MODERATOR) - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_MODERATOR(hybrid_moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_admin(self, node, hybrid_moderated_collection_submission): + def test_notify_moderated_removed_admin(self, node, hybrid_moderated_collection_submission, mock_send_grid): hybrid_moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - send_mail = mails.execute_email_send - with mock.patch.object(collection_submission_mail, 'execute_email_send') as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send.called - assert hybrid_moderated_collection_submission.state == 
CollectionSubmissionStates.REMOVED - assert_notification_correctness( - mock_send, - mails.COLLECTION_SUBMISSION_REMOVED_ADMIN(hybrid_moderated_collection_submission.collection, node), - {user.username for user in node.contributors.all()} - ) + hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert mock_send_grid.called + assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED def test_resubmit_success(self, node, hybrid_moderated_collection_submission): user = configure_test_auth(node, UserRoles.ADMIN_USER) diff --git a/osf_tests/test_institution.py b/osf_tests/test_institution.py index 449d35b17a5..eca6737b6e5 100644 --- a/osf_tests/test_institution.py +++ b/osf_tests/test_institution.py @@ -12,7 +12,6 @@ RegionFactory, UserFactory, ) -from website import mails, settings @pytest.mark.django_db @@ -110,6 +109,7 @@ def test_non_group_member_doesnt_have_perms(self, institution, user): @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestInstitutionManager: def test_deactivated_institution_not_in_default_queryset(self): @@ -146,9 +146,7 @@ def test_reactivate_institution(self): institution.reactivate() assert institution.deactivated is None - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) - def test_send_deactivation_email_call_count(self, mock_send_mail): + def test_send_deactivation_email_call_count(self, mock_send_grid): institution = InstitutionFactory() user_1 = UserFactory() user_1.add_or_update_affiliated_institution(institution) @@ -157,24 +155,15 @@ def test_send_deactivation_email_call_count(self, mock_send_mail): user_2.add_or_update_affiliated_institution(institution) user_2.save() institution._send_deactivation_email() - assert mock_send_mail.call_count == 2 + assert mock_send_grid.call_count == 2 - @mock.patch('website.mails.settings.USE_EMAIL', False) - @mock.patch('website.mails.execute_email_send', return_value=None, side_effect=mails.execute_email_send) - def test_send_deactivation_email_call_args(self, mock_send_mail): + def test_send_deactivation_email_call_args(self, mock_send_grid): institution = InstitutionFactory() user = UserFactory() user.add_or_update_affiliated_institution(institution) user.save() institution._send_deactivation_email() - forgot_password = 'forgotpassword' if settings.DOMAIN.endswith('/') else '/forgotpassword' - mock_send_mail.assert_called_with( - to_addr=user.username, - mail=mails.INSTITUTION_DEACTIVATION, - user=user, - forgot_password_link=f'{settings.DOMAIN}{forgot_password}', - osf_support_email=settings.OSF_SUPPORT_EMAIL - ) + mock_send_grid.assert_called() def test_deactivate_inactive_institution_noop(self): institution = InstitutionFactory() diff --git a/osf_tests/test_merging_users.py b/osf_tests/test_merging_users.py index 31ee4aa6d52..ee13c7bc107 100644 --- a/osf_tests/test_merging_users.py +++ b/osf_tests/test_merging_users.py @@ -24,18 +24,22 @@ from tests.utils import run_celery_tasks from waffle.testutils import override_flag from osf.features import ENABLE_GV +from conftest import start_mock_send_grid SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore @pytest.mark.enable_implicit_clean @pytest.mark.enable_bookmark_creation +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestUserMerging(OsfTestCase): def setUp(self): 
super().setUp() self.user = UserFactory() with self.context: handlers.celery_before_request() + self.mock_send_grid = start_mock_send_grid(self) def _add_unconfirmed_user(self): self.unconfirmed = UnconfirmedUserFactory() @@ -286,12 +290,11 @@ def test_merge_unregistered(self): assert self.user.is_invited is True assert self.user in self.project_with_unreg_contrib.contributors - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_merge_doesnt_send_signal(self, mock_notify): + def test_merge_doesnt_send_signal(self): #Explictly reconnect signal as it is disconnected by default for test contributor_added.connect(notify_added_contributor) other_user = UserFactory() with override_flag(ENABLE_GV, active=True): self.user.merge_user(other_user) assert other_user.merged_by._id == self.user._id - assert mock_notify.called is False + assert self.mock_send_grid.called is False diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index 030ad52a849..ea594a1dddf 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -1311,8 +1311,7 @@ class TestContributorAddedSignal: def disconnected_signals(self): return None - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_add_contributors_sends_contributor_added_signal(self, mock_send_mail, node, auth): + def test_add_contributors_sends_contributor_added_signal(self, node, auth): user = UserFactory() contributors = [{ 'user': user, @@ -2222,11 +2221,10 @@ def test_check_spam_on_private_node(self, project, user): assert not project.is_public @pytest.mark.enable_enqueue_task - @mock.patch('website.mails.execute_email_send') @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) @pytest.mark.skip('Technically still true, but skipping because mocking is outdated') - def test_check_spam_on_private_node_bans_new_spam_user(self, mock_send_mail, project, user): + def test_check_spam_on_private_node_bans_new_spam_user(self, project, user): project.is_public = False project.save() with mock.patch('osf.models.AbstractNode._get_spam_content', mock.Mock(return_value='some content!')): @@ -2253,10 +2251,9 @@ def test_check_spam_on_private_node_bans_new_spam_user(self, mock_send_mail, pro project3.reload() assert project3.is_public is True - @mock.patch('website.mails.execute_email_send') @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) - def test_check_spam_on_private_node_does_not_ban_existing_user(self, mock_send_mail, project, user): + def test_check_spam_on_private_node_does_not_ban_existing_user(self, project, user): project.is_public = False project.save() with mock.patch('osf.models.AbstractNode._get_spam_content', mock.Mock(return_value='some content!')): diff --git a/osf_tests/test_queued_mail.py b/osf_tests/test_queued_mail.py index d9429d9d384..395b770a61d 100644 --- a/osf_tests/test_queued_mail.py +++ b/osf_tests/test_queued_mail.py @@ -3,7 +3,6 @@ import pytest -from unittest import mock from django.utils import timezone from waffle.testutils import override_switch @@ -35,28 +34,24 @@ def queue_mail(self, mail, user, send_at=None, **kwargs): ) return mail - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_no_login_presend_for_active_user(self, mock_mail, user): + def test_no_login_presend_for_active_user(self, user): mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() + 
dt.timedelta(seconds=10) user.save() assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_no_login_presend_for_inactive_user(self, mock_mail, user): + def test_no_login_presend_for_inactive_user(self, user): mail = self.queue_mail(mail=NO_LOGIN, user=user) user.date_last_login = timezone.now() - dt.timedelta(weeks=10) user.save() assert timezone.now() - dt.timedelta(days=1) > user.date_last_login assert bool(mail.send_mail()) is True - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_no_addon_presend(self, mock_mail, user): + def test_no_addon_presend(self, user): mail = self.queue_mail(mail=NO_ADDON, user=user) assert mail.send_mail() is True - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_new_public_project_presend_for_no_project(self, mock_mail, user): + def test_new_public_project_presend_for_no_project(self, user): mail = self.queue_mail( mail=NEW_PUBLIC_PROJECT, user=user, @@ -65,8 +60,7 @@ def test_new_public_project_presend_for_no_project(self, mock_mail, user): ) assert bool(mail.send_mail()) is False - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_new_public_project_presend_success(self, mock_mail, user): + def test_new_public_project_presend_success(self, user): node = NodeFactory(is_public=True) mail = self.queue_mail( mail=NEW_PUBLIC_PROJECT, @@ -76,8 +70,7 @@ def test_new_public_project_presend_success(self, mock_mail, user): ) assert bool(mail.send_mail()) is True - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_welcome_osf4m_presend(self, mock_mail, user): + def test_welcome_osf4m_presend(self, user): user.date_last_login = timezone.now() - dt.timedelta(days=13) user.save() mail = self.queue_mail( @@ -90,8 +83,7 @@ def test_welcome_osf4m_presend(self, mock_mail, user): assert bool(mail.send_mail()) is True assert mail.data['downloads'] == 0 - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_finding_other_emails_sent_to_user(self, mock_mail, user): + def test_finding_other_emails_sent_to_user(self, user): mail = self.queue_mail( user=user, mail=NO_ADDON, @@ -100,16 +92,14 @@ def test_finding_other_emails_sent_to_user(self, mock_mail, user): mail.send_mail() assert len(mail.find_sent_of_same_type_and_user()) == 1 - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_user_is_active(self, mock_mail, user): + def test_user_is_active(self, user): mail = self.queue_mail( user=user, mail=NO_ADDON, ) assert bool(mail.send_mail()) is True - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_user_is_not_active_no_password(self, mock_mail): + def test_user_is_not_active_no_password(self): user = UserFactory.build() user.set_unusable_password() user.save() @@ -119,8 +109,7 @@ def test_user_is_not_active_no_password(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_user_is_not_active_not_registered(self, mock_mail): + def test_user_is_not_active_not_registered(self): user = UserFactory(is_registered=False) mail = self.queue_mail( user=user, @@ -128,8 +117,7 @@ def test_user_is_not_active_not_registered(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_user_is_not_active_is_merged(self, mock_mail): + def test_user_is_not_active_is_merged(self): other_user = UserFactory() user = UserFactory(merged_by=other_user) mail = self.queue_mail( @@ -138,8 +126,7 @@ def 
test_user_is_not_active_is_merged(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_user_is_not_active_is_disabled(self, mock_mail): + def test_user_is_not_active_is_disabled(self): user = UserFactory(date_disabled=timezone.now()) mail = self.queue_mail( user=user, @@ -147,8 +134,7 @@ def test_user_is_not_active_is_disabled(self, mock_mail): ) assert mail.send_mail() is False - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_user_is_not_active_is_not_confirmed(self, mock_mail): + def test_user_is_not_active_is_not_confirmed(self): user = UserFactory(date_confirmed=None) mail = self.queue_mail( user=user, diff --git a/osf_tests/test_registration_moderation_notifications.py b/osf_tests/test_registration_moderation_notifications.py index 81659d79b08..100c15e64e1 100644 --- a/osf_tests/test_registration_moderation_notifications.py +++ b/osf_tests/test_registration_moderation_notifications.py @@ -9,7 +9,6 @@ from osf.migrations import update_provider_auth_groups from osf.models import Brand, NotificationDigest from osf.models.action import RegistrationAction -from osf.utils import machines from osf.utils.notifications import ( notify_submit, notify_accept_reject, @@ -25,9 +24,8 @@ RetractionFactory ) -from website import mails, settings +from website import settings from website.notifications import emails, tasks -from website.reviews import listeners def get_moderator(provider): @@ -46,9 +44,8 @@ def get_daily_moderator(provider): # Set USE_EMAIL to true and mock out the default mailer for consistency with other mocked settings -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.tasks.send_email', mock.MagicMock()) @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestRegistrationMachineNotification: MOCK_NOW = timezone.now() @@ -140,7 +137,7 @@ def withdraw_action(self, registration, admin): ) return registration_action - def test_submit_notifications(self, registration, moderator, admin, contrib, provider): + def test_submit_notifications(self, registration, moderator, admin, contrib, provider, mock_send_grid): """ [REQS-96] "As moderator of branded registry, I receive email notification upon admin author(s) submission approval" :param mock_email: @@ -150,50 +147,15 @@ def test_submit_notifications(self, registration, moderator, admin, contrib, pro # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call/args and also implicitly ensures # that the email acutally renders as normal in send_mail. 
- send_mail = mails.execute_email_send - with mock.patch.object(listeners.mails, 'execute_email_send', side_effect=send_mail) as mock_send_mail: - notify_submit(registration, admin) - - assert len(mock_send_mail.call_args_list) == 2 - admin_message, contrib_message = mock_send_mail.call_args_list + notify_submit(registration, admin) - assert admin_message == call( - admin.email, - mails.REVIEWS_SUBMISSION_CONFIRMATION, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_creator=True, - logo='osf_registries', - no_future_emails=[], - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_name=provider.name, - provider_url='http://localhost:5000/', - referrer=admin, - reviewable=registration, - user=admin, - workflow=None - ) + assert len(mock_send_grid.call_args_list) == 2 + admin_message, contrib_message = mock_send_grid.call_args_list - assert contrib_message == call( - contrib.email, - mails.REVIEWS_SUBMISSION_CONFIRMATION, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_creator=False, - logo='osf_registries', - no_future_emails=[], - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_name=provider.name, - provider_url='http://localhost:5000/', - referrer=admin, - reviewable=registration, - user=contrib, - workflow=None - ) + assert admin_message[1]['to_addr'] == admin.email + assert contrib_message[1]['to_addr'] == contrib.email + assert admin_message[1]['subject'] == 'Confirmation of your submission to OSF Registries' + assert contrib_message[1]['subject'] == 'Confirmation of your submission to OSF Registries' assert NotificationDigest.objects.count() == 1 digest = NotificationDigest.objects.last() @@ -365,7 +327,7 @@ def test_notify_moderator_registration_requests_withdrawal_notifications(self, m assert digest.event == 'new_pending_withdraw_requests' assert digest.provider == provider - def test_withdrawal_registration_accepted_notifications(self, registration_with_retraction, contrib, admin, withdraw_action): + def test_withdrawal_registration_accepted_notifications(self, registration_with_retraction, contrib, admin, withdraw_action, mock_send_grid): """ [REQS-109] "As registration author(s) requesting registration withdrawal, we receive notification email of moderator decision" @@ -378,52 +340,17 @@ def test_withdrawal_registration_accepted_notifications(self, registration_with_ # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.execute_email_send - with mock.patch.object(machines.mails, 'execute_email_send', side_effect=send_mail) as mock_email: - notify_withdraw_registration(registration_with_retraction, withdraw_action) + notify_withdraw_registration(registration_with_retraction, withdraw_action) - assert len(mock_email.call_args_list) == 2 - admin_message, contrib_message = mock_email.call_args_list + assert len(mock_send_grid.call_args_list) == 2 + admin_message, contrib_message = mock_send_grid.call_args_list - assert admin_message == call( - admin.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=admin, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=True, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) + assert admin_message[1]['to_addr'] == admin.email + assert contrib_message[1]['to_addr'] == contrib.email + assert admin_message[1]['subject'] == 'Your registration has been withdrawn' + assert contrib_message[1]['subject'] == 'Your registration has been withdrawn' - assert contrib_message == call( - contrib.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=contrib, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=False, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) - - def test_withdrawal_registration_rejected_notifications(self, registration, contrib, admin, withdraw_request_action): + def test_withdrawal_registration_rejected_notifications(self, registration, contrib, admin, withdraw_request_action, mock_send_grid): """ [REQS-109] "As registration author(s) requesting registration withdrawal, we receive notification email of moderator decision" @@ -436,46 +363,17 @@ def test_withdrawal_registration_rejected_notifications(self, registration, cont # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.execute_email_send - with mock.patch.object(machines.mails, 'execute_email_send', side_effect=send_mail) as mock_email: - notify_reject_withdraw_request(registration, withdraw_request_action) - - assert len(mock_email.call_args_list) == 2 - admin_message, contrib_message = mock_email.call_args_list + notify_reject_withdraw_request(registration, withdraw_request_action) - assert admin_message == call( - admin.email, - mails.WITHDRAWAL_REQUEST_DECLINED, - contributor=admin, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_requester=True, - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration, - workflow=None - ) + assert len(mock_send_grid.call_args_list) == 2 + admin_message, contrib_message = mock_send_grid.call_args_list - assert contrib_message == call( - contrib.email, - mails.WITHDRAWAL_REQUEST_DECLINED, - contributor=contrib, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - is_requester=False, - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration, - workflow=None - ) + assert admin_message[1]['to_addr'] == admin.email + assert contrib_message[1]['to_addr'] == contrib.email + assert admin_message[1]['subject'] == 'Your withdrawal request has been declined' + assert contrib_message[1]['subject'] == 'Your withdrawal request has been declined' - def test_withdrawal_registration_force_notifications(self, registration_with_retraction, contrib, admin, withdraw_action): + def test_withdrawal_registration_force_notifications(self, registration_with_retraction, contrib, admin, withdraw_action, mock_send_grid): """ [REQS-109] "As registration author(s) requesting registration withdrawal, we receive notification email of moderator decision" @@ -488,60 +386,25 @@ def test_withdrawal_registration_force_notifications(self, registration_with_ret # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.execute_email_send - with mock.patch.object(machines.mails, 'execute_email_send', side_effect=send_mail) as mock_email: - notify_withdraw_registration(registration_with_retraction, withdraw_action) + notify_withdraw_registration(registration_with_retraction, withdraw_action) - assert len(mock_email.call_args_list) == 2 - admin_message, contrib_message = mock_email.call_args_list + assert len(mock_send_grid.call_args_list) == 2 + admin_message, contrib_message = mock_send_grid.call_args_list - assert admin_message == call( - admin.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=admin, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=True, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) - - assert contrib_message == call( - contrib.email, - mails.WITHDRAWAL_REQUEST_GRANTED, - comment='yo', - contributor=contrib, - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration_with_retraction.draft_registration.get(), - is_requester=False, - force_withdrawal=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration_with_retraction, - workflow=None - ) + assert admin_message[1]['to_addr'] == admin.email + assert contrib_message[1]['to_addr'] == contrib.email + assert admin_message[1]['subject'] == 'Your registration has been withdrawn' + assert contrib_message[1]['subject'] == 'Your registration has been withdrawn' @pytest.mark.parametrize( 'digest_type, expected_recipient', [('email_transactional', get_moderator), ('email_digest', get_daily_moderator)] ) - def test_submissions_and_withdrawals_both_appear_in_moderator_digest(self, digest_type, expected_recipient, registration, admin, provider): + def test_submissions_and_withdrawals_both_appear_in_moderator_digest(self, digest_type, expected_recipient, registration, admin, provider, mock_send_grid): # Invoke the fixture function to get the recipient because parametrize expected_recipient = expected_recipient(provider) - with mock.patch('website.reviews.listeners.mails.execute_email_send'): - notify_submit(registration, admin) + + notify_submit(registration, admin) notify_moderator_registration_requests_withdrawal(registration, admin) # One user, one provider => one email @@ -566,16 +429,14 @@ def test_submsissions_and_withdrawals_do_not_appear_in_node_digest(self, digest_ assert not list(tasks.get_users_emails(digest_type)) - def test_moderator_digest_emails_render(self, registration, admin, moderator): + def test_moderator_digest_emails_render(self, registration, admin, moderator, mock_send_grid): notify_moderator_registration_requests_withdrawal(registration, admin) # Set up mock_send_mail as a pass-through to the original function. # This lets us assert on the call count/args and also implicitly # ensures that the email acutally renders as normal in send_mail. 
- send_mail = mails.execute_email_send - with mock.patch.object(tasks.mails, 'execute_email_send', side_effect=send_mail) as mock_send_mail: - tasks._send_reviews_moderator_emails('email_transactional') + tasks._send_reviews_moderator_emails('email_transactional') - mock_send_mail.assert_called() + mock_send_grid.assert_called() def test_branded_provider_notification_renders(self, registration, admin, moderator): # Set brand details to be checked in notify_base.mako diff --git a/osf_tests/test_reviewable.py b/osf_tests/test_reviewable.py index 16131961d7e..e3bc0b3d709 100644 --- a/osf_tests/test_reviewable.py +++ b/osf_tests/test_reviewable.py @@ -4,10 +4,10 @@ from osf.models import Preprint from osf.utils.workflows import DefaultStates from osf_tests.factories import PreprintFactory, AuthUserFactory -from website import mails @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestReviewable: @mock.patch('website.identifiers.utils.request_identifiers') @@ -34,21 +34,18 @@ def test_state_changes(self, _): from_db.refresh_from_db() assert from_db.machine_state == DefaultStates.ACCEPTED.value - @mock.patch('website.reviews.listeners.mails.execute_email_send') - def test_reject_resubmission_sends_emails(self, send_mail): + def test_reject_resubmission_sends_emails(self, mock_send_grid): user = AuthUserFactory() preprint = PreprintFactory( reviews_workflow='pre-moderation', is_published=False ) assert preprint.machine_state == DefaultStates.INITIAL.value - assert not send_mail.call_count + assert not mock_send_grid.call_count preprint.run_submit(user) - assert send_mail.call_count == 1 + assert mock_send_grid.call_count == 1 assert preprint.machine_state == DefaultStates.PENDING.value - mail_template = send_mail.call_args[0][1] - assert mail_template == mails.REVIEWS_SUBMISSION_CONFIRMATION assert not user.notification_subscriptions.exists() preprint.run_reject(user, 'comment') @@ -56,6 +53,4 @@ def test_reject_resubmission_sends_emails(self, send_mail): preprint.run_submit(user) # Resubmission alerts users and moderators assert preprint.machine_state == DefaultStates.PENDING.value - mail_template = send_mail.call_args[0][1] - assert send_mail.call_count == 2 - assert mail_template == mails.REVIEWS_RESUBMISSION_CONFIRMATION + assert mock_send_grid.call_count == 2 diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index 5672f832f18..40965c7cf31 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -8,9 +8,8 @@ from osf.models import schema_response # import module for mocking purposes from osf.utils.workflows import ApprovalStates, SchemaResponseTriggers from osf_tests.factories import AuthUserFactory, ProjectFactory, RegistrationFactory, RegistrationProviderFactory -from osf_tests.utils import get_default_test_schema, assert_notification_correctness, _ensure_subscriptions +from osf_tests.utils import get_default_test_schema, _ensure_subscriptions -from website.mails import mails from website.notifications import emails from transitions import MachineError @@ -96,6 +95,7 @@ def revised_response(initial_response): @pytest.mark.enable_bookmark_creation @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestCreateSchemaResponse(): def test_create_initial_response_sets_attributes(self, registration, schema): @@ -142,12 +142,11 @@ def test_create_initial_response_assigns_default_values(self, registration): for block in response.response_blocks.all(): assert block.response == 
DEFAULT_SCHEMA_RESPONSE_VALUES[block.schema_key] - def test_create_initial_response_does_not_notify(self, registration, admin_user): - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - schema_response.SchemaResponse.create_initial_response( - parent=registration, initiator=admin_user - ) - assert not mock_send.called + def test_create_initial_response_does_not_notify(self, registration, admin_user, mock_send_grid): + schema_response.SchemaResponse.create_initial_response( + parent=registration, initiator=admin_user + ) + assert not mock_send_grid.called def test_create_initial_response_fails_if_no_schema_and_no_parent_schema(self, registration): registration.registered_schema.clear() @@ -253,18 +252,14 @@ def test_create_from_previous_response(self, registration, initial_response): assert set(revised_response.response_blocks.all()) == set(initial_response.response_blocks.all()) def test_create_from_previous_response_notification( - self, initial_response, admin_user, notification_recipients): - send_mail = mails.execute_email_send - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - schema_response.SchemaResponse.create_from_previous_response( - previous_response=initial_response, initiator=admin_user - ) + self, initial_response, admin_user, notification_recipients, mock_send_grid): - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_INITIATED, notification_recipients + schema_response.SchemaResponse.create_from_previous_response( + previous_response=initial_response, initiator=admin_user ) + assert mock_send_grid.called + @pytest.mark.parametrize( 'invalid_response_state', [ @@ -547,6 +542,7 @@ def test_delete_fails_if_state_is_invalid(self, invalid_response_state, initial_ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestUnmoderatedSchemaResponseApprovalFlows(): def test_submit_response_adds_pending_approvers( @@ -578,29 +574,23 @@ def test_submit_response_writes_schema_response_action(self, initial_response, a assert new_action.trigger == SchemaResponseTriggers.SUBMIT.db_name def test_submit_response_notification( - self, revised_response, admin_user, notification_recipients): + self, revised_response, admin_user, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) revised_response.update_responses({'q1': 'must change one response or can\'t submit'}) revised_response.revision_justification = 'has for valid revision_justification for submission' revised_response.save() - send_mail = mails.execute_email_send - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.submit(user=admin_user, required_approvers=[admin_user]) + revised_response.submit(user=admin_user, required_approvers=[admin_user]) - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_SUBMITTED, notification_recipients - ) + assert mock_send_grid.called - def test_no_submit_notification_on_initial_response(self, initial_response, admin_user): + def test_no_submit_notification_on_initial_response(self, initial_response, admin_user, mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) initial_response.update_responses({'q1': 'must change one response or can\'t submit'}) 
initial_response.revision_justification = 'has for valid revision_justification for submission' initial_response.save() - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - initial_response.submit(user=admin_user, required_approvers=[admin_user]) - assert not mock_send.called + initial_response.submit(user=admin_user, required_approvers=[admin_user]) + assert not mock_send_grid.called def test_submit_response_requires_user(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) @@ -682,30 +672,23 @@ def test_approve_response_writes_schema_response_action( ).count() == 2 def test_approve_response_notification( - self, revised_response, admin_user, alternate_user, notification_recipients): + self, revised_response, admin_user, alternate_user, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user, alternate_user) + mock_send_grid.reset_mock() + revised_response.approve(user=admin_user) + assert not mock_send_grid.called # Should only send email on final approval + revised_response.approve(user=alternate_user) + assert mock_send_grid.called - send_mail = mails.execute_email_send - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.approve(user=admin_user) - assert not mock_send.called # Should only send email on final approval - revised_response.approve(user=alternate_user) - - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_APPROVED, notification_recipients - ) - - def test_no_approve_notification_on_initial_response(self, initial_response, admin_user): + def test_no_approve_notification_on_initial_response(self, initial_response, admin_user, mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) initial_response.save() initial_response.pending_approvers.add(admin_user) - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - initial_response.approve(user=admin_user) - assert not mock_send.called + initial_response.approve(user=admin_user) + assert not mock_send_grid.called def test_approve_response_requires_user(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -756,28 +739,22 @@ def test_reject_response_writes_schema_response_action(self, initial_response, a assert new_action.trigger == SchemaResponseTriggers.ADMIN_REJECT.db_name def test_reject_response_notification( - self, revised_response, admin_user, notification_recipients): + self, revised_response, admin_user, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user) - send_mail = mails.execute_email_send - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.reject(user=admin_user) + revised_response.reject(user=admin_user) - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_REJECTED, notification_recipients - ) + assert mock_send_grid.called - def test_no_reject_notification_on_initial_response(self, 
initial_response, admin_user): + def test_no_reject_notification_on_initial_response(self, initial_response, admin_user, mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) initial_response.save() initial_response.pending_approvers.add(admin_user) - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - initial_response.reject(user=admin_user) - assert not mock_send.called + initial_response.reject(user=admin_user) + assert not mock_send_grid.called def test_reject_response_requires_user(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -824,6 +801,7 @@ def test_internal_accept_clears_pending_approvers(self, initial_response, admin_ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestModeratedSchemaResponseApprovalFlows(): @pytest.fixture @@ -870,16 +848,13 @@ def test_schema_response_action_to_state_following_moderated_approve_is_pending_ assert new_action.to_state == ApprovalStates.PENDING_MODERATION.db_name assert new_action.trigger == SchemaResponseTriggers.APPROVE.db_name - def test_accept_notification_sent_on_admin_approval(self, revised_response, admin_user): + def test_accept_notification_sent_on_admin_approval(self, revised_response, admin_user, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user) - send_mail = mails.execute_email_send - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - mock_send.side_effect = send_mail - revised_response.approve(user=admin_user) - assert mock_send.called + revised_response.approve(user=admin_user) + assert mock_send_grid.called def test_moderators_notified_on_admin_approval(self, revised_response, admin_user, moderator): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -925,27 +900,21 @@ def test_moderator_accept_writes_schema_response_action(self, initial_response, assert new_action.trigger == SchemaResponseTriggers.ACCEPT.db_name def test_moderator_accept_notification( - self, revised_response, moderator, notification_recipients): + self, revised_response, moderator, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) revised_response.save() - send_mail = mails.execute_email_send - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.accept(user=moderator) + revised_response.accept(user=moderator) - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_APPROVED, notification_recipients - ) + assert mock_send_grid.called def test_no_moderator_accept_notification_on_initial_response( - self, initial_response, moderator): + self, initial_response, moderator, mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) initial_response.save() - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - initial_response.accept(user=moderator) - assert not mock_send.called + initial_response.accept(user=moderator) + assert not mock_send_grid.called def test_moderator_reject(self, initial_response, admin_user, moderator): 
initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) @@ -969,27 +938,21 @@ def test_moderator_reject_writes_schema_response_action( assert new_action.trigger == SchemaResponseTriggers.MODERATOR_REJECT.db_name def test_moderator_reject_notification( - self, revised_response, moderator, notification_recipients): + self, revised_response, moderator, notification_recipients, mock_send_grid): revised_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) revised_response.save() - send_mail = mails.execute_email_send - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - mock_send.side_effect = send_mail # implicitly test rendering - revised_response.reject(user=moderator) + revised_response.reject(user=moderator) - assert_notification_correctness( - mock_send, mails.SCHEMA_RESPONSE_REJECTED, notification_recipients - ) + assert mock_send_grid.called def test_no_moderator_reject_notification_on_initial_response( - self, initial_response, moderator): + self, initial_response, moderator, mock_send_grid): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) initial_response.save() - with mock.patch.object(schema_response.mails, 'execute_email_send', autospec=True) as mock_send: - initial_response.reject(user=moderator) - assert not mock_send.called + initial_response.reject(user=moderator) + assert not mock_send_grid.called def test_moderator_cannot_submit(self, initial_response, moderator): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index 0df3ead35e8..3a2e508dd2d 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -885,6 +885,7 @@ def test_get_user_by_cookie_no_session(self): assert OSFUser.from_cookie(cookie) is None +@pytest.mark.usefixtures('mock_send_grid') class TestChangePassword: def test_change_password(self, user): @@ -896,22 +897,19 @@ def test_change_password(self, user): user.change_password(old_password, new_password, confirm_password) assert bool(user.check_password(new_password)) is True - @mock.patch('website.mails.execute_email_send') - def test_set_password_notify_default(self, mock_send_mail, user): + def test_set_password_notify_default(self, mock_send_grid, user): old_password = 'password' user.set_password(old_password) user.save() - assert mock_send_mail.called is True + assert mock_send_grid.called is True - @mock.patch('website.mails.execute_email_send') - def test_set_password_no_notify(self, mock_send_mail, user): + def test_set_password_no_notify(self, mock_send_grid, user): old_password = 'password' user.set_password(old_password, notify=False) user.save() - assert mock_send_mail.called is False + assert mock_send_grid.called is False - @mock.patch('website.mails.execute_email_send') - def test_check_password_upgrade_hasher_no_notify(self, mock_send_mail, user, settings): + def test_check_password_upgrade_hasher_no_notify(self, mock_send_grid, user, settings): # NOTE: settings fixture comes from pytest-django. 
# changes get reverted after tests run settings.PASSWORD_HASHERS = ( @@ -922,7 +920,7 @@ def test_check_password_upgrade_hasher_no_notify(self, mock_send_mail, user, set user.password = 'sha1$lNb72DKWDv6P$e6ae16dada9303ae0084e14fc96659da4332bb05' user.check_password(raw_password) assert user.password.startswith('md5$') - assert mock_send_mail.called is False + assert mock_send_grid.called is False def test_change_password_invalid(self, old_password=None, new_password=None, confirm_password=None, error_message='Old password is invalid'): diff --git a/scripts/tests/test_deactivate_requested_accounts.py b/scripts/tests/test_deactivate_requested_accounts.py index eb14fc43278..07e43f74bf2 100644 --- a/scripts/tests/test_deactivate_requested_accounts.py +++ b/scripts/tests/test_deactivate_requested_accounts.py @@ -1,14 +1,12 @@ import pytest -from unittest import mock from osf_tests.factories import ProjectFactory, AuthUserFactory from osf.management.commands.deactivate_requested_accounts import deactivate_requested_accounts -from website import mails, settings - @pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') class TestDeactivateRequestedAccount: @pytest.fixture() @@ -26,8 +24,7 @@ def user_requested_deactivation_with_node(self): user.save() return user - @mock.patch('osf.management.commands.deactivate_requested_accounts.mails.execute_email_send') - def test_deactivate_user_with_no_content(self, mock_mail, user_requested_deactivation): + def test_deactivate_user_with_no_content(self, mock_send_grid, user_requested_deactivation): deactivate_requested_accounts(dry_run=False) user_requested_deactivation.reload() @@ -35,21 +32,13 @@ def test_deactivate_user_with_no_content(self, mock_mail, user_requested_deactiv assert user_requested_deactivation.requested_deactivation assert user_requested_deactivation.contacted_deactivation assert user_requested_deactivation.is_disabled - mock_mail.assert_called_with(can_change_preferences=False, - mail=mails.REQUEST_DEACTIVATION_COMPLETE, - to_addr=user_requested_deactivation.username, - contact_email=settings.OSF_CONTACT_EMAIL, - user=user_requested_deactivation) + mock_send_grid.assert_called() - @mock.patch('osf.management.commands.deactivate_requested_accounts.mails.execute_email_send') - def test_deactivate_user_with_content(self, mock_mail, user_requested_deactivation_with_node): + def test_deactivate_user_with_content(self, mock_send_grid, user_requested_deactivation_with_node): deactivate_requested_accounts(dry_run=False) user_requested_deactivation_with_node.reload() assert user_requested_deactivation_with_node.requested_deactivation assert not user_requested_deactivation_with_node.is_disabled - mock_mail.assert_called_with(can_change_preferences=False, - mail=mails.REQUEST_DEACTIVATION, - to_addr=settings.OSF_SUPPORT_EMAIL, - user=user_requested_deactivation_with_node) + mock_send_grid.assert_called() diff --git a/scripts/tests/test_send_queued_mails.py b/scripts/tests/test_send_queued_mails.py index 1fad2a23c37..2815b85f5d9 100644 --- a/scripts/tests/test_send_queued_mails.py +++ b/scripts/tests/test_send_queued_mails.py @@ -10,7 +10,8 @@ from scripts.send_queued_mails import main, pop_and_verify_mails_for_each_user, find_queued_mails_ready_to_be_sent from website import settings - +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestSendQueuedMails(OsfTestCase): def setUp(self): @@ -20,6 +21,10 @@ def setUp(self): 
self.user.osf_mailing_lists[settings.OSF_HELP_LIST] = True self.user.save() + from conftest import start_mock_send_grid + self.mock_send_grid = start_mock_send_grid(self) + + def queue_mail(self, mail_type=NO_ADDON, user=None, send_at=None): return queue_mail( to_addr=user.username if user else self.user.username, @@ -29,21 +34,19 @@ def queue_mail(self, mail_type=NO_ADDON, user=None, send_at=None): fullname=user.fullname if user else self.user.fullname, ) - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_queue_addon_mail(self, mock_send): + def test_queue_addon_mail(self): self.queue_mail() main(dry_run=False) - assert mock_send.called + assert self.mock_send_grid.called - @mock.patch('osf.models.queued_mail.execute_email_send') - def test_no_two_emails_to_same_person(self, mock_send): + def test_no_two_emails_to_same_person(self): user = UserFactory() user.osf_mailing_lists[settings.OSF_HELP_LIST] = True user.save() self.queue_mail(user=user) self.queue_mail(user=user) main(dry_run=False) - assert mock_send.call_count == 1 + assert self.mock_send_grid.call_count == 1 def test_pop_and_verify_mails_for_each_user(self): user_with_email_sent = UserFactory() diff --git a/scripts/tests/test_triggered_mails.py b/scripts/tests/test_triggered_mails.py index 56c006b9c43..b0b94a7f7c5 100644 --- a/scripts/tests/test_triggered_mails.py +++ b/scripts/tests/test_triggered_mails.py @@ -38,8 +38,7 @@ def test_trigger_no_login_mail(self, mock_queue): send_at=mock.ANY, ) - @mock.patch('website.mails.execute_email_send') - def test_find_inactive_users_with_no_inactivity_email_sent_or_queued(self, mock_mail): + def test_find_inactive_users_with_no_inactivity_email_sent_or_queued(self): user_active = UserFactory(fullname='Spot') user_inactive = UserFactory(fullname='Nucha') user_already_received_mail = UserFactory(fullname='Pep') diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index a493e19fac6..17c2da39bc3 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -49,8 +49,11 @@ send_claim_registered_email, ) from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag +from conftest import start_mock_send_grid @pytest.mark.enable_implicit_clean +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestAddingContributorViews(OsfTestCase): def setUp(self): @@ -61,6 +64,8 @@ def setUp(self): # Authenticate all requests contributor_added.connect(notify_added_contributor) + self.mock_send_grid = start_mock_send_grid(self) + def test_serialize_unregistered_without_record(self): name, email = fake.name(), fake_email() res = serialize_unregistered(fullname=name, email=email) @@ -211,8 +216,7 @@ def test_add_contributors_post_only_sends_one_email_to_unreg_user( # finalize_invitation should only have been called once assert mock_send_claim_email.call_count == 1 - @mock.patch('website.mails.execute_email_send') - def test_add_contributors_post_only_sends_one_email_to_registered_user(self, mock_send_mail): + def test_add_contributors_post_only_sends_one_email_to_registered_user(self): # Project has components comp1 = NodeFactory(creator=self.creator, parent=self.project) comp2 = NodeFactory(creator=self.creator, parent=self.project) @@ -237,10 +241,9 @@ def test_add_contributors_post_only_sends_one_email_to_registered_user(self, moc self.app.post(url, json=payload, auth=self.creator.auth) # send_mail should 
only have been called once - assert mock_send_mail.call_count == 1 + assert self.mock_send_grid.call_count == 1 - @mock.patch('website.mails.execute_email_send') - def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self, mock_send_mail): + def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self): # Project has a component with a sub-component component = NodeFactory(creator=self.creator, parent=self.project) sub_component = NodeFactory(creator=self.creator, parent=component) @@ -265,7 +268,7 @@ def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_nod self.app.post(url, json=payload, auth=self.creator.auth) # send_mail is called for both the project and the sub-component - assert mock_send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 @mock.patch('website.project.views.contributor.send_claim_email') def test_email_sent_when_unreg_user_is_added(self, send_mail): @@ -286,8 +289,7 @@ def test_email_sent_when_unreg_user_is_added(self, send_mail): self.app.post(url, json=payload, follow_redirects=True, auth=self.creator.auth) send_mail.assert_called_with(email, ANY,ANY,notify=True, email_template='default') - @mock.patch('website.mails.execute_email_send') - def test_email_sent_when_reg_user_is_added(self, send_mail): + def test_email_sent_when_reg_user_is_added(self): contributor = UserFactory() contributors = [{ 'user': contributor, @@ -297,47 +299,29 @@ def test_email_sent_when_reg_user_is_added(self, send_mail): project = ProjectFactory(creator=self.auth.user) project.add_contributors(contributors, auth=self.auth) project.save() - assert send_mail.called - send_mail.assert_called_with( - to_addr=contributor.username, - mail=mails.CONTRIBUTOR_ADDED_DEFAULT, - user=contributor, - node=project, - referrer_name=self.auth.user.fullname, - all_global_subscriptions_none=False, - branded_service=None, - can_change_preferences=False, - logo=settings.OSF_LOGO, - osf_contact_email=settings.OSF_CONTACT_EMAIL, - is_initiator=False, - published_preprints=[] + assert self.mock_send_grid.called - ) assert contributor.contributor_added_email_records[project._id]['last_sent'] == approx(int(time.time()), rel=1) - @mock.patch('website.mails.execute_email_send') - def test_contributor_added_email_sent_to_unreg_user(self, send_mail): + def test_contributor_added_email_sent_to_unreg_user(self): unreg_user = UnregUserFactory() project = ProjectFactory() project.add_unregistered_contributor(fullname=unreg_user.fullname, email=unreg_user.email, auth=Auth(project.creator)) project.save() - assert send_mail.called + assert self.mock_send_grid.called - @mock.patch('website.mails.execute_email_send') - def test_forking_project_does_not_send_contributor_added_email(self, send_mail): + def test_forking_project_does_not_send_contributor_added_email(self): project = ProjectFactory() project.fork_node(auth=Auth(project.creator)) - assert not send_mail.called + assert not self.mock_send_grid.called - @mock.patch('website.mails.execute_email_send') - def test_templating_project_does_not_send_contributor_added_email(self, send_mail): + def test_templating_project_does_not_send_contributor_added_email(self): project = ProjectFactory() project.use_as_template(auth=Auth(project.creator)) - assert not send_mail.called + assert not self.mock_send_grid.called @mock.patch('website.archiver.tasks.archive') - @mock.patch('website.mails.execute_email_send') - def test_registering_project_does_not_send_contributor_added_email(self, 
send_mail, mock_archive): + def test_registering_project_does_not_send_contributor_added_email(self, mock_archive): project = ProjectFactory() provider = RegistrationProviderFactory() project.register_node( @@ -347,63 +331,57 @@ def test_registering_project_does_not_send_contributor_added_email(self, send_ma None, provider=provider ) - assert not send_mail.called + assert not self.mock_send_grid.called - @mock.patch('website.mails.execute_email_send') - def test_notify_contributor_email_does_not_send_before_throttle_expires(self, send_mail): + def test_notify_contributor_email_does_not_send_before_throttle_expires(self): contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) notify_added_contributor(project, contributor, auth) - assert send_mail.called + assert self.mock_send_grid.called # 2nd call does not send email because throttle period has not expired notify_added_contributor(project, contributor, auth) - assert send_mail.call_count == 1 + assert self.mock_send_grid.call_count == 1 - @mock.patch('website.mails.execute_email_send') - def test_notify_contributor_email_sends_after_throttle_expires(self, send_mail): + def test_notify_contributor_email_sends_after_throttle_expires(self): throttle = 0.5 contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) notify_added_contributor(project, contributor, auth, throttle=throttle) - assert send_mail.called + assert self.mock_send_grid.called time.sleep(1) # throttle period expires notify_added_contributor(project, contributor, auth, throttle=throttle) - assert send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 - @mock.patch('website.mails.execute_email_send') - def test_add_contributor_to_fork_sends_email(self, send_mail): + def test_add_contributor_to_fork_sends_email(self): contributor = UserFactory() fork = self.project.fork_node(auth=Auth(self.creator)) fork.add_contributor(contributor, auth=Auth(self.creator)) fork.save() - assert send_mail.called - assert send_mail.call_count == 1 + assert self.mock_send_grid.called + assert self.mock_send_grid.call_count == 1 - @mock.patch('website.mails.execute_email_send') - def test_add_contributor_to_template_sends_email(self, send_mail): + def test_add_contributor_to_template_sends_email(self): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) template.add_contributor(contributor, auth=Auth(self.creator)) template.save() - assert send_mail.called - assert send_mail.call_count == 1 + assert self.mock_send_grid.called + assert self.mock_send_grid.call_count == 1 - @mock.patch('website.mails.execute_email_send') - def test_creating_fork_does_not_email_creator(self, send_mail): + def test_creating_fork_does_not_email_creator(self): contributor = UserFactory() fork = self.project.fork_node(auth=Auth(self.creator)) - assert not send_mail.called + assert not self.mock_send_grid.called - @mock.patch('website.mails.execute_email_send') - def test_creating_template_does_not_email_creator(self, send_mail): + def test_creating_template_does_not_email_creator(self): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) - assert not send_mail.called + assert not self.mock_send_grid.called def test_add_multiple_contributors_only_adds_one_log(self): n_logs_pre = self.project.logs.count() @@ -459,6 +437,8 @@ def tearDown(self): contributor_added.disconnect(notify_added_contributor) +@mock.patch('website.mails.settings.USE_EMAIL', True) 
+@mock.patch('website.mails.settings.USE_CELERY', False) class TestUserInviteViews(OsfTestCase): def setUp(self): @@ -467,6 +447,8 @@ def setUp(self): self.project = ProjectFactory(creator=self.user) self.invite_url = f'/api/v1/project/{self.project._primary_key}/invite_contributor/' + self.mock_send_grid = start_mock_send_grid(self) + def test_invite_contributor_post_if_not_in_db(self): name, email = fake.name(), fake_email() res = self.app.post( @@ -534,8 +516,7 @@ def test_invite_contributor_requires_fullname(self): ) assert res.status_code == http_status.HTTP_400_BAD_REQUEST - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_send_claim_email_to_given_email(self, send_mail): + def test_send_claim_email_to_given_email(self): project = ProjectFactory() given_email = fake_email() unreg_user = project.add_unregistered_contributor( @@ -546,23 +527,9 @@ def test_send_claim_email_to_given_email(self, send_mail): project.save() send_claim_email(email=given_email, unclaimed_user=unreg_user, node=project) - send_mail.assert_called_with( - given_email, - mails.INVITE_DEFAULT, - user=unreg_user, - referrer=ANY, - node=project, - claim_url=ANY, - email=unreg_user.email, - fullname=unreg_user.fullname, - branded_service=None, - can_change_preferences=False, - logo='osf_logo', - osf_contact_email=settings.OSF_CONTACT_EMAIL - ) + self.mock_send_grid.assert_called() - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_send_claim_email_to_referrer(self, send_mail): + def test_send_claim_email_to_referrer(self): project = ProjectFactory() referrer = project.creator given_email, real_email = fake_email(), fake_email() @@ -573,25 +540,9 @@ def test_send_claim_email_to_referrer(self, send_mail): project.save() send_claim_email(email=real_email, unclaimed_user=unreg_user, node=project) - assert send_mail.called - # email was sent to referrer - send_mail.assert_called_with( - referrer.username, - mails.FORWARD_INVITE, - user=unreg_user, - referrer=referrer, - claim_url=unreg_user.get_claim_url(project._id, external=True), - email=real_email.lower().strip(), - fullname=unreg_user.get_unclaimed_record(project._id)['name'], - node=project, - branded_service=None, - can_change_preferences=False, - logo=settings.OSF_LOGO, - osf_contact_email=settings.OSF_CONTACT_EMAIL - ) + assert self.mock_send_grid.called - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_send_claim_email_before_throttle_expires(self, send_mail): + def test_send_claim_email_before_throttle_expires(self): project = ProjectFactory() given_email = fake_email() unreg_user = project.add_unregistered_contributor( @@ -601,14 +552,16 @@ def test_send_claim_email_before_throttle_expires(self, send_mail): ) project.save() send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - send_mail.reset_mock() + self.mock_send_grid.reset_mock() # 2nd call raises error because throttle hasn't expired with pytest.raises(HTTPError): send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - assert not send_mail.called + assert not self.mock_send_grid.called @pytest.mark.enable_implicit_clean +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestClaimViews(OsfTestCase): def setUp(self): @@ -640,6 +593,8 @@ def setUp(self): ) self.project.save() + self.mock_send_grid = start_mock_send_grid(self) + 
@mock.patch('website.project.views.contributor.send_claim_email') def test_claim_user_already_registered_redirects_to_claim_user_registered(self, claim_email): name = fake.name() @@ -737,8 +692,7 @@ def test_claim_user_invited_with_no_email_posts_to_claim_form(self): }) assert res.status_code == 400 - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_claim_user_post_with_registered_user_id(self, send_mail): + def test_claim_user_post_with_registered_user_id(self): # registered user who is attempting to claim the unclaimed contributor reg_user = UserFactory() payload = { @@ -750,14 +704,13 @@ def test_claim_user_post_with_registered_user_id(self, send_mail): res = self.app.post(url, json=payload) # mail was sent - assert send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 # ... to the correct address - referrer_call = send_mail.call_args_list[0] - claimer_call = send_mail.call_args_list[1] - args, _ = referrer_call - assert args[0] == self.referrer.username - args, _ = claimer_call - assert args[0] == reg_user.username + referrer_call = self.mock_send_grid.call_args_list[0] + claimer_call = self.mock_send_grid.call_args_list[1] + + assert referrer_call[1]['to_addr'] == self.referrer.email + assert claimer_call[1]['to_addr'] == reg_user.email # view returns the correct JSON assert res.json == { @@ -766,29 +719,27 @@ def test_claim_user_post_with_registered_user_id(self, send_mail): 'fullname': self.given_name, } - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_send_claim_registered_email(self, mock_send_mail): + def test_send_claim_registered_email(self): reg_user = UserFactory() send_claim_registered_email( claimer=reg_user, unclaimed_user=self.user, node=self.project ) - assert mock_send_mail.call_count == 2 - first_call_args = mock_send_mail.call_args_list[0][0] - assert first_call_args[0] == self.referrer.username - second_call_args = mock_send_mail.call_args_list[1][0] - assert second_call_args[0] == reg_user.username - - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_send_claim_registered_email_before_throttle_expires(self, mock_send_mail): + assert self.mock_send_grid.call_count == 2 + first_call_args = self.mock_send_grid.call_args_list[0][1] + assert first_call_args['to_addr'] == self.referrer.email + second_call_args = self.mock_send_grid.call_args_list[1][1] + assert second_call_args['to_addr'] == reg_user.email + + def test_send_claim_registered_email_before_throttle_expires(self): reg_user = UserFactory() send_claim_registered_email( claimer=reg_user, unclaimed_user=self.user, node=self.project, ) - mock_send_mail.reset_mock() + self.mock_send_grid.reset_mock() # second call raises error because it was called before throttle period with pytest.raises(HTTPError): send_claim_registered_email( @@ -796,7 +747,7 @@ def test_send_claim_registered_email_before_throttle_expires(self, mock_send_mai unclaimed_user=self.user, node=self.project, ) - assert not mock_send_mail.called + assert not self.mock_send_grid.called @mock.patch('website.project.views.contributor.send_claim_registered_email') def test_claim_user_post_with_email_already_registered_sends_correct_email( @@ -973,8 +924,7 @@ def test_posting_to_claim_form_sets_fullname_to_given_name(self, mock_update_sea assert unreg.given_name == parsed_name['given_name'] assert unreg.family_name == parsed_name['family_name'] - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def 
test_claim_user_post_returns_fullname(self, send_mail): + def test_claim_user_post_returns_fullname(self): url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' res = self.app.post( url, @@ -985,34 +935,17 @@ def test_claim_user_post_returns_fullname(self, send_mail): }, ) assert res.json['fullname'] == self.given_name - assert send_mail.called - - send_mail.assert_called_with( - self.given_email, - mails.INVITE_DEFAULT, - user=self.user, - referrer=self.referrer, - node=ANY, - claim_url=ANY, - email=self.user.email, - fullname=self.user.fullname, - branded_service=None, - osf_contact_email=settings.OSF_CONTACT_EMAIL, - can_change_preferences=False, - logo='osf_logo' - ) - + assert self.mock_send_grid.called - @mock.patch('website.project.views.contributor.mails.execute_email_send') - def test_claim_user_post_if_email_is_different_from_given_email(self, send_mail): + def test_claim_user_post_if_email_is_different_from_given_email(self): email = fake_email() # email that is different from the one the referrer gave url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' self.app.post(url, json={'value': email, 'pk': self.user._primary_key} ) - assert send_mail.called - assert send_mail.call_count == 2 - call_to_invited = send_mail.mock_calls[0] + assert self.mock_send_grid.called + assert self.mock_send_grid.call_count == 2 + call_to_invited = self.mock_send_grid.mock_calls[0] call_to_invited.assert_called_with(to_addr=email) - call_to_referrer = send_mail.mock_calls[1] + call_to_referrer = self.mock_send_grid.mock_calls[1] call_to_referrer.assert_called_with(to_addr=self.given_email) def test_claim_url_with_bad_token_returns_400(self): diff --git a/tests/test_auth.py b/tests/test_auth.py index da72118da43..6088c608e67 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -36,14 +36,21 @@ must_have_addon, must_be_addon_authorizer, ) from website.util import api_url_for +from conftest import start_mock_send_grid from tests.test_cas_authentication import generate_external_user_with_resp logger = logging.getLogger(__name__) +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthUtils(OsfTestCase): + def setUp(self): + super().setUp() + self.mock_send_grid = start_mock_send_grid(self) + def test_citation_with_only_fullname(self): user = UserFactory() user.fullname = 'Martin Luther King, Jr.' 
@@ -71,8 +78,7 @@ def test_unreg_user_can_register(self): assert user.get_confirmation_token(user.username) - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_confirm_email(self, mock_mail): + def test_confirm_email(self): user = UnregUserFactory() auth.register_unconfirmed( @@ -91,7 +97,7 @@ def test_confirm_email(self, mock_mail): user.reload() - mock_mail.assert_not_called() + self.mock_send_grid.assert_not_called() self.app.set_cookie(settings.COOKIE_NAME, user.get_or_create_cookie().decode()) @@ -101,7 +107,7 @@ def test_confirm_email(self, mock_mail): assert res.status_code == 302 assert '/' == urlparse(res.location).path - assert len(mock_mail.call_args_list) == 0 + assert len(self.mock_send_grid.call_args_list) == 0 assert len(get_session()['status']) == 1 def test_get_user_by_id(self): @@ -163,23 +169,15 @@ def test_successful_external_first_login_without_attributes(self, mock_service_v cas.make_response_from_ticket(ticket, service_url) assert user == mock_external_first_login_authenticate.call_args[0][0] - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_password_change_sends_email(self, mock_mail): + def test_password_change_sends_email(self): user = UserFactory() user.set_password('killerqueen') user.save() - assert len(mock_mail.call_args_list) == 1 - empty, kwargs = mock_mail.call_args - kwargs['user'].reload() + assert len(self.mock_send_grid.call_args_list) == 1 + empty, kwargs = self.mock_send_grid.call_args assert empty == () - assert kwargs == { - 'user': user, - 'mail': mails.PASSWORD_RESET, - 'to_addr': user.username, - 'can_change_preferences': False, - 'osf_contact_email': settings.OSF_CONTACT_EMAIL, - } + assert kwargs['to_addr'] == user.username @mock.patch('framework.auth.utils.requests.post') def test_validate_recaptcha_success(self, req_post): @@ -211,8 +209,7 @@ def test_validate_recaptcha_empty_response(self, req_post): # ensure None short circuits execution (no call to google) assert not validate_recaptcha(None) - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_sign_up_twice_sends_two_confirmation_emails_only(self, mock_mail): + def test_sign_up_twice_sends_two_confirmation_emails_only(self): # Regression test for https://openscience.atlassian.net/browse/OSF-7060 url = api_url_for('register_user') sign_up_data = { @@ -223,20 +220,10 @@ def test_sign_up_twice_sends_two_confirmation_emails_only(self, mock_mail): } self.app.post(url, json=sign_up_data) - assert len(mock_mail.call_args_list) == 1 - args, kwargs = mock_mail.call_args - assert args == ( - 'caesar@romanempire.com', - mails.INITIAL_CONFIRM_EMAIL, - ) + assert len(self.mock_send_grid.call_args_list) == 1 self.app.post(url, json=sign_up_data) - assert len(mock_mail.call_args_list) == 2 - args, kwargs = mock_mail.call_args - assert args == ( - 'caesar@romanempire.com', - mails.INITIAL_CONFIRM_EMAIL, - ) + assert len(self.mock_send_grid.call_args_list) == 2 class TestAuthObject(OsfTestCase): diff --git a/tests/test_auth_views.py b/tests/test_auth_views.py index 6d7b5fb0514..31445da2c8d 100644 --- a/tests/test_auth_views.py +++ b/tests/test_auth_views.py @@ -40,9 +40,12 @@ ) from website import mails, settings from website.util import api_url_for, web_url_for +from conftest import start_mock_send_grid pytestmark = pytest.mark.django_db +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthViews(OsfTestCase): def setUp(self): @@ -50,8 +53,9 @@ def setUp(self): 
self.user = AuthUserFactory() self.auth = self.user.auth - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_register_ok(self, _): + self.mock_send_grid = start_mock_send_grid(self) + + def test_register_ok(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -67,9 +71,7 @@ def test_register_ok(self, _): assert user.fullname == name assert user.accepted_terms_of_service is None - # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/2902 - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_register_email_case_insensitive(self, _): + def test_register_email_case_insensitive(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -84,8 +86,7 @@ def test_register_email_case_insensitive(self, _): user = OSFUser.objects.get(username=email) assert user.fullname == name - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_register_email_with_accepted_tos(self, _): + def test_register_email_with_accepted_tos(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -101,8 +102,7 @@ def test_register_email_with_accepted_tos(self, _): user = OSFUser.objects.get(username=email) assert user.accepted_terms_of_service - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_register_email_without_accepted_tos(self, _): + def test_register_email_without_accepted_tos(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' self.app.post( @@ -195,8 +195,7 @@ def test_register_blocked_email_domain(self): assert users.count() == 0 @mock.patch('framework.auth.views.validate_recaptcha', return_value=True) - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_register_good_captcha(self, _, validate_recaptcha): + def test_register_good_captcha(self, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' captcha = 'some valid captcha' @@ -217,8 +216,7 @@ def test_register_good_captcha(self, _, validate_recaptcha): assert user.fullname == name @mock.patch('framework.auth.views.validate_recaptcha', return_value=False) - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_register_missing_captcha(self, _, validate_recaptcha): + def test_register_missing_captcha(self, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' with mock.patch.object(settings, 'RECAPTCHA_SITE_KEY', 'some_value'): @@ -236,8 +234,7 @@ def test_register_missing_captcha(self, _, validate_recaptcha): assert resp.status_code == http_status.HTTP_400_BAD_REQUEST @mock.patch('framework.auth.views.validate_recaptcha', return_value=False) - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_register_bad_captcha(self, _, validate_recaptcha): + def test_register_bad_captcha(self, validate_recaptcha): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' with mock.patch.object(settings, 'RECAPTCHA_SITE_KEY', 'some_value'): @@ -317,35 +314,21 @@ def test_register_sends_user_registered_signal(self, mock_send_confirm_email): assert mock_signals.signals_sent() == {auth.signals.user_registered, auth.signals.unconfirmed_user_created} assert 
mock_send_confirm_email.called - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_resend_confirmation(self, send_mail: MagicMock): + def test_resend_confirmation(self): email = 'test@mail.com' token = self.user.add_unconfirmed_email(email) self.user.save() url = api_url_for('resend_confirmation') header = {'address': email, 'primary': False, 'confirmed': False} self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) - assert send_mail.called - send_mail.assert_called_with( - email, - mails.CONFIRM_EMAIL, - user=self.user, - confirmation_url=ANY, - email='test@mail.com', - merge_target=None, - external_id_provider=None, - branded_preprints_provider=None, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - logo='osf_logo' - ) + assert self.mock_send_grid.called + self.user.reload() assert token != self.user.get_confirmation_token(email) with pytest.raises(InvalidTokenError): self.user.get_unconfirmed_email_for_token(token) - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_click_confirmation_email(self, send_mail): + def test_click_confirmation_email(self): # TODO: check in qa url encoding email = 'test@mail.com' token = self.user.add_unconfirmed_email(email) @@ -509,14 +492,13 @@ def test_resend_confirmation_not_work_for_confirmed_email(self): assert res.status_code == 400 assert res.json['message_long'] == 'Cannnot resend confirmation for confirmed emails' - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_resend_confirmation_does_not_send_before_throttle_expires(self, send_mail): + def test_resend_confirmation_does_not_send_before_throttle_expires(self): email = 'test@mail.com' self.user.save() url = api_url_for('resend_confirmation') header = {'address': email, 'primary': False, 'confirmed': False} self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) - assert send_mail.called + assert self.mock_send_grid.called # 2nd call does not send email because throttle period has not expired res = self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) assert res.status_code == 400 diff --git a/tests/test_conferences.py b/tests/test_conferences.py deleted file mode 100644 index 6573d462136..00000000000 --- a/tests/test_conferences.py +++ /dev/null @@ -1,786 +0,0 @@ -from unittest import mock - -import hmac -import hashlib -from io import BytesIO - -import pytest -from django.db import IntegrityError -from furl import furl - -from framework.auth import get_or_create_user -from framework.auth.core import Auth - -from osf.models import OSFUser, AbstractNode -from addons.wiki.models import WikiVersion -from osf.exceptions import BlockedEmailError -from website import settings -from website.conferences import views -from website.conferences import utils, message -from website.util import api_url_for, web_url_for - -from tests.base import OsfTestCase, fake -from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory - - -def assert_absolute(url): - parsed_domain = furl(settings.DOMAIN) - parsed_url = furl(url) - assert parsed_domain.host == parsed_url.host - - -def assert_equal_urls(first, second): - parsed_first = furl(first) - parsed_first.port = None - parsed_second = furl(second) - parsed_second.port = None - assert parsed_first == parsed_second - - -def create_fake_conference_nodes(n, conference): - nodes = [] - for i in range(n): - node = ProjectFactory(is_public=True) - 
conference.submissions.add(node) - node.save() - nodes.append(node) - return nodes - - -def create_fake_conference_nodes_bad_data(conference, n, bad_n, endpoint): - nodes = [] - for i in range(n): - node = ProjectFactory(is_public=True) - conference.submissions.add(node) - # inject bad data - if i < bad_n: - # Delete only contributor - node.contributor_set.filter(user=node.contributors.first()).delete() - node.save() - nodes.append(node) - return nodes - - -class TestConferenceUtils(OsfTestCase): - - def test_get_or_create_user_exists(self): - user = UserFactory() - fetched, created = get_or_create_user(user.fullname, user.username, is_spam=True) - assert not created - assert user._id == fetched._id - assert 'is_spam' not in fetched.system_tags - - def test_get_or_create_user_not_exists(self): - fullname = 'Roger Taylor' - username = 'roger@queen.com' - fetched, created = get_or_create_user(fullname, username, is_spam=False) - fetched.save() # in order to access m2m fields, e.g. tags - assert created - assert fetched.fullname == fullname - assert fetched.username == username - assert 'is_spam' not in fetched.system_tags - - def test_get_or_create_user_is_spam(self): - fullname = 'John Deacon' - username = 'deacon@queen.com' - fetched, created = get_or_create_user(fullname, username, is_spam=True) - fetched.save() # in order to access m2m fields, e.g. tags - assert created - assert fetched.fullname == fullname - assert fetched.username == username - assert 'is_spam' in fetched.system_tags - - def test_get_or_create_user_with_blocked_domain(self): - fullname = 'Kanye West' - username = 'kanye@mailinator.com' - with pytest.raises(BlockedEmailError) as e: - get_or_create_user(fullname, username, is_spam=True) - assert str(e.value) == 'Invalid Email' - - -class ContextTestCase(OsfTestCase): - MAILGUN_API_KEY = 'mailkimp' - - @classmethod - def setUpClass(cls): - super().setUpClass() - settings.MAILGUN_API_KEY, cls._MAILGUN_API_KEY = cls.MAILGUN_API_KEY, settings.MAILGUN_API_KEY - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - settings.MAILGUN_API_KEY = cls._MAILGUN_API_KEY - - def make_context(self, method='POST', **kwargs): - data = { - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - } - data.update(kwargs.pop('data', {})) - data = { - key: value - for key, value in data.items() - if value is not None - } - return self.app.application.test_request_context(method=method, data=data, **kwargs) - - -class TestProvisionNode(ContextTestCase): - - def setUp(self): - super().setUp() - self.node = ProjectFactory() - self.user = self.node.creator - self.conference = ConferenceFactory() - self.body = 'dragon on my back' - self.content = b'dragon attack' - self.attachment = BytesIO(self.content) - self.recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - self.conference.endpoint, - ) - - def make_context(self, **kwargs): - data = { - 'attachment-count': '1', - 'attachment-1': (self.attachment, 'attachment-1'), - 'X-Mailgun-Sscore': 0, - 'recipient': self.recipient, - 'stripped-text': self.body, - } - data.update(kwargs.pop('data', {})) - return super().make_context(data=data, **kwargs) - - def test_provision(self): - with self.make_context(): - msg = message.ConferenceMessage() - utils.provision_node(self.conference, msg, self.node, self.user) - assert self.node.is_public - 
assert self.conference.admins.first() in self.node.contributors - assert 'emailed' in self.node.system_tags - assert self.conference.endpoint in self.node.system_tags - assert self.node in self.conference.submissions.all() - assert 'spam' not in self.node.system_tags - - def test_provision_private(self): - self.conference.public_projects = False - self.conference.save() - with self.make_context(): - msg = message.ConferenceMessage() - utils.provision_node(self.conference, msg, self.node, self.user) - assert not self.node.is_public - assert self.conference.admins.first() in self.node.contributors - assert 'emailed' in self.node.system_tags - assert 'spam' not in self.node.system_tags - - def test_provision_spam(self): - with self.make_context(data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1}): - msg = message.ConferenceMessage() - utils.provision_node(self.conference, msg, self.node, self.user) - assert not self.node.is_public - assert self.conference.admins.first() in self.node.contributors - assert 'emailed' in self.node.system_tags - assert 'spam' in self.node.system_tags - - @mock.patch('website.conferences.utils.waterbutler_api_url_for') - @mock.patch('website.conferences.utils.requests.put') - def test_upload(self, mock_put, mock_get_url): - mock_get_url.return_value = 'http://queen.com/' - file_name = 'hammer-to-fall' - self.attachment.filename = file_name - self.attachment.content_type = 'application/json' - utils.upload_attachment(self.user, self.node, self.attachment) - mock_get_url.assert_called_with( - self.node._id, - 'osfstorage', - _internal=True, - base_url=self.node.osfstorage_region.waterbutler_url, - cookie=self.user.get_or_create_cookie().decode(), - name=file_name - ) - mock_put.assert_called_with( - mock_get_url.return_value, - data=self.content, - cookies={settings.COOKIE_NAME: self.user.get_or_create_cookie().decode()}, - ) - - @mock.patch('website.conferences.utils.waterbutler_api_url_for') - @mock.patch('website.conferences.utils.requests.put') - def test_upload_no_file_name(self, mock_put, mock_get_url): - mock_get_url.return_value = 'http://queen.com/' - self.attachment.filename = '' - self.attachment.content_type = 'application/json' - utils.upload_attachment(self.user, self.node, self.attachment) - mock_get_url.assert_called_with( - self.node._id, - 'osfstorage', - _internal=True, - base_url=self.node.osfstorage_region.waterbutler_url, - cookie=self.user.get_or_create_cookie().decode(), - name=settings.MISSING_FILE_NAME, - ) - mock_put.assert_called_with( - mock_get_url.return_value, - data=self.content, - cookies={settings.COOKIE_NAME: self.user.get_or_create_cookie().decode()}, - ) - - -class TestMessage(ContextTestCase): - PUSH_CONTEXT = False - - def test_verify_signature_valid(self): - with self.make_context(): - msg = message.ConferenceMessage() - msg.verify_signature() - - def test_verify_signature_invalid(self): - with self.make_context(data={'signature': 'fake'}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.verify_signature() - - def test_is_spam_false_missing_headers(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1}, - ) - with ctx: - msg = message.ConferenceMessage() - assert not msg.is_spam - - def test_is_spam_false_all_headers(self): - ctx = self.make_context( - method='POST', - data={ - 'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1, - 'X-Mailgun-Dkim-Check-Result': 
message.DKIM_PASS_VALUES[0], - 'X-Mailgun-Spf': message.SPF_PASS_VALUES[0], - }, - ) - with ctx: - msg = message.ConferenceMessage() - assert not msg.is_spam - - def test_is_spam_true_sscore(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.is_spam - - def test_is_spam_true_dkim(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0][::-1]}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.is_spam - - def test_is_spam_true_spf(self): - ctx = self.make_context( - method='POST', - data={'X-Mailgun-Spf': message.SPF_PASS_VALUES[0][::-1]}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.is_spam - - def test_subject(self): - ctx = self.make_context( - method='POST', - data={'subject': 'RE: Hip Hopera'}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.subject == 'Hip Hopera' - - def test_recipient(self): - address = 'test-conference@osf.io' - ctx = self.make_context( - method='POST', - data={'recipient': address}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.recipient == address - - def test_text(self): - text = 'welcome to my nuclear family' - ctx = self.make_context( - method='POST', - data={'stripped-text': text}, - ) - with ctx: - msg = message.ConferenceMessage() - assert msg.text == text - - def test_sender_name(self): - names = [ - (' Fred', 'Fred'), - ('Me‰¨ü', 'Me‰¨ü'), - ('fred@queen.com', 'fred@queen.com'), - ('Fred ', 'Fred'), - ('"Fred" ', 'Fred'), - ] - for name in names: - with self.make_context(data={'from': name[0]}): - msg = message.ConferenceMessage() - assert msg.sender_name == name[1] - - def test_sender_email(self): - emails = [ - ('fred@queen.com', 'fred@queen.com'), - ('FRED@queen.com', 'fred@queen.com') - ] - for email in emails: - with self.make_context(data={'from': email[0]}): - msg = message.ConferenceMessage() - assert msg.sender_email == email[1] - - def test_route_invalid_pattern(self): - with self.make_context(data={'recipient': 'spam@osf.io'}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.route - - def test_route_invalid_test(self): - recipient = '{}conf-talk@osf.io'.format('' if settings.DEV_MODE else 'stage-') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.route - - def test_route_valid_alternate(self): - conf = ConferenceFactory(endpoint='chocolate', active=True) - conf.name = 'Chocolate Conference' - conf.field_names['submission2'] = 'data' - conf.save() - recipient = '{}chocolate-data@osf.io'.format('test-' if settings.DEV_MODE else '') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - assert msg.conference_name == 'chocolate' - assert msg.conference_category == 'data' - conf.__class__.delete(conf) - - def test_route_valid_b(self): - recipient = '{}conf-poster@osf.io'.format('test-' if settings.DEV_MODE else '') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - assert msg.conference_name == 'conf' - assert msg.conference_category == 'poster' - - def test_alternate_route_invalid(self): - recipient = 
'{}chocolate-data@osf.io'.format('test-' if settings.DEV_MODE else '') - with self.make_context(data={'recipient': recipient}): - self.app.application.preprocess_request() - msg = message.ConferenceMessage() - with pytest.raises(message.ConferenceError): - msg.route - - def test_attachments_count_zero(self): - with self.make_context(data={'attachment-count': '0'}): - msg = message.ConferenceMessage() - assert msg.attachments == [] - - def test_attachments_count_one(self): - content = b'slightly mad' - sio = BytesIO(content) - ctx = self.make_context( - method='POST', - data={ - 'attachment-count': 1, - 'attachment-1': (sio, 'attachment-1'), - }, - ) - with ctx: - msg = message.ConferenceMessage() - assert len(msg.attachments) == 1 - assert msg.attachments[0].read() == content - - -class TestConferenceEmailViews(OsfTestCase): - - def test_redirect_to_meetings_url(self): - url = '/presentations/' - res = self.app.get(url) - assert res.status_code == 302 - res = self.app.get(url, follow_redirects=True) - assert res.request.path == '/meetings/' - - def test_conference_submissions(self): - AbstractNode.objects.all().delete() - conference1 = ConferenceFactory() - conference2 = ConferenceFactory() - # Create conference nodes - create_fake_conference_nodes( - 3, - conference1, - ) - create_fake_conference_nodes( - 2, - conference2, - ) - - url = api_url_for('conference_submissions') - res = self.app.get(url) - assert res.json['success'] - - def test_conference_plain_returns_200(self): - conference = ConferenceFactory() - url = web_url_for('conference_results__plain', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - - def test_conference_data(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - create_fake_conference_nodes( - n_conference_nodes, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) == n_conference_nodes - - # Regression for OSF-8864 to confirm bad project data does not make whole conference break - def test_conference_bad_data(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - n_conference_nodes_bad = 1 - create_fake_conference_nodes_bad_data( - conference, - n_conference_nodes, - n_conference_nodes_bad, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) == n_conference_nodes - n_conference_nodes_bad - - def test_conference_data_url_upper(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - create_fake_conference_nodes( - n_conference_nodes, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint.upper()) - res = self.app.get(url) - assert res.status_code == 200 - assert len(res.json) == n_conference_nodes - - def test_conference_data_tag_upper(self): - conference = ConferenceFactory() - - # Create conference nodes - n_conference_nodes = 3 - create_fake_conference_nodes( - n_conference_nodes, - conference, - ) - # Create a non-conference node - ProjectFactory() - - url = api_url_for('conference_data', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - 
assert len(res.json) == n_conference_nodes - - def test_conference_results(self): - conference = ConferenceFactory() - - url = web_url_for('conference_results', meeting=conference.endpoint) - res = self.app.get(url) - assert res.status_code == 200 - - def test_confererence_results_endpoint_is_case_insensitive(self): - ConferenceFactory(endpoint='StudySwap') - url = web_url_for('conference_results', meeting='studyswap') - res = self.app.get(url) - assert res.status_code == 200 - - -class TestConferenceModel(OsfTestCase): - - def test_endpoint_is_required(self): - with pytest.raises(IntegrityError): - ConferenceFactory(endpoint=None, name=fake.company()).save() - - def test_name_is_required(self): - with pytest.raises(IntegrityError): - ConferenceFactory(endpoint='spsp2014', name=None).save() - - def test_default_field_names(self): - conf = ConferenceFactory(endpoint='cookie', name='Cookies Conference') - conf.save() - assert conf.field_names['submission1'] == 'poster' - assert conf.field_names['mail_subject'] == 'Presentation title' - - def test_conference_valid_submissions(self): - conf = ConferenceFactory(endpoint='Hamburgers', name='Hamburger conference') - conf.save() - - # 3 good nodes added - create_fake_conference_nodes(3, conf) - - # Deleted node added - deleted_node = ProjectFactory(is_public=True) - deleted_node.is_deleted = True - deleted_node.save() - conf.submissions.add(deleted_node) - - # Private node added - private_node = ProjectFactory(is_public=False) - conf.submissions.add(private_node) - - assert conf.submissions.count() == 5 - assert conf.valid_submissions.count() == 3 - - -class TestConferenceIntegration(ContextTestCase): - - @mock.patch('website.conferences.views.execute_email_send') - @mock.patch('website.conferences.utils.upload_attachments') - def test_integration(self, mock_upload, mock_send_mail): - fullname = 'John Deacon' - username = 'deacon@queen.com' - title = 'good songs' - conference = ConferenceFactory() - body = 'dragon on my back' - content = 'dragon attack' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{fullname} <{username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1': (BytesIO(content.encode()), 'attachment-1') - }, - ) - assert mock_upload.called - users = OSFUser.objects.filter(username=username) - assert users.count() == 1 - nodes = AbstractNode.objects.filter(title=title) - assert nodes.count() == 1 - node = nodes[0] - assert WikiVersion.objects.get_for_node(node, 'home').content == body - assert mock_send_mail.called - call_args, call_kwargs = mock_send_mail.call_args - assert_absolute(call_kwargs['conf_view_url']) - assert_absolute(call_kwargs['set_password_url']) - assert_absolute(call_kwargs['profile_url']) - assert_absolute(call_kwargs['file_url']) - assert_absolute(call_kwargs['node_url']) - - @mock.patch('website.conferences.views.execute_email_send') - def test_integration_inactive(self, mock_send_mail): - conference = ConferenceFactory(active=False) - fullname = 'John Deacon' - username = 'deacon@queen.com' - title = 'good songs' - body = 'dragon on my back' - recipient = 
'{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - res = self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{fullname} <{username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - }, - ) - assert res.status_code == 406 - call_args, call_kwargs = mock_send_mail.call_args - assert call_args == (username, views.CONFERENCE_INACTIVE) - assert call_kwargs['fullname'] == fullname - assert_equal_urls( - call_kwargs['presentations_url'], - web_url_for('conference_view', _absolute=True), - ) - - @mock.patch('website.conferences.views.execute_email_send') - @mock.patch('website.conferences.utils.upload_attachments') - def test_integration_wo_full_name(self, mock_upload, mock_send_mail): - username = 'no_full_name@mail.com' - title = 'no full name only email' - conference = ConferenceFactory() - body = 'dragon on my back' - content = 'dragon attack' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': username, - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1': (BytesIO(content.encode()), 'attachment-1') - }, - ) - assert mock_upload.called - users = OSFUser.objects.filter(username=username) - assert users.count() == 1 - nodes = AbstractNode.objects.filter(title=title) - assert nodes.count() == 1 - node = nodes[0] - assert WikiVersion.objects.get_for_node(node, 'home').content == body - assert mock_send_mail.called - call_args, call_kwargs = mock_send_mail.call_args - assert_absolute(call_kwargs['conf_view_url']) - assert_absolute(call_kwargs['set_password_url']) - assert_absolute(call_kwargs['profile_url']) - assert_absolute(call_kwargs['file_url']) - assert_absolute(call_kwargs['node_url']) - - @mock.patch('website.conferences.views.execute_email_send') - @mock.patch('website.conferences.utils.upload_attachments') - def test_create_conference_node_with_same_name_as_existing_node(self, mock_upload, mock_send_mail): - conference = ConferenceFactory() - user = UserFactory() - title = 'Long Live Greg' - ProjectFactory(creator=user, title=title) - - body = 'Greg is a good plant' - content = 'Long may they reign.' 
- recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{user.fullname} <{user.username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1':(BytesIO(content.encode()), 'attachment-1') - }, - ) - - assert AbstractNode.objects.filter(title=title, creator=user).count() == 2 - assert mock_upload.called - assert mock_send_mail.called - - - @mock.patch('website.conferences.views.execute_email_send') - def test_conferences_discontinued(self, mock_send_mail): - fullname = 'John Deacon' - username = 'deacon@queen.com' - title = 'good songs' - conference = ConferenceFactory() - body = 'dragon on my back' - content = 'dragon attack' - recipient = '{}{}-poster@osf.io'.format( - 'test-' if settings.DEV_MODE else '', - conference.endpoint, - ) - from waffle.testutils import override_flag - from osf import features - with override_flag(features.DISABLE_MEETINGS, active=True): - res = self.app.post( - api_url_for('meeting_hook'), - data={ - 'X-Mailgun-Sscore': 0, - 'timestamp': '123', - 'token': 'secret', - 'signature': hmac.new( - key=settings.MAILGUN_API_KEY.encode(), - msg='{}{}'.format('123', 'secret').encode(), - digestmod=hashlib.sha256, - ).hexdigest(), - 'attachment-count': '1', - 'X-Mailgun-Sscore': 0, - 'from': f'{fullname} <{username}>', - 'recipient': recipient, - 'subject': title, - 'stripped-text': body, - 'attachment-1': (BytesIO(content.encode()), 'attachment-1') - }, - ) - assert res.status_code == 501 - assert res.json['message_short'] == 'Service has been discontinued' - - assert mock_send_mail.called diff --git a/tests/test_misc_views.py b/tests/test_misc_views.py index bae2870876c..814ab0556f1 100644 --- a/tests/test_misc_views.py +++ b/tests/test_misc_views.py @@ -49,6 +49,7 @@ from website.project.views.node import _should_show_wiki_widget from website.util import web_url_for from website.util import rubeus +from conftest import start_mock_send_grid pytestmark = pytest.mark.django_db @@ -360,6 +361,8 @@ def test_explore(self): assert res.status_code == 200 +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestExternalAuthViews(OsfTestCase): def setUp(self): @@ -381,6 +384,8 @@ def setUp(self): self.user.save() self.auth = (self.user.username, password) + self.mock_send_grid = start_mock_send_grid(self) + def test_external_login_email_get_with_invalid_session(self): url = web_url_for('external_login_email_get') resp = self.app.get(url) @@ -400,8 +405,7 @@ def test_external_login_confirm_email_get_without_destination(self): res = self.app.get(url, auth=self.auth) assert res.status_code == 400, 'bad request' - @mock.patch('website.mails.execute_email_send') - def test_external_login_confirm_email_get_create(self, mock_welcome): + def test_external_login_confirm_email_get_create(self): # TODO: check in qa url encoding assert not self.user.is_registered url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') @@ -410,15 +414,14 @@ def test_external_login_confirm_email_get_create(self, mock_welcome): assert '/login?service=' in 
res.location assert quote_plus('new=true') in res.location - assert mock_welcome.call_count == 0 + assert self.mock_send_grid.call_count == 0 self.user.reload() assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' assert self.user.is_registered assert self.user.has_usable_password() - @mock.patch('website.mails.execute_email_send') - def test_external_login_confirm_email_get_link(self, mock_link_confirm): + def test_external_login_confirm_email_get_link(self): self.user.external_identity['orcid'][self.provider_id] = 'LINK' self.user.save() assert not self.user.is_registered @@ -429,15 +432,14 @@ def test_external_login_confirm_email_get_link(self, mock_link_confirm): assert '/login?service=' in res.location assert 'new=true' not in parse.unquote(res.location) - assert mock_link_confirm.call_count == 1 + assert self.mock_send_grid.call_count == 1 self.user.reload() assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' assert self.user.is_registered assert self.user.has_usable_password() - @mock.patch('website.mails.execute_email_send') - def test_external_login_confirm_email_get_duped_id(self, mock_confirm): + def test_external_login_confirm_email_get_duped_id(self): dupe_user = UserFactory(external_identity={'orcid': {self.provider_id: 'CREATE'}}) assert dupe_user.external_identity == self.user.external_identity url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') @@ -446,7 +448,7 @@ def test_external_login_confirm_email_get_duped_id(self, mock_confirm): assert 'You should be redirected automatically' in str(res.html) assert '/login?service=' in res.location - assert mock_confirm.call_count == 0 + assert self.mock_send_grid.call_count == 0 self.user.reload() dupe_user.reload() @@ -454,14 +456,13 @@ def test_external_login_confirm_email_get_duped_id(self, mock_confirm): assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' assert dupe_user.external_identity == {} - @mock.patch('website.mails.execute_email_send') - def test_external_login_confirm_email_get_duping_id(self, mock_confirm): + def test_external_login_confirm_email_get_duping_id(self): dupe_user = UserFactory(external_identity={'orcid': {self.provider_id: 'VERIFIED'}}) url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') res = self.app.get(url) assert res.status_code == 403, 'only allows one user to link an id' - assert mock_confirm.call_count == 0 + assert self.mock_send_grid.call_count == 0 self.user.reload() dupe_user.reload() diff --git a/tests/test_notifications.py b/tests/test_notifications.py index e97bad37732..49c6f1083d2 100644 --- a/tests/test_notifications.py +++ b/tests/test_notifications.py @@ -851,6 +851,8 @@ def test_localize_timestamp_handles_unicode(self): assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestSendDigest(OsfTestCase): def setUp(self): super().setUp() @@ -859,6 +861,9 @@ def setUp(self): self.project = factories.ProjectFactory() self.timestamp = timezone.now() + from conftest import start_mock_send_grid + self.mock_send_grid = start_mock_send_grid(self) + def test_group_notifications_by_user_transactional(self): send_type = 'email_transactional' d = factories.NotificationDigestFactory( @@ -945,8 +950,7 @@ def test_group_notifications_by_user_digest(self): digest_ids = 
[d2._id, d3._id] remove_notifications(email_notification_ids=digest_ids) - @mock.patch('website.mails.execute_email_send') - def test_send_users_email_called_with_correct_args(self, mock_send_mail): + def test_send_users_email_called_with_correct_args(self): send_type = 'email_transactional' d = factories.NotificationDigestFactory( send_type=send_type, @@ -958,23 +962,17 @@ def test_send_users_email_called_with_correct_args(self, mock_send_mail): d.save() user_groups = list(get_users_emails(send_type)) send_users_email(send_type) - assert mock_send_mail.called - assert mock_send_mail.call_count == len(user_groups) + mock_send_grid = self.mock_send_grid + assert mock_send_grid.called + assert mock_send_grid.call_count == len(user_groups) last_user_index = len(user_groups) - 1 user = OSFUser.load(user_groups[last_user_index]['user_id']) - - args, kwargs = mock_send_mail.call_args + args, kwargs = mock_send_grid.call_args assert kwargs['to_addr'] == user.username - assert kwargs['mail'] == mails.DIGEST - assert kwargs['name'] == user.fullname - assert kwargs['can_change_node_preferences'] == True - message = group_by_node(user_groups[last_user_index]['info']) - assert kwargs['message'] == message - - @mock.patch('website.mails.execute_email_send') - def test_send_users_email_ignores_disabled_users(self, mock_send_mail): + + def test_send_users_email_ignores_disabled_users(self): send_type = 'email_transactional' d = factories.NotificationDigestFactory( send_type=send_type, @@ -993,7 +991,7 @@ def test_send_users_email_ignores_disabled_users(self, mock_send_mail): user.save() send_users_email(send_type) - assert not mock_send_mail.called + assert not self.mock_send_grid.called def test_remove_sent_digest_notifications(self): d = factories.NotificationDigestFactory( @@ -1007,6 +1005,9 @@ def test_remove_sent_digest_notifications(self): with pytest.raises(NotificationDigest.DoesNotExist): NotificationDigest.objects.get(_id=digest_id) + +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestNotificationsReviews(OsfTestCase): def setUp(self): super().setUp() @@ -1015,12 +1016,14 @@ def setUp(self): self.user = factories.UserFactory() self.sender = factories.UserFactory() self.context_info = { - 'email_sender': self.sender, 'domain': 'osf.io', 'reviewable': self.preprint, 'workflow': 'pre-moderation', 'provider_contact_email': settings.OSF_CONTACT_EMAIL, 'provider_support_email': settings.OSF_SUPPORT_EMAIL, + 'document_type': 'preprint', + 'referrer': self.sender, + 'provider_url': self.provider.landing_url, } self.action = factories.ReviewActionFactory() factories.NotificationSubscriptionFactory( @@ -1041,15 +1044,17 @@ def setUp(self): event_name='global_reviews' ).add_user_to_subscription(self.user, 'email_transactional') + from conftest import start_mock_send_grid + self.mock_send_grid = start_mock_send_grid(self) + def test_reviews_base_notification(self): contributor_subscriptions = list(utils.get_all_user_subscriptions(self.user)) event_types = [sub.event_name for sub in contributor_subscriptions] assert 'global_reviews' in event_types - @mock.patch('website.mails.mails.execute_email_send') - def test_reviews_submit_notification(self, mock_send_email): + def test_reviews_submit_notification(self): listeners.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user]) - assert mock_send_email.called + assert self.mock_send_grid.called 
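The hunks above (and several that follow) replace per-test `@mock.patch('website.mails.execute_email_send')` decorators with a shared helper imported from the suite's `conftest`: `setUp` calls `self.mock_send_grid = start_mock_send_grid(self)` and the tests assert against the returned mock. The helper itself is not part of this patch, so the following is only an illustrative sketch of the pattern the tests appear to rely on; the patch target is an assumption, standing in for whichever function actually hands mail to SendGrid in this codebase:

```python
# Hypothetical conftest.py helper -- not taken from this patch.
from unittest import mock

def start_mock_send_grid(test_case):
    """Patch the SendGrid send call for the lifetime of a unittest-style test case.

    Returns the Mock so tests can assert on .called / .call_count and on
    kwargs such as 'to_addr', as the migrated tests above do.
    """
    patcher = mock.patch('framework.email.tasks.send_email')  # assumed target
    mocked = patcher.start()
    test_case.addCleanup(patcher.stop)  # undo the patch when the test finishes
    return mocked
```

Because the mock is created in `setUp`, each test method starts with fresh call counts, which is what assertions like `assert self.mock_send_grid.call_count == 1` depend on.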
@mock.patch('website.notifications.emails.notify_global_event') def test_reviews_notification(self, mock_notify): diff --git a/tests/test_preprints.py b/tests/test_preprints.py index 15361b4a7bb..13d44d362b5 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -53,6 +53,7 @@ update_or_enqueue_on_preprint_updated, should_update_preprint_identifiers ) +from conftest import start_mock_send_grid SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore @@ -1984,6 +1985,8 @@ def test_update_or_enqueue_on_preprint_doi_created(self): assert should_update_preprint_identifiers(self.private_preprint, {}) +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestPreprintConfirmationEmails(OsfTestCase): def setUp(self): super().setUp() @@ -1993,31 +1996,16 @@ def setUp(self): self.preprint = PreprintFactory(creator=self.user, project=self.project, provider=PreprintProviderFactory(_id='osf'), is_published=False) self.preprint.add_contributor(self.write_contrib, permissions=WRITE) self.preprint_branded = PreprintFactory(creator=self.user, is_published=False) + self.mock_send_grid = start_mock_send_grid(self) - @mock.patch('website.mails.execute_email_send') - def test_creator_gets_email(self, send_mail): + def test_creator_gets_email(self): self.preprint.set_published(True, auth=Auth(self.user), save=True) domain = self.preprint.provider.domain or settings.DOMAIN - send_mail.assert_called_with( - self.user.email, - mails.REVIEWS_SUBMISSION_CONFIRMATION, - user=self.user, - provider_url=f'{domain}preprints/{self.preprint.provider._id}', - domain=domain, - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - workflow=None, - reviewable=self.preprint, - is_creator=True, - provider_name=self.preprint.provider.name, - no_future_emails=[], - logo=settings.OSF_PREPRINTS_LOGO, - document_type=self.preprint.provider.preprint_word, - ) - assert send_mail.call_count == 1 + self.mock_send_grid.assert_called() + assert self.mock_send_grid.call_count == 1 self.preprint_branded.set_published(True, auth=Auth(self.user), save=True) - assert send_mail.call_count == 2 + assert self.mock_send_grid.call_count == 2 class TestPreprintOsfStorage(OsfTestCase): diff --git a/tests/test_registrations/test_embargoes.py b/tests/test_registrations/test_embargoes.py index 1372815927b..4c6bee95449 100644 --- a/tests/test_registrations/test_embargoes.py +++ b/tests/test_registrations/test_embargoes.py @@ -29,6 +29,7 @@ from osf.models.sanctions import SanctionCallbackMixin, Embargo from osf.utils import permissions from osf.models import Registration, Contributor, OSFUser, SpamStatus +from conftest import start_mock_send_grid DUMMY_TOKEN = tokens.encode({ 'dummy': 'token' @@ -1053,6 +1054,8 @@ def test_GET_from_authorized_user_with_registration_rej_token_deleted_node(self) @pytest.mark.enable_bookmark_creation +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class RegistrationEmbargoViewsTestCase(OsfTestCase): def setUp(self): super().setUp() @@ -1092,6 +1095,8 @@ def setUp(self): } }) + self.mock_send_grid = start_mock_send_grid(self) + @mock.patch('osf.models.sanctions.EmailApprovableSanction.ask') def test_embargoed_registration_set_privacy_requests_embargo_termination(self, mock_ask): @@ -1125,8 +1130,7 @@ def test_cannot_request_termination_on_component_of_embargo(self): with pytest.raises(NodeStateError): 
reg._nodes.first().request_embargo_termination(node.creator) - @mock.patch('website.mails.execute_email_send') - def test_embargoed_registration_set_privacy_sends_mail(self, mock_send_mail): + def test_embargoed_registration_set_privacy_sends_mail(self): """ Integration test for https://github.com/CenterForOpenScience/osf.io/pull/5294#issuecomment-212613668 """ @@ -1150,7 +1154,7 @@ def test_embargoed_registration_set_privacy_sends_mail(self, mock_send_mail): if Contributor.objects.get(user_id=contributor.id, node_id=self.registration.id).permission == permissions.ADMIN: admin_contributors.append(contributor) for admin in admin_contributors: - assert any([each[0][0] == admin.username for each in mock_send_mail.call_args_list]) + assert any([each[1]['to_addr'] == admin.username for each in self.mock_send_grid.call_args_list]) @mock.patch('osf.models.sanctions.EmailApprovableSanction.ask') def test_make_child_embargoed_registration_public_asks_all_admins_in_tree(self, mock_ask): diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index 19129413137..7a256cf1e3e 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -24,6 +24,7 @@ ) from osf.models import Contributor, Retraction from osf.utils import permissions +from conftest import start_mock_send_grid @pytest.mark.enable_bookmark_creation @@ -748,6 +749,8 @@ def test_POST_retraction_to_subproject_component_returns_HTTPError_BAD_REQUEST(s assert res.status_code == http_status.HTTP_400_BAD_REQUEST @pytest.mark.enable_bookmark_creation +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class RegistrationRetractionViewsTestCase(OsfTestCase): def setUp(self): super().setUp() @@ -760,6 +763,8 @@ def setUp(self): self.retraction_get_url = self.registration.web_url_for('node_registration_retraction_get') self.justification = fake.sentence() + self.mock_send_grid = start_mock_send_grid(self) + def test_GET_retraction_page_when_pending_retraction_returns_HTTPError_BAD_REQUEST(self): self.registration.retract_registration(self.user) self.registration.save() @@ -783,8 +788,7 @@ def test_POST_retraction_to_private_registration_returns_HTTPError_FORBIDDEN(sel self.registration.reload() assert self.registration.retraction is None - @mock.patch('website.mails.execute_email_send') - def test_POST_retraction_does_not_send_email_to_unregistered_admins(self, mock_send_mail): + def test_POST_retraction_does_not_send_email_to_unregistered_admins(self): unreg = UnregUserFactory() self.registration.add_unregistered_contributor( unreg.fullname, @@ -800,7 +804,7 @@ def test_POST_retraction_does_not_send_email_to_unregistered_admins(self, mock_s auth=self.user.auth, ) # Only the creator gets an email; the unreg user does not get emailed - assert mock_send_mail.call_count == 1 + assert self.mock_send_grid.call_count == 1 def test_POST_pending_embargo_returns_HTTPError_HTTPOK(self): self.registration.embargo_registration( @@ -848,8 +852,7 @@ def test_POST_retraction_by_non_admin_retract_HTTPError_UNAUTHORIZED(self): self.registration.reload() assert self.registration.retraction is None - @mock.patch('website.mails.send_mail') - def test_POST_retraction_without_justification_returns_HTTPOK(self, mock_send): + def test_POST_retraction_without_justification_returns_HTTPOK(self): res = self.app.post( self.retraction_post_url, json={'justification': ''}, @@ -861,8 +864,7 @@ def 
test_POST_retraction_without_justification_returns_HTTPOK(self, mock_send): assert self.registration.is_pending_retraction assert self.registration.retraction.justification is None - @mock.patch('website.mails.execute_email_send') - def test_valid_POST_retraction_adds_to_parent_projects_log(self, mock_send): + def test_valid_POST_retraction_adds_to_parent_projects_log(self): initial_project_logs = self.registration.registered_from.logs.count() self.app.post( self.retraction_post_url, @@ -873,8 +875,7 @@ def test_valid_POST_retraction_adds_to_parent_projects_log(self, mock_send): # Logs: Created, registered, retraction initiated assert self.registration.registered_from.logs.count() == initial_project_logs + 1 - @mock.patch('website.mails.execute_email_send') - def test_valid_POST_retraction_when_pending_retraction_raises_400(self, mock_send): + def test_valid_POST_retraction_when_pending_retraction_raises_400(self): self.app.post( self.retraction_post_url, json={'justification': ''}, @@ -887,16 +888,13 @@ def test_valid_POST_retraction_when_pending_retraction_raises_400(self, mock_sen ) assert res.status_code == 400 - @mock.patch('website.mails.execute_email_send') - def test_valid_POST_calls_send_mail_with_username(self, mock_send): + def test_valid_POST_calls_send_mail_with_username(self): self.app.post( self.retraction_post_url, json={'justification': ''}, auth=self.user.auth, ) - assert mock_send.called - args, kwargs = mock_send.call_args - assert self.user.username in args + assert self.mock_send_grid.called def test_non_contributor_GET_approval_returns_HTTPError_UNAUTHORIZED(self): non_contributor = AuthUserFactory() diff --git a/tests/test_spam_mixin.py b/tests/test_spam_mixin.py index 28fd3ca1499..0713d0b4c54 100644 --- a/tests/test_spam_mixin.py +++ b/tests/test_spam_mixin.py @@ -15,8 +15,8 @@ @pytest.mark.django_db -@mock.patch('framework.auth.views.mails.execute_email_send') -def test_throttled_autoban(mock_mail): +@pytest.mark.usefixtures('mock_send_grid') +def test_throttled_autoban(mock_send_grid): settings.SPAM_THROTTLE_AUTOBAN = True user = AuthUserFactory() projects = [] @@ -25,11 +25,7 @@ def test_throttled_autoban(mock_mail): proj.flag_spam() proj.save() projects.append(proj) - mock_mail.assert_called_with(osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - to_addr=user.username, - user=user, - mail=mails.SPAM_USER_BANNED) + mock_send_grid.assert_called() user.reload() assert user.is_disabled for project in projects: diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index 523626fde01..8403a9d63c9 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -97,6 +97,7 @@ from website.util import api_url_for, web_url_for from website.util import rubeus from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag +from conftest import start_mock_send_grid @pytest.mark.enable_enqueue_task @@ -412,8 +413,7 @@ def test_cannot_update_user_without_user_id(self): assert res.status_code == 400 assert res.json['message_long'] == '"id" is required' - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_add_emails_return_emails(self, send_mail): + def test_add_emails_return_emails(self): user1 = AuthUserFactory() url = api_url_for('update_user') email = 'test@cos.io' @@ -426,8 +426,7 @@ def test_add_emails_return_emails(self, send_mail): assert 'emails' in res.json['profile'] assert len(res.json['profile']['emails']) == 2 - 
@mock.patch('framework.auth.views.mails.execute_email_send') - def test_resend_confirmation_return_emails(self, send_mail): + def test_resend_confirmation_return_emails(self): user1 = AuthUserFactory() url = api_url_for('resend_confirmation') email = 'test@cos.io' @@ -439,9 +438,8 @@ def test_resend_confirmation_return_emails(self, send_mail): assert 'emails' in res.json['profile'] assert len(res.json['profile']['emails']) == 2 - @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('website.mailchimp_utils.get_mailchimp_api') - def test_update_user_mailing_lists(self, mock_get_mailchimp_api, send_mail): + def test_update_user_mailing_lists(self, mock_get_mailchimp_api): email = fake_email() email_hash = md5(email.lower().encode()).hexdigest() self.user.emails.create(address=email) @@ -484,9 +482,8 @@ def test_update_user_mailing_lists(self, mock_get_mailchimp_api, send_mail): ) handlers.celery_teardown_request() - @mock.patch('framework.auth.views.mails.execute_email_send') @mock.patch('website.mailchimp_utils.get_mailchimp_api') - def test_unsubscribe_mailchimp_not_called_if_user_not_subscribed(self, mock_get_mailchimp_api, send_mail): + def test_unsubscribe_mailchimp_not_called_if_user_not_subscribed(self, mock_get_mailchimp_api): email = fake_email() self.user.emails.create(address=email) list_name = MAILCHIMP_GENERAL_LIST @@ -588,6 +585,8 @@ def setUp(self): self.user.auth = (self.user.username, 'password') self.user.save() + self.mock_send_grid = start_mock_send_grid(self) + def test_password_change_valid(self, old_password='password', new_password='Pa$$w0rd', @@ -792,14 +791,15 @@ def test_password_change_invalid_empty_string_confirm_password(self): def test_password_change_invalid_blank_confirm_password(self): self.test_password_change_invalid_blank_password('password', 'new password', ' ') - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_user_cannot_request_account_export_before_throttle_expires(self, send_mail): + @mock.patch('website.mails.settings.USE_EMAIL', True) + @mock.patch('website.mails.settings.USE_CELERY', False) + def test_user_cannot_request_account_export_before_throttle_expires(self): url = api_url_for('request_export') self.app.post(url, auth=self.user.auth) - assert send_mail.called + assert self.mock_send_grid.called res = self.app.post(url, auth=self.user.auth) assert res.status_code == 400 - assert send_mail.call_count == 1 + assert self.mock_send_grid.call_count == 1 def test_get_unconfirmed_emails_exclude_external_identity(self): external_identity = { diff --git a/tests/test_webtests.py b/tests/test_webtests.py index f9a74dd03e1..ae1a30e7618 100644 --- a/tests/test_webtests.py +++ b/tests/test_webtests.py @@ -36,6 +36,7 @@ from addons.wiki.tests.factories import WikiFactory, WikiVersionFactory from website import language from website.util import web_url_for, api_url_for +from conftest import start_mock_send_grid logging.getLogger('website.project.model').setLevel(logging.ERROR) @@ -714,6 +715,8 @@ def test_claim_user_registered_preprint_with_correct_password(self): assert preprint not in unreg_user.unclaimed_records +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestResendConfirmation(OsfTestCase): def setUp(self): @@ -723,6 +726,8 @@ def setUp(self): self.get_url = web_url_for('resend_confirmation_get') self.post_url = web_url_for('resend_confirmation_post') + self.mock_send_grid = start_mock_send_grid(self) + # test that resend confirmation 
page is load correctly def test_resend_confirmation_get(self): res = self.app.get(self.get_url) @@ -731,8 +736,7 @@ def test_resend_confirmation_get(self): assert res.get_form('resendForm') # test that unconfirmed user can receive resend confirmation email - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_can_receive_resend_confirmation_email(self, mock_send_mail): + def test_can_receive_resend_confirmation_email(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') @@ -740,14 +744,13 @@ def test_can_receive_resend_confirmation_email(self, mock_send_mail): res = form.submit(self.app) # check email, request and response - assert mock_send_mail.called + assert self.mock_send_grid.called assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('If there is an OSF account', res.text) # test that confirmed user cannot receive resend confirmation email - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_cannot_receive_resend_confirmation_email_1(self, mock_send_mail): + def test_cannot_receive_resend_confirmation_email_1(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') @@ -755,14 +758,13 @@ def test_cannot_receive_resend_confirmation_email_1(self, mock_send_mail): res = form.submit(self.app) # check email, request and response - assert not mock_send_mail.called + assert not self.mock_send_grid.called assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('has already been confirmed', res.text) # test that non-existing user cannot receive resend confirmation email - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_cannot_receive_resend_confirmation_email_2(self, mock_send_mail): + def test_cannot_receive_resend_confirmation_email_2(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') @@ -770,14 +772,13 @@ def test_cannot_receive_resend_confirmation_email_2(self, mock_send_mail): res = form.submit(self.app) # check email, request and response - assert not mock_send_mail.called + assert not self.mock_send_grid.called assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('If there is an OSF account', res.text) # test that user cannot submit resend confirmation request too quickly - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_cannot_resend_confirmation_twice_quickly(self, mock_send_mail): + def test_cannot_resend_confirmation_twice_quickly(self): # load resend confirmation page and submit email res = self.app.get(self.get_url) form = res.get_form('resendForm') @@ -790,6 +791,8 @@ def test_cannot_resend_confirmation_twice_quickly(self, mock_send_mail): assert_in_html('Please wait', res.text) +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestForgotPassword(OsfTestCase): def setUp(self): @@ -801,6 +804,8 @@ def setUp(self): self.user.verification_key_v2 = {} self.user.save() + self.mock_send_grid = start_mock_send_grid(self) + # log users out before they land on forgot password page def test_forgot_password_logs_out_user(self): # visit forgot password link while another user is logged in @@ -820,8 +825,7 @@ def test_get_forgot_password(self): assert res.get_form('forgotPasswordForm') # test that existing user can receive reset password 
email - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_can_receive_reset_password_email(self, mock_send_mail): + def test_can_receive_reset_password_email(self): # load forgot password page and submit email res = self.app.get(self.get_url) form = res.get_form('forgotPasswordForm') @@ -829,7 +833,7 @@ def test_can_receive_reset_password_email(self, mock_send_mail): res = form.submit(self.app) # check mail was sent - assert mock_send_mail.called + assert self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword @@ -843,8 +847,7 @@ def test_can_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 != {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_cannot_receive_reset_password_email(self, mock_send_mail): + def test_cannot_receive_reset_password_email(self): # load forgot password page and submit email res = self.app.get(self.get_url) form = res.get_form('forgotPasswordForm') @@ -852,7 +855,7 @@ def test_cannot_receive_reset_password_email(self, mock_send_mail): res = form.submit(self.app) # check mail was not sent - assert not mock_send_mail.called + assert not self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword @@ -866,8 +869,7 @@ def test_cannot_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_not_active_user_no_reset_password_email(self, mock_send_mail): + def test_not_active_user_no_reset_password_email(self): self.user.deactivate_account() self.user.save() @@ -878,7 +880,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): res = form.submit(self.app) # check mail was not sent - assert not mock_send_mail.called + assert not self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword @@ -892,8 +894,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that user cannot submit forgot password request too quickly - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_cannot_reset_password_twice_quickly(self, mock_send_mail): + def test_cannot_reset_password_twice_quickly(self): # load forgot password page and submit email res = self.app.get(self.get_url) form = res.get_form('forgotPasswordForm') @@ -908,6 +909,8 @@ def test_cannot_reset_password_twice_quickly(self, mock_send_mail): assert_not_in_html('If there is an OSF account', res.text) +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class TestForgotPasswordInstitution(OsfTestCase): def setUp(self): @@ -919,6 +922,8 @@ def setUp(self): self.user.verification_key_v2 = {} self.user.save() + self.mock_send_grid = start_mock_send_grid(self) + # log users out before they land on institutional forgot password page def test_forgot_password_logs_out_user(self): # TODO: check in qa url encoding @@ -939,13 +944,12 @@ def test_get_forgot_password(self): assert 'campaign=unsupportedinstitution' in location # test that user from disabled institution can receive reset password email - @mock.patch('framework.auth.views.mails.execute_email_send') - 
def test_can_receive_reset_password_email(self, mock_send_mail): + def test_can_receive_reset_password_email(self): # submit email to institutional forgot-password page res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) # check mail was sent - assert mock_send_mail.called + assert self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword @@ -959,13 +963,12 @@ def test_can_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 != {} # test that non-existing user cannot receive reset password email - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_cannot_receive_reset_password_email(self, mock_send_mail): + def test_cannot_receive_reset_password_email(self): # load forgot password page and submit email res = self.app.post(self.post_url, data={'forgot_password-email': 'fake' + self.user.username}) # check mail was not sent - assert not mock_send_mail.called + assert not self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword-institution @@ -979,15 +982,14 @@ def test_cannot_receive_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that non-existing user cannot receive institutional reset password email - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_not_active_user_no_reset_password_email(self, mock_send_mail): + def test_not_active_user_no_reset_password_email(self): self.user.deactivate_account() self.user.save() res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) # check mail was not sent - assert not mock_send_mail.called + assert not self.mock_send_grid.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword-institution @@ -1001,8 +1003,7 @@ def test_not_active_user_no_reset_password_email(self, mock_send_mail): assert self.user.verification_key_v2 == {} # test that user cannot submit forgot password request too quickly - @mock.patch('framework.auth.views.mails.execute_email_send') - def test_cannot_reset_password_twice_quickly(self, mock_send_mail): + def test_cannot_reset_password_twice_quickly(self): # submit institutional forgot-password request in rapid succession res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) diff --git a/website/mails/mails.py b/website/mails/mails.py index 39cd8feff0b..ab632e780ec 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -75,20 +75,6 @@ def render_message(tpl_name, **context): return tpl.render(**context) -def execute_email_send(celery, mailer, kwargs, callback=None): - if settings.USE_EMAIL: - if settings.USE_CELERY and celery: - logger.debug('Sending via celery...') - return mailer.apply_async(kwargs=kwargs, link=callback) - else: - logger.debug('Sending without celery') - ret = mailer(**kwargs) - if callback: - callback() - - return ret - - def send_mail( to_addr, mail, @@ -150,13 +136,17 @@ def send_mail( ) logger.debug('Preparing to send...') - ret = execute_email_send( - celery=celery, - mailer=mailer, - kwargs=kwargs, - callback=callback, - ) - return ret + if settings.USE_EMAIL: + if settings.USE_CELERY and celery: + logger.debug('Sending via celery...') + return mailer.apply_async(kwargs=kwargs, link=callback) + else: + 
logger.debug('Sending without celery') + ret = mailer(**kwargs) + if callback: + callback() + + return ret def get_english_article(word): From 73cc32f725a530e286a43d797f3dd78bd556a144 Mon Sep 17 00:00:00 2001 From: Bohdan Odintsov Date: Mon, 2 Jun 2025 17:26:35 +0300 Subject: [PATCH 015/176] remove quickfiles --- README-docker-compose.md | 6 +- addons/base/views.py | 16 +- addons/osfstorage/tests/test_views.py | 2 +- addons/osfstorage/views.py | 3 - admin/base/settings/defaults.py | 3 +- api/caching/tasks.py | 7 +- api/files/serializers.py | 12 -- api/files/views.py | 21 +-- api/users/urls.py | 1 - api/users/views.py | 18 +- api_tests/files/views/test_file_detail.py | 6 +- api_tests/wb/views/test_wb_hooks.py | 22 --- osf/management/commands/data_storage_usage.py | 27 --- .../delete_legacy_quickfiles_nodes.py | 58 ------- .../commands/export_user_account.py | 19 +- .../fix_quickfiles_waterbutler_logs.py | 140 --------------- .../commands/metrics_backfill_summaries.py | 22 ++- osf/management/commands/reindex_quickfiles.py | 43 ----- .../transfer_quickfiles_to_projects.py | 164 ------------------ .../commands/update_storage_usage.py | 2 +- .../reporters/osfstorage_file_count.py | 6 + osf/migrations/0001_initial.py | 12 +- .../0003_aggregated_runsql_calls.py | 1 - osf/migrations/0016_auto_20230828_1810.py | 2 +- ...bjects_alter_abstractnode_tags_and_more.py | 2 +- osf/models/__init__.py | 1 - osf/models/mixins.py | 2 +- osf/models/node.py | 11 +- osf/models/nodelog.py | 2 - osf/models/private_link.py | 12 -- osf/models/quickfiles.py | 93 ---------- osf/models/user.py | 19 +- .../test_fix_quickfiles_waterbutler_logs.py | 90 ---------- .../test_transfer_quickfiles_to_projects.py | 43 ----- scripts/fix_merged_user_quickfiles.py | 34 ---- scripts/generate_sitemap.py | 2 +- tests/test_addons.py | 6 +- website/ember_osf_web/views.py | 2 - website/notifications/utils.py | 1 - website/project/decorators.py | 4 +- website/project/tasks.py | 2 +- website/project/views/node.py | 2 +- website/routes.py | 8 - website/search/elastic_search.py | 20 +-- website/search_migration/__init__.py | 4 +- website/settings/defaults.py | 8 - .../static/js/anonymousLogActionsList.json | 1 - website/static/js/components/quickFiles.js | 150 ---------------- website/static/js/logActionsList.json | 1 - website/static/js/pages/profile-page.js | 1 - website/templates/include/profile/names.mako | 4 +- website/templates/public/register.mako | 2 +- 52 files changed, 61 insertions(+), 1079 deletions(-) delete mode 100644 osf/management/commands/delete_legacy_quickfiles_nodes.py delete mode 100644 osf/management/commands/fix_quickfiles_waterbutler_logs.py delete mode 100644 osf/management/commands/reindex_quickfiles.py delete mode 100644 osf/management/commands/transfer_quickfiles_to_projects.py delete mode 100644 osf/models/quickfiles.py delete mode 100644 osf_tests/management_commands/test_fix_quickfiles_waterbutler_logs.py delete mode 100644 osf_tests/management_commands/test_transfer_quickfiles_to_projects.py delete mode 100644 scripts/fix_merged_user_quickfiles.py delete mode 100644 website/static/js/components/quickFiles.js diff --git a/README-docker-compose.md b/README-docker-compose.md index c3ff7aed542..e3987b6b7bd 100644 --- a/README-docker-compose.md +++ b/README-docker-compose.md @@ -271,10 +271,8 @@ docker compose run --rm web python3 -m scripts.parse_citation_styles ``` - Start ember_osf_web - - Needed for quickfiles feature: - ```bash - docker compose up -d ember_osf_web - ``` + - Needed for ember app: + - 
`docker-compose up -d ember_osf_web` - OPTIONAL: Register OAuth Scopes - Needed for things such as the ember-osf dummy app ```bash diff --git a/addons/base/views.py b/addons/base/views.py index a6c90860b98..b302115bb7f 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -21,7 +21,6 @@ from addons.base import exceptions as addon_errors from addons.base.models import BaseStorageAddon -from addons.osfstorage.models import OsfStorageFile from addons.osfstorage.models import OsfStorageFileNode from addons.osfstorage.utils import enqueue_update_analytics @@ -34,7 +33,6 @@ from framework.exceptions import HTTPError from framework.flask import redirect from framework.sentry import log_exception -from framework.routing import proxy_url from framework.transactions.handlers import no_auto_transaction from website import mails from website import settings @@ -483,7 +481,7 @@ def _construct_payload(auth, resource, credentials, waterbutler_settings): @must_be_signed @no_auto_transaction -@must_be_valid_project(quickfiles_valid=True, preprints_valid=True) +@must_be_valid_project(preprints_valid=True) def create_waterbutler_log(payload, **kwargs): with transaction.atomic(): try: @@ -603,7 +601,7 @@ def create_waterbutler_log(payload, **kwargs): metadata = payload.get('metadata') or payload.get('destination') target_node = AbstractNode.load(metadata.get('nid')) - if target_node and not target_node.is_quickfiles and payload['action'] != 'download_file': + if target_node and payload['action'] != 'download_file': update_storage_usage_with_size(payload) with transaction.atomic(): @@ -1032,16 +1030,6 @@ def persistent_file_download(auth, **kwargs): ) -def addon_view_or_download_quickfile(**kwargs): - fid = kwargs.get('fid', 'NOT_AN_FID') - file_ = OsfStorageFile.load(fid) - if not file_: - raise HTTPError(http_status.HTTP_404_NOT_FOUND, data={ - 'message_short': 'File Not Found', - 'message_long': 'The requested file could not be found.' 
- }) - return proxy_url(f'/project/{file_.target._id}/files/osfstorage/{fid}/') - def addon_view_file(auth, node, file_node, version): # TODO: resolve circular import issue from addons.wiki import settings as wiki_settings diff --git a/addons/osfstorage/tests/test_views.py b/addons/osfstorage/tests/test_views.py index 19940043548..d6c1fffff33 100644 --- a/addons/osfstorage/tests/test_views.py +++ b/addons/osfstorage/tests/test_views.py @@ -24,7 +24,7 @@ from framework.auth import cas from osf import features -from osf.models import Tag, QuickFilesNode +from osf.models import Tag from osf.models import files as models from addons.osfstorage.apps import osf_storage_root from addons.osfstorage import utils diff --git a/addons/osfstorage/views.py b/addons/osfstorage/views.py index a448f3c6edd..e387f34a768 100644 --- a/addons/osfstorage/views.py +++ b/addons/osfstorage/views.py @@ -314,9 +314,6 @@ def osfstorage_create_child(file_node, payload, **kwargs): if not (name or user) or '/' in name: raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - if getattr(file_node.target, 'is_quickfiles', False) and is_folder: - raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={'message_long': 'You may not create a folder for QuickFiles'}) - try: # Create a save point so that we can rollback and unlock # the parent record diff --git a/admin/base/settings/defaults.py b/admin/base/settings/defaults.py index 579b920949e..0ee9d0dbc36 100644 --- a/admin/base/settings/defaults.py +++ b/admin/base/settings/defaults.py @@ -40,7 +40,8 @@ CSRF_COOKIE_HTTPONLY = False ALLOWED_HOSTS = [ - '.osf.io' + '.osf.io', + '*' ] AUTH_PASSWORD_VALIDATORS = [ diff --git a/api/caching/tasks.py b/api/caching/tasks.py index e6d9492714b..a2a7753f51c 100644 --- a/api/caching/tasks.py +++ b/api/caching/tasks.py @@ -181,7 +181,7 @@ def update_storage_usage(target): # for fetching files we use AbstractNode instances, this is why we use branched_from property if isinstance(target, DraftRegistration): enqueue_postcommit_task(update_storage_usage_cache, (target.branched_from.id, target.branched_from._id), {}, celery=True) - elif not isinstance(target, Preprint) and not target.is_quickfiles: + elif not isinstance(target, Preprint): enqueue_postcommit_task(update_storage_usage_cache, (target.id, target._id), {}, celery=True) def update_storage_usage_with_size(payload): @@ -194,9 +194,6 @@ def update_storage_usage_with_size(payload): return target_node = AbstractNode.load(metadata['nid']) - if target_node.is_quickfiles: - return - action = payload['action'] provider = metadata.get('provider', 'osfstorage') @@ -225,7 +222,7 @@ def update_storage_usage_with_size(payload): source_provider = payload['source']['provider'] if target_node == source_node and source_provider == provider: return # Its not going anywhere. 
- if source_provider == 'osfstorage' and not source_node.is_quickfiles: + if source_provider == 'osfstorage': if source_node.storage_limit_status is settings.StorageLimits.NOT_CALCULATED: return update_storage_usage(source_node) diff --git a/api/files/serializers.py b/api/files/serializers.py index e68845c4cd1..1fa8f2e2264 100644 --- a/api/files/serializers.py +++ b/api/files/serializers.py @@ -448,18 +448,6 @@ def to_representation(self, value): return data -class QuickFilesSerializer(BaseFileSerializer): - user = RelationshipField( - related_view='users:user-detail', - related_view_kwargs={'user_id': ''}, - help_text='The user who uploaded this file', - ) - - -class QuickFilesDetailSerializer(QuickFilesSerializer): - id = IDField(source='_id', required=True) - - class FileVersionSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'id', diff --git a/api/files/views.py b/api/files/views.py index 5a498fa7089..bd2eb9979cd 100644 --- a/api/files/views.py +++ b/api/files/views.py @@ -24,11 +24,12 @@ from api.cedar_metadata_records.utils import can_view_record from api.nodes.permissions import ContributorOrPublic from api.files import annotations -from api.files.permissions import IsPreprintFile -from api.files.permissions import CheckedOutOrAdmin -from api.files.serializers import FileSerializer -from api.files.serializers import FileDetailSerializer -from api.files.serializers import FileVersionSerializer +from api.files.permissions import IsPreprintFile, CheckedOutOrAdmin +from api.files.serializers import ( + FileSerializer, + FileDetailSerializer, + FileVersionSerializer, +) from osf.utils.permissions import ADMIN @@ -53,10 +54,6 @@ def get_file(self, check_permissions=True): if getattr(obj.target, 'deleted', None): raise Gone(detail='The requested file is no longer available') - if getattr(obj.target, 'is_quickfiles', False) and getattr(obj.target, 'creator'): - if obj.target.creator.is_disabled: - raise Gone(detail='This user has been deactivated and their quickfiles are no longer available.') - if getattr(obj.target, 'is_retracted', False): raise Gone(detail='The requested file is no longer available.') @@ -85,9 +82,6 @@ class FileDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView, FileMixin): view_category = 'files' view_name = 'file-detail' - def get_serializer_class(self): - return FileDetailSerializer - def get_target(self): return self.get_file().target @@ -97,8 +91,7 @@ def get_object(self): file = self.get_file() if self.request.GET.get('create_guid', False): - # allows quickfiles to be given guids when another user wants a permanent link to it - if (self.get_target().has_permission(user, ADMIN) and utils.has_admin_scope(self.request)) or getattr(file.target, 'is_quickfiles', False): + if (self.get_target().has_permission(user, ADMIN) and utils.has_admin_scope(self.request)): file.get_guid(create=True) # We normally would pass this through `get_file` as an annotation, but the `select_for_update` feature prevents diff --git a/api/users/urls.py b/api/users/urls.py index f66939e3690..94fea944500 100644 --- a/api/users/urls.py +++ b/api/users/urls.py @@ -22,7 +22,6 @@ re_path(r'^(?P\w+)/registrations/$', views.UserRegistrations.as_view(), name=views.UserRegistrations.view_name), re_path(r'^(?P\w+)/settings/$', views.UserSettings.as_view(), name=views.UserSettings.view_name), re_path(r'^(?P\w+)/messages/$', views.UserMessageView.as_view(), name=views.UserMessageView.view_name), - re_path(r'^(?P\w+)/quickfiles/$', views.UserQuickFiles.as_view(), 
name=views.UserQuickFiles.view_name), re_path(r'^(?P\w+)/relationships/institutions/$', views.UserInstitutionsRelationship.as_view(), name=views.UserInstitutionsRelationship.view_name), re_path(r'^(?P\w+)/settings/emails/$', views.UserEmailsList.as_view(), name=views.UserEmailsList.view_name), re_path(r'^(?P\w+)/settings/emails/(?P\w+)/$', views.UserEmailsDetail.as_view(), name=views.UserEmailsDetail.view_name), diff --git a/api/users/views.py b/api/users/views.py index 3061e67321c..d186e5c5ea4 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -11,7 +11,7 @@ from api.addons.views import AddonSettingsMixin from api.base import permissions as base_permissions from api.users.permissions import UserMessagePermissions -from api.base.exceptions import Conflict, UserGone, Gone +from api.base.exceptions import Conflict, UserGone from api.base.filters import ListFilterMixin, PreprintFilterMixin from api.base.parsers import ( JSONAPIRelationshipParser, @@ -352,22 +352,6 @@ def get_queryset(self): ) -class UserQuickFiles(JSONAPIBaseView, generics.ListAPIView): - view_category = 'users' - view_name = 'user-quickfiles' - - permission_classes = ( - drf_permissions.IsAuthenticatedOrReadOnly, - base_permissions.TokenHasScope, - ) - - required_read_scopes = [CoreScopes.NULL] - required_write_scopes = [CoreScopes.NULL] - - def get(self, *args, **kwargs): - raise Gone() - - class UserPreprints(JSONAPIBaseView, generics.ListAPIView, UserMixin, PreprintFilterMixin): """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_preprints_list). """ diff --git a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py index 01f7da21a26..58d58d0cf95 100644 --- a/api_tests/files/views/test_file_detail.py +++ b/api_tests/files/views/test_file_detail.py @@ -16,7 +16,7 @@ from api.base.settings.defaults import API_BASE from api_tests import utils as api_utils from framework.auth.core import Auth -from osf.models import NodeLog, QuickFilesNode, Node, FileVersionUserMetadata +from osf.models import NodeLog, Node, FileVersionUserMetadata from osf.utils.permissions import WRITE, READ from osf.utils.workflows import DefaultStates from osf_tests.factories import ( @@ -56,10 +56,6 @@ class TestFileView: def node(self, user): return ProjectFactory(creator=user, comment_level='public') - @pytest.fixture() - def quickfiles_node(self, user): - return QuickFilesNode.objects.get(creator=user) - @pytest.fixture() def file(self, user, node): return api_utils.create_test_file(node, user, create_guid=False) diff --git a/api_tests/wb/views/test_wb_hooks.py b/api_tests/wb/views/test_wb_hooks.py index 20c09b14e69..c36bdef4722 100644 --- a/api_tests/wb/views/test_wb_hooks.py +++ b/api_tests/wb/views/test_wb_hooks.py @@ -9,26 +9,12 @@ PreprintFactory ) from api_tests.utils import create_test_file, create_test_preprint_file -from osf.models import QuickFilesNode @pytest.fixture() def user(): return AuthUserFactory() -@pytest.fixture() -def quickfiles_node(user): - return QuickFilesNode.objects.get_for_user(user) - -@pytest.fixture() -def quickfiles_file(user, quickfiles_node): - file = create_test_file(quickfiles_node, user, filename='road_dogg.mp3') - return file - -@pytest.fixture() -def quickfiles_folder(quickfiles_node): - return OsfStorageFolder.objects.get_root(target=quickfiles_node) - @pytest.fixture() def node(user): return ProjectFactory(creator=user) @@ -72,10 +58,6 @@ class TestMove(): def move_url(self, node): return f'/_/wb/hooks/{node._id}/move/' - 
@pytest.fixture() - def quickfiles_move_url(self, quickfiles_node): - return f'/_/wb/hooks/{quickfiles_node._id}/move/' - @pytest.fixture() def payload(self, file, folder, root_node, user): return { @@ -568,10 +550,6 @@ class TestCopy(): def copy_url(self, node): return f'/_/wb/hooks/{node._id}/copy/' - @pytest.fixture() - def quickfiles_copy_url(self, quickfiles_node): - return f'/_/wb/hooks/{quickfiles_node._id}/copy/' - @pytest.fixture() def payload(self, file, folder, root_node, user): return { diff --git a/osf/management/commands/data_storage_usage.py b/osf/management/commands/data_storage_usage.py index 00af3c7246f..2a5ac1bf19b 100644 --- a/osf/management/commands/data_storage_usage.py +++ b/osf/management/commands/data_storage_usage.py @@ -139,23 +139,6 @@ GROUP BY node.type, node.is_public """ -# Aggregation of non-deleted quick file sizes (NOTE: This will break when QuickFolders is merged) -ND_QUICK_FILE_SIZE_SUM_SQL = """ - SELECT - node.type, sum(size) - FROM osf_basefileversionsthrough AS obfnv - LEFT JOIN osf_basefilenode file ON obfnv.basefilenode_id = file.id - LEFT JOIN osf_fileversion version ON obfnv.fileversion_id = version.id - LEFT JOIN osf_abstractnode node ON file.target_object_id = node.id - WHERE file.provider = 'osfstorage' AND file.target_content_type_id = %s - AND node.type = 'osf.quickfilesnode' - AND node.is_deleted = False - AND file.deleted_on IS NULL - AND obfnv.id >= %s AND obfnv.id <= %s - GROUP BY node.type - - """ - # Aggregation of size of non-deleted files in preprint supplemental nodes based on the node query above ND_PREPRINT_SUPPLEMENT_SIZE_SUM_SQL = """ SELECT @@ -320,16 +303,6 @@ def gather_usage_data(start, end, dry_run, zip_file): cursor=cursor, )) - # TODO: Move the next when Quick Folders is done - logger.debug(f'Gathering quickfile summary at {datetime.datetime.now()}') - summary_data = combine_summary_data(summary_data, summarize( - sql=ND_QUICK_FILE_SIZE_SUM_SQL, - content_type=abstractnode_content_type, - start=start, - end=end, - cursor=cursor, - )) - logger.debug(f'Gathering supplement summary at {datetime.datetime.now()}') summary_data = combine_summary_data(summary_data, summarize( sql=ND_PREPRINT_SUPPLEMENT_SIZE_SUM_SQL, diff --git a/osf/management/commands/delete_legacy_quickfiles_nodes.py b/osf/management/commands/delete_legacy_quickfiles_nodes.py deleted file mode 100644 index ee69ffd37ab..00000000000 --- a/osf/management/commands/delete_legacy_quickfiles_nodes.py +++ /dev/null @@ -1,58 +0,0 @@ -import logging -from django.db import transaction -from django.utils import timezone -from django.core.management.base import BaseCommand -from framework import sentry -from framework.celery_tasks import app as celery_app - -from osf.models import QuickFilesNode, Node -logger = logging.getLogger(__name__) - - -@celery_app.task(name='osf.management.commands.delete_legacy_quickfiles_nodes') -def delete_quickfiles(batch_size=1000, dry_run=False): - """ - This is a periodic command to sunset our Quickfiles feature and can be safely deleted after - Quickfiles are all marked as deleted. 
- """ - with transaction.atomic(): - i = 0 - for i, node in enumerate(QuickFilesNode.objects.all()[:batch_size]): - node.is_deleted = True - node.deleted = timezone.now() - node.recast(Node._typedmodels_type) - node.save() - - logger.info(f'{i} Quickfiles deleted') - - if dry_run: - raise RuntimeError('dry run rolling back changes') - - if not QuickFilesNode.objects.exists(): - sentry.log_message('Clean-up complete, none more QuickFilesNode delete this task.') - - -class Command(BaseCommand): - """ - Deletes unused legacy Quickfiles. - """ - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Run migration and roll back changes to db', - required=False, - ) - parser.add_argument( - '--batch_size', - type=int, - help='how many many Quickfiles are we deleting tonight?', - required=True, - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run', False) - batch_size = options.get('batch_size', 1000) - delete_quickfiles(batch_size, dry_run) diff --git a/osf/management/commands/export_user_account.py b/osf/management/commands/export_user_account.py index deb299c004a..85d3671266d 100644 --- a/osf/management/commands/export_user_account.py +++ b/osf/management/commands/export_user_account.py @@ -20,8 +20,7 @@ FileVersion, OSFUser, Preprint, - Registration, - QuickFilesNode + Registration ) from osf.utils.workflows import DefaultStates from scripts.utils import Progress @@ -146,7 +145,7 @@ def export_resource(node, user, current_dir): def export_resources(nodes_to_export, user, dir, nodes_type): """ Creates appropriate directory structure and exports a given set of resources - (projects, registrations, quickfiles or preprints) by calling export helper functions. + (projects, registrations or preprints) by calling export helper functions. """ progress = Progress() @@ -159,7 +158,7 @@ def export_resources(nodes_to_export, user, dir, nodes_type): progress.stop() def get_usage(user): - # includes nodes, registrations, quickfiles + # includes nodes, registrations nodes = user.nodes.filter(is_deleted=False).exclude(type='osf.collection').values_list('id', flat=True) node_ctype = ContentType.objects.get_for_model(AbstractNode) node_files = get_resource_files(nodes, node_ctype) @@ -214,12 +213,6 @@ def export_account(user_id, path, only_private=False, only_admin=False, export_f registrations/ *same as projects* - quickfiles/ - / - metadata.json - files/ - osfstorage-archive.zip - """ user = OSFUser.objects.get(guids___id=user_id, guids___id__isnull=False) proceed = input(f'\nUser has {get_usage(user):.2f} GB of data in OSFStorage that will be exported.\nWould you like to continue? 
[y/n] ') @@ -231,13 +224,11 @@ def export_account(user_id, path, only_private=False, only_admin=False, export_f preprints_dir = os.path.join(base_dir, 'preprints') projects_dir = os.path.join(base_dir, 'projects') registrations_dir = os.path.join(base_dir, 'registrations') - quickfiles_dir = os.path.join(base_dir, 'quickfiles') os.mkdir(base_dir) os.mkdir(preprints_dir) os.mkdir(projects_dir) os.mkdir(registrations_dir) - os.mkdir(quickfiles_dir) preprints_to_export = get_preprints_to_export(user) @@ -251,14 +242,10 @@ def export_account(user_id, path, only_private=False, only_admin=False, export_f .get_roots() ) - quickfiles_to_export = ( - QuickFilesNode.objects.filter(creator=user) - ) export_resources(projects_to_export, user, projects_dir, 'projects') export_resources(preprints_to_export, user, preprints_dir, 'preprints') export_resources(registrations_to_export, user, registrations_dir, 'registrations') - export_resources(quickfiles_to_export, user, quickfiles_dir, 'quickfiles') timestamp = dt.datetime.fromtimestamp(time.time()).strftime('%Y%m%d%H%M%S') output = os.path.join(path, f'{user_id}-export-{timestamp}') diff --git a/osf/management/commands/fix_quickfiles_waterbutler_logs.py b/osf/management/commands/fix_quickfiles_waterbutler_logs.py deleted file mode 100644 index 904bfa82c84..00000000000 --- a/osf/management/commands/fix_quickfiles_waterbutler_logs.py +++ /dev/null @@ -1,140 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand -from django.db import transaction -from osf.models import Node, NodeLog -from framework.celery_tasks import app as celery_app -from urllib.parse import urljoin -from website import settings - -logger = logging.getLogger(__name__) - - -def swap_guid(url, node): - url = url.split('/')[:-1] - url[2] = node._id - url = '/'.join(url) - return f'{url}/?pid={node._id}' - - -def swap_guid_view_download(url, node): - url = url.split('/')[:-1] - url[1] = node._id - url = '/'.join(url) - url = url.partition('?pid=')[0] + f'/?pid={node._id}' - return url - - -error_causing_log_actions = { - 'addon_file_renamed', - 'addon_file_moved', - 'addon_file_copied', -} - -dead_links_actions = { - 'osf_storage_file_added', - 'file_tag_removed', - 'file_tag_added', - 'osf_storage_file_removed', - 'osf_storage_file_updated', -} - -affected_log_actions = error_causing_log_actions.union(dead_links_actions) - - -@celery_app.task(name='osf.management.commands.fix_quickfiles_waterbutler_logs') -def fix_logs(node_id, dry_run=False): - ''' - Fixes view/download links for waterbutler based file logs, and also fixes old 10 digit node params for moved/renamed - files. 
- ''' - logger.info(f'{node_id} Quickfiles logs started') - - with transaction.atomic(): - logger.debug(f'{node_id} Quickfiles logs started') - - node = Node.load(node_id) - for log in node.logs.filter(action__in=error_causing_log_actions): - log.params['params_node'] = { - '_id': node._id, - 'title': node.title - } - if log.params.get('auth'): - log.params['auth']['callback_url'] = urljoin( - settings.DOMAIN, - f'project/{node_id}/node/{node_id}/waterbutler/logs/' - ) - - url = swap_guid(log.params['source']['url'], node) - - if log.params['source']['resource'] == log.params['destination']['resource']: - log.params['source']['url'] = url - log.params['source']['nid'] = node._id - if log.params['source'].get('node'): - log.params['source']['node']['url'] = f'/{node._id}/' - log.params['source']['node']['_id'] = node._id - if log.params['source'].get('resource'): - log.params['source']['resource'] = node._id - - log.params['destination']['url'] = url - log.params['destination']['nid'] = node._id - - if log.params['destination'].get('node'): - log.params['destination']['node']['url'] = f'/{node._id}/' - log.params['destination']['node']['_id'] = node._id - - if log.params['destination'].get('resource'): - log.params['destination']['resource'] = node._id - - if log.params.get('urls'): - url = swap_guid_view_download(log.params['urls']['view'], node) - log.params['urls'] = { - 'view': url, - 'download': f'{url}&action=download' - } - - log.save() - - for log in node.logs.filter(action__in=dead_links_actions): - log.params['params_node'] = { - '_id': node._id, - 'title': node.title - } - - url = swap_guid_view_download(log.params['urls']['view'], node) - - log.params['urls'] = { - 'view': url, - 'download': f'{url}&action=download' - } - log.save() - - node.save() - if dry_run: - raise RuntimeError('This was a dry run.') - - logger.info(f'{node._id} Quickfiles logs fixed') - - -def fix_quickfiles_waterbutler_logs(dry_run=False): - nodes = Node.objects.filter(logs__action=NodeLog.MIGRATED_QUICK_FILES).values_list('guids___id', flat=True) - logger.info(f'{nodes.count()} Quickfiles nodes with bugged logs found.') - - for node_id in nodes: - logger.info(f'{node_id} Quickfiles logs fixing started') - fix_logs.apply_async(args=(node_id,), kwargs={'dry_run': dry_run}) - - -class Command(BaseCommand): - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Dry run', - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run') - fix_quickfiles_waterbutler_logs(dry_run=dry_run) diff --git a/osf/management/commands/metrics_backfill_summaries.py b/osf/management/commands/metrics_backfill_summaries.py index 0edd4e6810d..d259e9b2a52 100644 --- a/osf/management/commands/metrics_backfill_summaries.py +++ b/osf/management/commands/metrics_backfill_summaries.py @@ -78,22 +78,20 @@ def _map_download_count(row): def _map_file_summary(row): # date(keen.timestamp) => _source.report_date # "2022-12-30", # keen.created_at => _source.timestamp # "2023-01-02T14:59:04.397056+00:00" - # osfstorage_files_including_quickfiles.total => _source.files.total # 12272, - # osfstorage_files_including_quickfiles.public => _source.files.public # 126, - # osfstorage_files_including_quickfiles.private => _source.files.private # 12146, - # osfstorage_files_including_quickfiles.total_daily => _source.files.total_daily # 0, - # osfstorage_files_including_quickfiles.public_daily => _source.files.public_daily # 0, - # 
osfstorage_files_including_quickfiles.private_daily => _source.files.private_daily # 0 + # osfstorage_files.private => _source.files.private # 12146, + # osfstorage_files.total_daily => _source.files.total_daily # 0, + # osfstorage_files.public_daily => _source.files.public_daily # 0, + # osfstorage_files.private_daily => _source.files.private_daily # 0 return { 'report_date': _timestamp_to_date(row['keen.timestamp']), 'timestamp': _timestamp_to_dt(row['keen.created_at']), 'files': { - 'total': int(row['osfstorage_files_including_quickfiles.total']), - 'public': int(row['osfstorage_files_including_quickfiles.public']), - 'private': int(row['osfstorage_files_including_quickfiles.private']), - 'total_daily': int(row['osfstorage_files_including_quickfiles.total_daily']), - 'public_daily': int(row['osfstorage_files_including_quickfiles.public_daily']), - 'private_daily': int(row['osfstorage_files_including_quickfiles.private_daily']), + 'total': int(row['osfstorage_files.total']), + 'public': int(row['osfstorage_files.public']), + 'private': int(row['osfstorage_files.private']), + 'total_daily': int(row['osfstorage_files.total_daily']), + 'public_daily': int(row['osfstorage_files.public_daily']), + 'private_daily': int(row['osfstorage_files.private_daily']), }, } diff --git a/osf/management/commands/reindex_quickfiles.py b/osf/management/commands/reindex_quickfiles.py deleted file mode 100644 index 84bdfa7d310..00000000000 --- a/osf/management/commands/reindex_quickfiles.py +++ /dev/null @@ -1,43 +0,0 @@ -from django.core.paginator import Paginator -from website.search.search import update_file -from osf.models import Node, NodeLog -from addons.osfstorage.models import OsfStorageFileNode -from django.core.management.base import BaseCommand -from tqdm import tqdm - -PAGE_SIZE = 100 - - -def paginated_progressbar(queryset, page_size, function): - paginator = Paginator(queryset, page_size) - progress_bar = tqdm(total=queryset.count()) - n_processed = 0 - for page_num in paginator.page_range: - page = paginator.page(page_num) - for item in page.object_list: - function(item) - n_processed += len(page.object_list) - progress_bar.update(n_processed) - progress_bar.close() - - -def reindex_quickfiles(): - nodes = Node.objects.filter( - logs__action=NodeLog.MIGRATED_QUICK_FILES - ) - - file_ids = nodes.values_list('files__id', flat=True) - - files_to_reindex = OsfStorageFileNode.objects.filter(id__in=file_ids) - paginated_progressbar(files_to_reindex, PAGE_SIZE, update_file) - - for node in nodes: - node.update_search() - - -class Command(BaseCommand): - """ - Reindex all Quickfiles files that were moved during migration. h/t to erinspace who's code old I'm cribbing here. 
- """ - def handle(self, *args, **options): - reindex_quickfiles() diff --git a/osf/management/commands/transfer_quickfiles_to_projects.py b/osf/management/commands/transfer_quickfiles_to_projects.py deleted file mode 100644 index 4d26eb02f4a..00000000000 --- a/osf/management/commands/transfer_quickfiles_to_projects.py +++ /dev/null @@ -1,164 +0,0 @@ -import pytz -import logging -import datetime - -from django.db import transaction -from django.db.models import Exists, F, Func, OuterRef, Value -from django.core.management.base import BaseCommand -from tqdm import tqdm - -from osf.models import ( - OSFUser, - QuickFilesNode, - NodeLog, - AbstractNode, - Guid, -) -from osf.models.base import generate_guid -from osf.models.quickfiles import get_quickfiles_project_title -from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField - -from addons.osfstorage.models import OsfStorageFile -from django.contrib.contenttypes.models import ContentType - -logger = logging.getLogger(__name__) -QUICKFILES_DESC = 'The Quick Files feature was discontinued and it’s files were migrated into this Project on March' \ - ' 11, 2022. The file URL’s will still resolve properly, and the Quick Files logs are available in' \ - ' the Project’s Recent Activity.' -QUICKFILES_DATE = datetime.datetime(2022, 3, 11, tzinfo=pytz.utc) - - -def remove_quickfiles(): - node_content_type = ContentType.objects.get_for_model(AbstractNode) - quick_file_annotation = Exists( - OsfStorageFile.objects.filter( - target_object_id=OuterRef('id'), - target_content_type=node_content_type - ) - ) - quick_files_nodes = QuickFilesNode.objects.annotate(has_files=quick_file_annotation).filter(has_files=True) - target_count = quick_files_nodes.count() - logger.info(f'Acquired {target_count} targets') - - _ = Guid.objects.filter( - id__in=quick_files_nodes.values_list('guids__id', flat=True) - ).delete() - logger.info(f'Deleted guids: {_}') - - # generate unique guids prior to record creation to avoid collisions, set object ensures all guids are unique - guids = set() - while len(guids) < target_count: - guids.add(generate_guid()) - guids = list(guids) - logger.info(f'Generated {len(guids)} Guids') - - guids = [ - Guid( - _id=_id, - object_id=node_id, - content_type=node_content_type, - ) for _id, node_id in zip(guids, quick_files_nodes.values_list('id', flat=True)) - ] - Guid.objects.bulk_create(guids) - logger.info(f'Created {len(guids)} Guids') - - node_logs = [] - pbar = tqdm(total=target_count) - for node in quick_files_nodes: - node_logs.append(NodeLog( - node=node, - user=node.creator, - original_node=node, - params={'node': node._id}, - action=NodeLog.MIGRATED_QUICK_FILES - )) - node.logs.update( - params=Func( - F('params'), - Value(['node']), - Value(node._id, DateTimeAwareJSONField()), - function='jsonb_set' - ) - ) - pbar.update(1) - pbar.close() - - logger.info('Updated logs') - NodeLog.objects.bulk_create(node_logs) - logger.info(f'Created {len(node_logs)} logs') - - quick_files_nodes.update(description=QUICKFILES_DESC, type='osf.node') - logger.info(f'Projectified {target_count} QuickFilesNodes') - - -def reverse_remove_quickfiles(): - quickfiles_nodes_with_files = AbstractNode.objects.filter( - logs__action=NodeLog.MIGRATED_QUICK_FILES - ) - for node in quickfiles_nodes_with_files: - node.guids.all().delete() - node.save() - - quickfiles_nodes_with_files.update( - type='osf.quickfilesnode', - is_deleted=False, - deleted=None, - ) - - users_without_nodes = OSFUser.objects.exclude( - 
id__in=QuickFilesNode.objects.all().values_list( - 'creator__id', - flat=True - ) - ) - quickfiles_created = [] - for user in users_without_nodes: - quickfiles_created.append( - QuickFilesNode( - title=get_quickfiles_project_title(user), - creator=user - ) - ) - - QuickFilesNode.objects.bulk_create(quickfiles_created) - - for quickfiles in quickfiles_created: - quickfiles.add_addon('osfstorage', auth=None, log=False) - quickfiles.save() - - NodeLog.objects.filter(action=NodeLog.MIGRATED_QUICK_FILES).delete() - - logger.info(f'{len(QuickFilesNode.objects.all())} quickfiles were restored.') - - -class Command(BaseCommand): - """ - Puts all Quickfiles into projects or reverses the effect. - """ - - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Run migration and roll back changes to db', - required=False, - ) - parser.add_argument( - '--reverse', - type=bool, - help='is the reverse to be run?.', - required=False, - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run', False) - reverse = options.get('reverse', False) - with transaction.atomic(): - if reverse: - reverse_remove_quickfiles() - else: - remove_quickfiles() - if dry_run: - raise RuntimeError('Dry run complete, rolling back.') diff --git a/osf/management/commands/update_storage_usage.py b/osf/management/commands/update_storage_usage.py index 09f087bd3e4..b6825de3343 100644 --- a/osf/management/commands/update_storage_usage.py +++ b/osf/management/commands/update_storage_usage.py @@ -20,7 +20,7 @@ def update_storage_usage(dry_run=False, days=DAYS): recently_modified = AbstractNode.objects.filter(modified__gt=modified_limit) for modified_node in recently_modified: file_op_occurred = modified_node.logs.filter(action__contains='file', created__gt=modified_limit).exists() - if not modified_node.is_quickfiles and file_op_occurred: + if file_op_occurred: update_storage_usage_cache(modified_node.id, modified_node._id) if dry_run: diff --git a/osf/metrics/reporters/osfstorage_file_count.py b/osf/metrics/reporters/osfstorage_file_count.py index 2f35e1e81fd..dd449da5f08 100644 --- a/osf/metrics/reporters/osfstorage_file_count.py +++ b/osf/metrics/reporters/osfstorage_file_count.py @@ -45,3 +45,9 @@ def report(self, date): ) return [report] + + def keen_events_from_report(self, report): + event = { + 'osfstorage_files': report.files.to_dict(), + } + return {'file_summary': [event]} diff --git a/osf/migrations/0001_initial.py b/osf/migrations/0001_initial.py index 75c7297114e..afa5a632f9c 100644 --- a/osf/migrations/0001_initial.py +++ b/osf/migrations/0001_initial.py @@ -116,7 +116,7 @@ class Migration(migrations.Migration): ('category', models.CharField(blank=True, choices=[('analysis', 'Analysis'), ('communication', 'Communication'), ('data', 'Data'), ('hypothesis', 'Hypothesis'), ('instrumentation', 'Instrumentation'), ('methods and measures', 'Methods and Measures'), ('procedure', 'Procedure'), ('project', 'Project'), ('software', 'Software'), ('other', 'Other'), ('', 'Uncategorized')], default='', max_length=255)), ('registration_responses', osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder)), ('registration_responses_migrated', models.NullBooleanField(db_index=True, default=True)), - ('type', models.CharField(choices=[('osf.node', 'node'), ('osf.draftnode', 'draft node'), ('osf.registration', 'registration'), 
('osf.quickfilesnode', 'quick files node')], db_index=True, max_length=255)), + ('type', models.CharField(choices=[('osf.node', 'node'), ('osf.draftnode', 'draft node'), ('osf.registration', 'registration')], db_index=True, max_length=255)), ('child_node_subscriptions', osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder)), ('deleted_date', osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True)), ('deleted', osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True)), @@ -2225,16 +2225,6 @@ class Migration(migrations.Migration): }, bases=('osf.abstractprovider',), ), - migrations.CreateModel( - name='QuickFilesNode', - fields=[ - ], - options={ - 'proxy': True, - 'indexes': [], - }, - bases=('osf.abstractnode',), - ), migrations.CreateModel( name='Registration', fields=[ diff --git a/osf/migrations/0003_aggregated_runsql_calls.py b/osf/migrations/0003_aggregated_runsql_calls.py index 985bed65e86..bf945b0f2dd 100644 --- a/osf/migrations/0003_aggregated_runsql_calls.py +++ b/osf/migrations/0003_aggregated_runsql_calls.py @@ -11,7 +11,6 @@ class Migration(migrations.Migration): migrations.RunSQL( [ """ - CREATE UNIQUE INDEX one_quickfiles_per_user ON public.osf_abstractnode USING btree (creator_id, type, is_deleted) WHERE (((type)::text = 'osf.quickfilesnode'::text) AND (is_deleted = false)); CREATE INDEX osf_abstractnode_collection_pub_del_type_index ON public.osf_abstractnode USING btree (is_public, is_deleted, type) WHERE ((is_public = true) AND (is_deleted = false) AND ((type)::text = 'osf.collection'::text)); CREATE INDEX osf_abstractnode_date_modified_ef1e2ad8 ON public.osf_abstractnode USING btree (last_logged); CREATE INDEX osf_abstractnode_node_pub_del_type_index ON public.osf_abstractnode USING btree (is_public, is_deleted, type) WHERE ((is_public = true) AND (is_deleted = false) AND ((type)::text = 'osf.node'::text)); diff --git a/osf/migrations/0016_auto_20230828_1810.py b/osf/migrations/0016_auto_20230828_1810.py index 50af929ea95..36f056c8ef1 100644 --- a/osf/migrations/0016_auto_20230828_1810.py +++ b/osf/migrations/0016_auto_20230828_1810.py @@ -23,6 +23,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='abstractnode', name='type', - field=models.CharField(choices=[('osf.node', 'node'), ('osf.draftnode', 'draft node'), ('osf.quickfilesnode', 'quick files node'), ('osf.registration', 'registration')], db_index=True, max_length=255), + field=models.CharField(choices=[('osf.node', 'node'), ('osf.draftnode', 'draft node'), ('osf.registration', 'registration')], db_index=True, max_length=255), ), ] diff --git a/osf/migrations/0022_alter_abstractnode_subjects_alter_abstractnode_tags_and_more.py b/osf/migrations/0022_alter_abstractnode_subjects_alter_abstractnode_tags_and_more.py index e0b29a4ba9c..17294bd76a1 100644 --- a/osf/migrations/0022_alter_abstractnode_subjects_alter_abstractnode_tags_and_more.py +++ b/osf/migrations/0022_alter_abstractnode_subjects_alter_abstractnode_tags_and_more.py @@ -26,7 +26,7 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='abstractnode', name='type', - field=models.CharField(choices=[('osf.draftnode', 'draft node'), ('osf.node', 'node'), ('osf.quickfilesnode', 'quick files node'), ('osf.registration', 'registration')], db_index=True, max_length=255), + field=models.CharField(choices=[('osf.draftnode', 'draft node'), ('osf.node', 'node'), ('osf.registration', 'registration')], 
db_index=True, max_length=255), ), migrations.AlterField( model_name='abstractprovider', diff --git a/osf/models/__init__.py b/osf/models/__init__.py index 0c65b67ee1a..909183adab6 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -81,7 +81,6 @@ WhitelistedSHAREPreprintProvider, ) from .queued_mail import QueuedMail -from .quickfiles import QuickFilesNode from .registrations import ( DraftRegistration, DraftRegistrationLog, diff --git a/osf/models/mixins.py b/osf/models/mixins.py index b7fe97b7ece..e356d7023cf 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -2238,7 +2238,7 @@ def suspend_spam_user(self, user): # Make public nodes private from this contributor for node in user.all_nodes: - if self._id != node._id and len(node.contributors) == 1 and node.is_public and not node.is_quickfiles: + if self._id != node._id and len(node.contributors) == 1 and node.is_public: node.confirm_spam(save=True, train_spam_services=False) node.set_privacy('private', log=False, save=True, force=True) diff --git a/osf/models/node.py b/osf/models/node.py index 7d196f238dc..2aca6c7c985 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -92,7 +92,7 @@ class AbstractNodeQuerySet(GuidMixinQuerySet): def get_roots(self): return self.filter( - id__in=self.exclude(type__in=['osf.collection', 'osf.quickfilesnode', 'osf.draftnode']).values_list( + id__in=self.exclude(type__in=['osf.collection', 'osf.draftnode']).values_list( 'root_id', flat=True)) def get_children(self, root, active=False, include_root=False): @@ -492,10 +492,6 @@ def is_registration(self): """For v1 compat.""" return False - @property - def is_quickfiles(self): - return False - @property def is_original(self): return not self.is_registration and not self.is_fork @@ -2097,10 +2093,10 @@ def update(self, fields, auth=None, save=True): if not hasattr(self, 'is_bookmark_collection'): self.set_title(title=value, auth=auth, save=False) continue - if not self.is_bookmark_collection or not self.is_quickfiles: + if not self.is_bookmark_collection: self.set_title(title=value, auth=auth, save=False) else: - raise NodeUpdateError(reason='Bookmark collections or QuickFilesNodes cannot be renamed.', key=key) + raise NodeUpdateError(reason='Bookmark collections cannot be renamed.', key=key) elif key == 'description': self.set_description(description=value, auth=auth, save=False) elif key == 'category': @@ -2550,7 +2546,6 @@ def add_default_node_addons(sender, instance, created, **kwargs): @receiver(post_save, sender=Node) @receiver(post_save, sender='osf.Registration') -@receiver(post_save, sender='osf.QuickFilesNode') @receiver(post_save, sender='osf.DraftNode') def set_parent_and_root(sender, instance, created, *args, **kwargs): if getattr(instance, '_parent', None): diff --git a/osf/models/nodelog.py b/osf/models/nodelog.py index d6e01f4822b..a9f0bf63103 100644 --- a/osf/models/nodelog.py +++ b/osf/models/nodelog.py @@ -141,8 +141,6 @@ class NodeLog(ObjectIDMixin, BaseModel): FLAG_SPAM = 'flag_spam' CONFIRM_SPAM = 'confirm_spam' - MIGRATED_QUICK_FILES = 'migrated_quickfiles' - RESOURCE_ADDED = 'resource_identifier_added' RESOURCE_UPDATED = 'resource_identifier_udpated' RESOURCE_REMOVED = 'resource_identifier_removed' diff --git a/osf/models/private_link.py b/osf/models/private_link.py index af861ccf9ff..a29c854e659 100644 --- a/osf/models/private_link.py +++ b/osf/models/private_link.py @@ -1,6 +1,4 @@ from django.db import models -from django.dispatch import receiver -from django.core.exceptions import ValidationError 
from framework.utils import iso8601format @@ -43,13 +41,3 @@ def to_json(self): for x in self.nodes.filter(is_deleted=False)], 'anonymous': self.anonymous } - - -##### Signal listeners ##### -@receiver(models.signals.m2m_changed, sender=PrivateLink.nodes.through) -def check_if_private_link_is_to_quickfiles(sender, instance, action, reverse, model, pk_set, **kwargs): - from .node import AbstractNode - - if action == 'pre_add' and pk_set: - if model == AbstractNode and model.objects.get(id=list(pk_set)[0]).is_quickfiles: - raise ValidationError('A private link cannot be added to a QuickFilesNode') diff --git a/osf/models/quickfiles.py b/osf/models/quickfiles.py deleted file mode 100644 index 2f0fa576e93..00000000000 --- a/osf/models/quickfiles.py +++ /dev/null @@ -1,93 +0,0 @@ -import logging - -from .node import ( - AbstractNode, - AbstractNodeManager, - Node -) -from .nodelog import NodeLog - -from osf.exceptions import NodeStateError - - -logger = logging.getLogger(__name__) - - -class QuickFilesNodeManager(AbstractNodeManager): - - def create_for_user(self, user): - possessive_title = get_quickfiles_project_title(user) - - quickfiles, created = QuickFilesNode.objects.get_or_create( - title=possessive_title, - creator=user - ) - - if not created: - raise NodeStateError('Users may only have one quickfiles project') - - quickfiles.add_addon('osfstorage', auth=None, log=False) - - return quickfiles - - def get_for_user(self, user): - try: - return QuickFilesNode.objects.get(creator=user) - except AbstractNode.DoesNotExist: - return Node.objects.filter( - logs__action=NodeLog.MIGRATED_QUICK_FILES, - creator=user - ).order_by('created').first() # Returns None if there are none - - -class QuickFilesNode(AbstractNode): - __guid_min_length__ = 10 - - objects = QuickFilesNodeManager() - - def __init__(self, *args, **kwargs): - kwargs['is_public'] = True - super().__init__(*args, **kwargs) - - def remove_node(self, auth, date=None): - # QuickFilesNodes are only delete-able for disabled users - # This is only done when doing a GDPR-delete - if auth.user.is_disabled: - super().remove_node(auth=auth, date=date) - else: - raise NodeStateError('A QuickFilesNode may not be deleted.') - - def set_privacy(self, permissions, *args, **kwargs): - raise NodeStateError('You may not set privacy for a QuickFilesNode.') - - def add_contributor(self, contributor, *args, **kwargs): - if contributor == self.creator: - return super().add_contributor(contributor, *args, **kwargs) - raise NodeStateError('A QuickFilesNode may not have additional contributors.') - - def clone(self): - raise NodeStateError('A QuickFilesNode may not be forked, used as a template, or registered.') - - def add_addon(self, name, auth, log=True): - if name != 'osfstorage': - raise NodeStateError('A QuickFilesNode can only have the osfstorage addon.') - return super().add_addon(name, auth, log) - - @property - def is_registration(self): - """For v1 compat.""" - return False - - @property - def is_collection(self): - """For v1 compat.""" - return False - - @property - def is_quickfiles(self): - return True - - -def get_quickfiles_project_title(user): - possessive_title_name = user.fullname + "'s" if user.fullname[-1] != 's' else user.fullname + "'" - return f'{possessive_title_name} Quick Files' diff --git a/osf/models/user.py b/osf/models/user.py index 33b8728ce08..c8fa24052ae 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -67,8 +67,6 @@ logger = logging.getLogger(__name__) -MAX_QUICKFILES_MERGE_RENAME_ATTEMPTS = 1000 - def 
get_default_mailing_lists(): return {'Open Science Framework Help': True} @@ -808,9 +806,6 @@ def merge_user(self, user): # - projects where the user was a contributor (group member only are not included). for node in user.contributed: - # Skip quickfiles - if node.is_quickfiles: - continue user_perms = Contributor(node=node, user=user).permission # if both accounts are contributor of the same project if node.is_contributor(self) and node.is_contributor(user): @@ -835,10 +830,9 @@ def merge_user(self, user): user.collection_set.exclude(is_bookmark_collection=True).update(creator=self) from .files import BaseFileNode - from .quickfiles import QuickFilesNode # - projects where the user was the creator - user.nodes_created.exclude(type=QuickFilesNode._typedmodels_type).update(creator=self) + user.nodes_created.update(creator=self) # - file that the user has checked_out, import done here to prevent import error for file_node in BaseFileNode.files_checked_out(user=user): @@ -1043,13 +1037,6 @@ def save(self, *args, **kwargs): if self.SEARCH_UPDATE_FIELDS.intersection(dirty_fields) and self.is_confirmed: self.update_search() self.update_search_nodes_contributors() - if 'fullname' in dirty_fields: - from .quickfiles import get_quickfiles_project_title, QuickFilesNode - - quickfiles = QuickFilesNode.objects.filter(creator=self).first() - if quickfiles: - quickfiles.title = get_quickfiles_project_title(self) - quickfiles.save() if 'username' in dirty_fields: for list_name, subscription in self.mailchimp_mailing_lists.items(): if subscription: @@ -1453,7 +1440,7 @@ def confirm_spam(self, domains=None, save=True, train_spam_services=False): super().confirm_spam(domains=domains, save=save, train_spam_services=train_spam_services) # Don't train on resources merely associated with spam user - for node in self.nodes.filter(is_public=True, is_deleted=False).exclude(type='osf.quickfilesnode'): + for node in self.nodes.filter(is_public=True, is_deleted=False): node.confirm_spam(train_spam_services=train_spam_services) for preprint in self.preprints.filter(is_public=True, deleted__isnull=True): preprint.confirm_spam(train_spam_services=train_spam_services) @@ -1463,7 +1450,7 @@ def confirm_ham(self, save=False, train_spam_services=False): super().confirm_ham(save=save, train_spam_services=train_spam_services) # Don't train on resources merely associated with spam user - for node in self.nodes.filter().exclude(type='osf.quickfilesnode'): + for node in self.nodes.filter(): node.confirm_ham(save=save, train_spam_services=train_spam_services) for preprint in self.preprints.filter(): preprint.confirm_ham(save=save, train_spam_services=train_spam_services) diff --git a/osf_tests/management_commands/test_fix_quickfiles_waterbutler_logs.py b/osf_tests/management_commands/test_fix_quickfiles_waterbutler_logs.py deleted file mode 100644 index 04b4619e108..00000000000 --- a/osf_tests/management_commands/test_fix_quickfiles_waterbutler_logs.py +++ /dev/null @@ -1,90 +0,0 @@ -import pytest -from osf.management.commands.fix_quickfiles_waterbutler_logs import fix_quickfiles_waterbutler_logs -from osf_tests.factories import ProjectFactory -from osf.models import NodeLog - - -@pytest.mark.django_db -class TestFixQuickFilesLogs: - - @pytest.fixture() - def node(self): - return ProjectFactory() - - @pytest.fixture() - def node_log_files_added(self, node): - return NodeLog( - action='osf_storage_file_added', - node=node, - params={ - 'contributors': [], - 'params_node': { - 'id': 'jpmxy', - 'title': "John Tordoff's Quick Files" 
- }, - 'params_project': None, - 'path': '/test.json', - 'pointer': None, - 'preprint_provider': None, - 'urls': { - 'view': f'/{node._id}/files/osfstorage/622aad8d1e399c0c296017b0/?pid={node._id}', - 'download': f'/{node._id}/files/osfstorage/622aad8d1e399c0c296017b0/?pid={node._id}?action=download' - } - } - ).save() - - @pytest.fixture() - def node_log_files_renamed(self, node): - return NodeLog( - action='addon_file_renamed', - node=node, - params={ - 'contributors': [], - 'destination': { - 'materialized': 'test-JATS1.xml', - 'url': '/project/jpmxy/files/osfstorage/622aad914ef4bb0ac0333f9f/', - 'addon': 'OSF Storage', - 'node_url': '/jpmxy/', - 'resource': 'jpmxy', - 'node_title': "John Tordoff's Quick Files" - }, - 'params_node': { - 'id': 'jpmxy', - 'title': "John Tordoff's Quick Files" - }, - 'params_project': None, - 'pointer': None, - 'preprint_provider': None, - 'source': { - 'materialized': 'test-JATS.xml', - 'url': '/project/jpmxy/files/osfstorage/622aad914ef4bb0ac0333f9f/', - 'addon': 'OSF Storage', - 'node_url': '/jpmxy/', - 'resource': 'jpmxy', - 'node_title': "John Tordoff's Quick Files" - } - } - ).save() - - @pytest.mark.enable_enqueue_task - def test_fix_quickfiles_waterbutler_logs_files_added(self, node, node_log_files_added): - NodeLog(node=node, action=NodeLog.MIGRATED_QUICK_FILES).save() - fix_quickfiles_waterbutler_logs() - log = node.logs.all().get(action='osf_storage_file_added') - guid = node.guids.last()._id - - assert log.params['urls'] == { - 'view': f'/{guid}/files/osfstorage/622aad8d1e399c0c296017b0/?pid={guid}', - 'download': f'/{guid}/files/osfstorage/622aad8d1e399c0c296017b0/?pid={guid}&action=download' - } - - @pytest.mark.enable_enqueue_task - def test_fix_quickfiles_waterbutler_logs_files_renamed(self, node, node_log_files_renamed): - NodeLog(node=node, action=NodeLog.MIGRATED_QUICK_FILES).save() - fix_quickfiles_waterbutler_logs() - log = node.logs.all().get(action='addon_file_renamed') - guid = node.guids.last()._id - - assert log.params['source']['url'] == f'/project/{guid}/files/osfstorage/622aad914ef4bb0ac0333f9f/?pid={guid}' - assert log.params['destination']['url'] == f'/project/{guid}/files/osfstorage/622aad914ef4bb0ac0333f9f/?pid={guid}' - assert log.params['params_node']['_id'] == guid diff --git a/osf_tests/management_commands/test_transfer_quickfiles_to_projects.py b/osf_tests/management_commands/test_transfer_quickfiles_to_projects.py deleted file mode 100644 index daaeee7703c..00000000000 --- a/osf_tests/management_commands/test_transfer_quickfiles_to_projects.py +++ /dev/null @@ -1,43 +0,0 @@ -import pytest - -from api_tests.utils import create_test_file - -from osf.management.commands.transfer_quickfiles_to_projects import ( - remove_quickfiles, - reverse_remove_quickfiles, - QUICKFILES_DESC -) -from osf.models import NodeLog -from osf.models.quickfiles import QuickFilesNode, get_quickfiles_project_title - -from osf_tests.factories import AuthUserFactory - - -@pytest.mark.django_db -class TestTransferQuickfilesToProjects: - - @pytest.fixture() - def user_with_quickfiles(self): - user = AuthUserFactory() - qfnode = QuickFilesNode.objects.create_for_user(user) - create_test_file(target=qfnode, user=user) - return user - - def test_tranfer_quickfiles_to_projects(self, user_with_quickfiles): - remove_quickfiles() - - assert not QuickFilesNode.objects.all() - node = user_with_quickfiles.nodes.get( - title=get_quickfiles_project_title(user_with_quickfiles), - logs__action=NodeLog.MIGRATED_QUICK_FILES, - description=QUICKFILES_DESC - ) - 
assert node.files.all() - - def test_reverse_tranfer_quickfiles_to_projects(self, user_with_quickfiles): - remove_quickfiles() - reverse_remove_quickfiles() - - quickfiles_node = QuickFilesNode.objects.get_for_user(user_with_quickfiles) - assert QuickFilesNode.objects.all().get() == quickfiles_node - assert quickfiles_node.files.exists() diff --git a/scripts/fix_merged_user_quickfiles.py b/scripts/fix_merged_user_quickfiles.py deleted file mode 100644 index 8d0d1a89b71..00000000000 --- a/scripts/fix_merged_user_quickfiles.py +++ /dev/null @@ -1,34 +0,0 @@ -import logging -import sys - -from django.db import transaction -from django.db.models import F, Count - -from website.app import setup_django -setup_django() -from osf.models import QuickFilesNode -from scripts import utils as script_utils - - -logger = logging.getLogger(__name__) - -def main(): - dry = '--dry' in sys.argv - if not dry: - # If we're not running in dry mode log everything to a file - script_utils.add_file_logger(logger, __file__) - with transaction.atomic(): - qs = QuickFilesNode.objects.exclude(_contributors=F('creator')).annotate(contrib_count=Count('_contributors')).exclude(contrib_count=0) - logger.info(f'Found {qs.count()} quickfiles nodes with mismatched creator and _contributors') - - for node in qs: - bad_contrib = node._contributors.get() - logger.info(f'Fixing {node._id} (quickfiles node): Replacing {bad_contrib._id} (bad contributor) with {node.creator._id} (creator)') - node.contributor_set.filter(user=bad_contrib).update(user=node.creator) - node.save() - if dry: - raise Exception('Abort Transaction - Dry Run') - print('Done') - -if __name__ == '__main__': - main() diff --git a/scripts/generate_sitemap.py b/scripts/generate_sitemap.py index c0b38739789..d7c92deae34 100644 --- a/scripts/generate_sitemap.py +++ b/scripts/generate_sitemap.py @@ -185,7 +185,7 @@ def generate(self): # AbstractNode urls (Nodes and Registrations, no Collections) objs = (AbstractNode.objects .filter(is_public=True, is_deleted=False, retraction_id__isnull=True) - .exclude(type__in=['osf.collection', 'osf.quickfilesnode']) + .exclude(type__in=['osf.collection']) .values('guids___id', 'modified')) progress.start(objs.count(), 'NODE: ') for obj in objs: diff --git a/tests/test_addons.py b/tests/test_addons.py index f6fda06a024..f8421f2bd74 100644 --- a/tests/test_addons.py +++ b/tests/test_addons.py @@ -1,7 +1,6 @@ import datetime import time import functools -import logging from importlib import import_module from unittest.mock import Mock @@ -16,13 +15,12 @@ from framework.auth.core import Auth from framework.exceptions import HTTPError from framework.sessions import get_session -from tests.base import OsfTestCase, get_default_metaschema +from tests.base import OsfTestCase from api_tests.utils import create_test_file from osf_tests.factories import ( AuthUserFactory, ProjectFactory, RegistrationFactory, - DraftRegistrationFactory, ) from website import settings from addons.base import views @@ -44,8 +42,6 @@ from api.caching.utils import storage_usage_cache from dateutil.parser import parse as parse_date from framework import sentry -from api.base.settings.defaults import API_BASE -from tests.json_api_test_app import JSONAPITestApp from website.settings import EXTERNAL_EMBER_APPS from waffle.testutils import override_flag from django.conf import settings as django_conf_settings diff --git a/website/ember_osf_web/views.py b/website/ember_osf_web/views.py index ce8e1978a89..84f23ad8327 100644 --- a/website/ember_osf_web/views.py +++ 
b/website/ember_osf_web/views.py @@ -8,8 +8,6 @@ ember_osf_web_dir = os.path.abspath(os.path.join(os.getcwd(), EXTERNAL_EMBER_APPS['ember_osf_web']['path'])) routes = [ - '/quickfiles/', - '//quickfiles/', '/institutions/', ] diff --git a/website/notifications/utils.py b/website/notifications/utils.py index af8275ab5fb..bc79781abc4 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -205,7 +205,6 @@ def get_configured_projects(user): configured_projects = set() user_subscriptions = get_all_user_subscriptions(user, extra=( ~Q(node__type='osf.collection') & - ~Q(node__type='osf.quickfilesnode') & Q(node__is_deleted=False) )) diff --git a/website/project/decorators.py b/website/project/decorators.py index 39db4099fd5..427c55f3b16 100644 --- a/website/project/decorators.py +++ b/website/project/decorators.py @@ -75,7 +75,7 @@ def wrapped(*args, **kwargs): return wrapped -def must_be_valid_project(func=None, retractions_valid=False, quickfiles_valid=False, preprints_valid=False, groups_valid=False): +def must_be_valid_project(func=None, retractions_valid=False, preprints_valid=False, groups_valid=False): """ Ensures permissions to retractions are never implicitly granted. """ # TODO: Check private link @@ -90,7 +90,7 @@ def wrapped(*args, **kwargs): _inject_nodes(kwargs) - if getattr(kwargs['node'], 'is_collection', True) or (getattr(kwargs['node'], 'is_quickfiles', True) and not quickfiles_valid): + if getattr(kwargs['node'], 'is_collection', True): raise HTTPError( http_status.HTTP_404_NOT_FOUND ) diff --git a/website/project/tasks.py b/website/project/tasks.py index 62d0d79c2af..7b36a903959 100644 --- a/website/project/tasks.py +++ b/website/project/tasks.py @@ -15,7 +15,7 @@ def on_node_updated(node_id, user_id, first_save, saved_fields, request_headers= AbstractNode = apps.get_model('osf.AbstractNode') node = AbstractNode.load(node_id) - if node.is_collection or node.archiving or node.is_quickfiles: + if node.is_collection or node.archiving: return need_update = bool(node.SEARCH_UPDATE_FIELDS.intersection(saved_fields)) diff --git a/website/project/views/node.py b/website/project/views/node.py index bc48ab8561e..32048444c7a 100644 --- a/website/project/views/node.py +++ b/website/project/views/node.py @@ -1294,7 +1294,7 @@ def search_node(auth, **kwargs): can_view_query, title__icontains=query, is_deleted=False - ).exclude(id__in=nin).exclude(type='osf.collection').exclude(type='osf.quickfilesnode')) + ).exclude(id__in=nin).exclude(type='osf.collection')) count = nodes.count() pages = math.ceil(count / size) diff --git a/website/routes.py b/website/routes.py index 7b0f325fa9f..1d03f538c31 100644 --- a/website/routes.py +++ b/website/routes.py @@ -1424,14 +1424,6 @@ def make_url_map(app): 'get', addon_views.addon_view_or_download_file_legacy, json_renderer - ), - Rule( - [ - '/quickfiles//' - ], - 'get', - addon_views.addon_view_or_download_quickfile, - json_renderer ) ]) diff --git a/website/search/elastic_search.py b/website/search/elastic_search.py index c0201e6fdfd..00f9e96ceb8 100644 --- a/website/search/elastic_search.py +++ b/website/search/elastic_search.py @@ -19,7 +19,6 @@ from osf.models import BaseFileNode from osf.models import GuidMetadataRecord from osf.models import Institution -from osf.models import QuickFilesNode from osf.models import Preprint from osf.models import SpamStatus from addons.wiki.models import WikiPage @@ -482,7 +481,7 @@ def update_node(node, index=None, bulk=False, async_update=False): file_.update_search() is_qa_node = 
bool(set(settings.DO_NOT_INDEX_LIST['tags']).intersection(node.tags.all().values_list('name', flat=True))) or any(substring in node.title for substring in settings.DO_NOT_INDEX_LIST['titles']) - if node.is_deleted or not node.is_public or node.archiving or node.is_spam or (node.spam_status == SpamStatus.FLAGGED and settings.SPAM_FLAGGED_REMOVE_FROM_SEARCH) or node.is_quickfiles or is_qa_node: + if node.is_deleted or not node.is_public or node.archiving or node.is_spam or (node.spam_status == SpamStatus.FLAGGED and settings.SPAM_FLAGGED_REMOVE_FROM_SEARCH) or is_qa_node: delete_doc(node._id, node, index=index) else: category = get_doctype_from_node(node) @@ -640,18 +639,6 @@ def update_user(user, index=None): if not user.is_active: try: client().delete(index=index, doc_type='user', id=user._id, refresh=True, ignore=[404]) - # update files in their quickfiles node if the user has been marked as spam - if user.spam_status == SpamStatus.SPAM: - quickfiles = QuickFilesNode.objects.get_for_user(user) - if quickfiles: - for quickfile_id in quickfiles.files.values_list('_id', flat=True): - client().delete( - index=index, - doc_type='file', - id=quickfile_id, - refresh=True, - ignore=[404] - ) except NotFoundError: pass return @@ -710,10 +697,7 @@ def update_file(file_, index=None, delete=False): provider=file_.provider, path=file_.path, ) - if getattr(target, 'is_quickfiles', None): - node_url = f'/{target.creator._id}/quickfiles/' - else: - node_url = f'/{target._id}/' + node_url = f'/{target._id}/' guid_url = None file_guid = file_.get_guid(create=False) diff --git a/website/search_migration/__init__.py b/website/search_migration/__init__.py index 6b673d96f53..d63116bcfcb 100644 --- a/website/search_migration/__init__.py +++ b/website/search_migration/__init__.py @@ -426,7 +426,7 @@ AND name != '' AND target_object_id = ANY (SELECT id FROM osf_abstractnode - WHERE (TYPE = 'osf.node' OR TYPE = 'osf.registration' OR TYPE = 'osf.quickfilesnode') + WHERE (TYPE = 'osf.node' OR TYPE = 'osf.registration') AND is_public IS TRUE AND is_deleted IS FALSE AND (spam_status IS NULL OR NOT (spam_status = 2 or (spam_status = 1 AND {spam_flagged_removed_from_search}))) @@ -612,7 +612,7 @@ AND content_type_id = (SELECT id FROM django_content_type WHERE model = 'abstractnode') LIMIT 1 ) PARENT_GUID ON TRUE -WHERE NOT ((TYPE = 'osf.node' OR TYPE = 'osf.registration' OR TYPE = 'osf.quickfilesnode') +WHERE NOT ((TYPE = 'osf.node' OR TYPE = 'osf.registration') AND N.is_public IS TRUE AND N.is_deleted IS FALSE AND (spam_status IS NULL OR NOT (spam_status = 2 or (spam_status = 1 AND {spam_flagged_removed_from_search}))) diff --git a/website/settings/defaults.py b/website/settings/defaults.py index afa3698c73b..8140d3b353c 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -460,7 +460,6 @@ class CeleryConfig: 'website.archiver.tasks', 'scripts.add_missing_identifiers_to_preprints', 'osf.management.commands.approve_pending_schema_response', - 'osf.management.commands.fix_quickfiles_waterbutler_logs', 'api.share.utils', } @@ -531,8 +530,6 @@ class CeleryConfig: 'osf.management.commands.archive_registrations_on_IA', 'osf.management.commands.populate_initial_schema_responses', 'osf.management.commands.approve_pending_schema_responses', - 'osf.management.commands.delete_legacy_quickfiles_nodes', - 'osf.management.commands.fix_quickfiles_waterbutler_logs', 'osf.management.commands.sync_doi_metadata', 'api.providers.tasks', 'osf.management.commands.daily_reporters_go', @@ -698,11 +695,6 @@ class 
CeleryConfig: 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, - 'delete_legacy_quickfiles_nodes': { - 'task': 'osf.management.commands.delete_legacy_quickfiles_nodes', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False, 'batch_size': 10000}, - }, } # Tasks that need metrics and release requirements diff --git a/website/static/js/anonymousLogActionsList.json b/website/static/js/anonymousLogActionsList.json index e047fbdfc29..17642a945f6 100644 --- a/website/static/js/anonymousLogActionsList.json +++ b/website/static/js/anonymousLogActionsList.json @@ -92,7 +92,6 @@ "subjects_updated": "A user updated the subjects", "view_only_link_added": "A user created a view-only link to a project", "view_only_link_removed": "A user removed a view-only link to a project", - "migrated_quickfiles": "QuickFiles were migrated into a public project", "resource_identifier_added": "A Resource has been added to the Node", "resource_identifier_removed": "A Resource has been removed from the Node", "resource_identifier_updated": "A Resource on the Node has had its PID updated" diff --git a/website/static/js/components/quickFiles.js b/website/static/js/components/quickFiles.js deleted file mode 100644 index 3515d61da34..00000000000 --- a/website/static/js/components/quickFiles.js +++ /dev/null @@ -1,150 +0,0 @@ -'use strict'; - -var m = require('mithril'); // exposes mithril methods, useful for redraw etc. -var $osf = require('js/osfHelpers'); -var iconmap = require('js/iconmap'); -var lodashFind = require('lodash.find'); -var mHelpers = require('js/mithrilHelpers'); -var Raven = require('raven-js'); - -var withPagination = require('js/components/pagination').withPagination; - -var QUICKFILES_PAGE_SIZE = 10; - - -var _buildUrl = function(page, user) { - - var query = { - 'page[size]': QUICKFILES_PAGE_SIZE, - 'page': page || 1, - 'version': '2.2', - }; - - return $osf.apiV2Url('users/' + user + '/quickfiles/', { query: query}); -}; - - -var _getNextItems = function(ctrl, url, updatePagination) { - if(ctrl.requestPending()) { - return; - } - - ctrl.quickFiles([]); - ctrl.requestPending(true); - - var promise = m.request({ - method : 'GET', - url : url, - background : true, - config: mHelpers.apiV2Config({withCredentials: window.contextVars.isOnRootDomain}) - }); - - promise.then( - function(result) { - ctrl.requestPending(false); - ctrl.quickFiles(result.data); - updatePagination(result, url); - m.redraw(); - return promise; - }, function(xhr, textStatus, error) { - ctrl.failed = true; - ctrl.requestPending(false); - m.redraw(); - Raven.captureMessage('Error retrieving quickfiles', { - extra: { - url: url, - textStatus: textStatus, - error: error - } - }); - } - ); -}; - - -var QuickFile = { - - controller: function(options) { - var self = this; - self.file = options.file; - self.icon = iconmap.file; - }, - - view: function(ctrl) { - var viewBase = window.location.origin; - var viewUrl = ctrl.file.attributes.guid ? 
viewBase + '/' + ctrl.file.attributes.guid : viewBase + '/quickfiles' + ctrl.file.attributes.path; - return m('div', [ - m('li.project list-group-item list-group-item-node cite-container', [ - m('p.list-group-item-heading', [ - m('span.component-overflow.f-w-lg', {style: {lineHeight: 1.5, width: '100%'}}, [ - m('span.col-md-8.project-statuses-lg', [ - m('span', {class: ctrl.icon, style: 'padding-right: 5px;'}, ''), - m('a', {'href': viewUrl, - onclick : function () { - $osf.trackClick('QuickFiles', 'view', 'view-quickfile-from-profile-page'); - } - }, ctrl.file.attributes.name), - ]) - ]) - ]) - ]) - ]); - } -}; - -var QuickFiles = { - - controller: function (options) { - var self = this; - self.failed = false; - self.user = options.user._id; - self.isProfile = options.user.is_profile; - - self.quickFiles = m.prop([]); - self.requestPending = m.prop(false); - - self.getCurrentQuickFiles = function _getCurrentQuickFiles(page) { - if (!self.requestPending()) { - var url = _buildUrl(page, self.user); - return _getNextItems(self, url, options.updatePagination); - } - }; - self.getCurrentQuickFiles(); - }, - - view: function (ctrl) { - - return m('p.list-group m-md', [ - // Error message if the request fails - ctrl.failed ? m('p', [ - 'Unable to retrieve quickfiles at this time. Please refresh the page or contact ', - m('a', {'href': 'mailto:support@osf.io'}, 'support@osf.io'), - ' if the problem persists.' - ]) : - - // Show laoding icon while there is a pending request - ctrl.requestPending() ? m('.ball-pulse.ball-scale-blue.text-center', [m(''), m(''), m('')]) : - - // Display each quickfile - [ - ctrl.quickFiles().length !== 0 ? ctrl.quickFiles().map(function(file) { - return m.component(QuickFile, {file: file}); - }) : ctrl.isProfile ? - m('div.help-block', {}, 'You have no public quickfiles') - : m('div.help-block', {}, 'This user has no public quickfiles.') - ] - ]); - } -}; - -var PaginationWrapper = withPagination({ - buildUrl: _buildUrl, - getNextItems: _getNextItems -}); - -QuickFiles = new PaginationWrapper(QuickFiles); - - -module.exports = { - QuickFiles: QuickFiles -}; diff --git a/website/static/js/logActionsList.json b/website/static/js/logActionsList.json index 53c5ef02f04..4b17c8c855c 100644 --- a/website/static/js/logActionsList.json +++ b/website/static/js/logActionsList.json @@ -104,7 +104,6 @@ "prereg_links_updated": "${user} has updated their preregistration data links", "why_no_prereg_updated": "${user} has updated their preregistration data availability statement", "prereg_links_info_updated": "${user} has updated their preregistration links to ${value}", - "migrated_quickfiles": "${user} had their QuickFiles migrated into ${node}", "resource_identifier_added": "${user} has added a Resource with DOI ${new_identifier} to Registration ${node}", "resource_identifier_removed": "${user} has removed a Resource with DOI ${obsolete_identifier} to Registration ${node}", "resource_identifier_updated": "${user} has updated a Resource DOI on Registration ${node} from ${obsolete_identifier} to ${new_identifier}" diff --git a/website/static/js/pages/profile-page.js b/website/static/js/pages/profile-page.js index bb457f9a497..2df42fb3aaa 100644 --- a/website/static/js/pages/profile-page.js +++ b/website/static/js/pages/profile-page.js @@ -10,7 +10,6 @@ require('../project.js'); // Needed for nodelists to work require('../components/logFeed.js'); // Needed for nodelists to work var profile = require('../profile.js'); // Social, Job, Education classes var publicNodes = 
require('../components/publicNodes.js'); -var quickFiles = require('../components/quickFiles.js'); var ctx = window.contextVars; // Instantiate all the profile modules diff --git a/website/templates/include/profile/names.mako b/website/templates/include/profile/names.mako index 12449628081..b20e18773c6 100644 --- a/website/templates/include/profile/names.mako +++ b/website/templates/include/profile/names.mako @@ -4,8 +4,8 @@
    - ## Maxlength for full names must be 186 - quickfile titles use fullname + 's Quick Files - + ## Maxlength for full names must be 186 +

    diff --git a/website/templates/public/register.mako b/website/templates/public/register.mako index 05ccb59efa8..290c991a425 100644 --- a/website/templates/public/register.mako +++ b/website/templates/public/register.mako @@ -91,7 +91,7 @@
    - ## Maxlength for full names must be 186 - quickfile titles use fullname + 's Quick Files + ## Maxlength for full names must be 186
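For context on the `delete_legacy_quickfiles_nodes` beat entry removed near the top of this patch: each item in the OSF celery-beat schedule maps a dotted task path to a crontab trigger plus keyword arguments. A minimal sketch of one such entry is below; the task name is hypothetical, while the trigger and kwargs mirror the shape of the entry being removed.

```python
from celery.schedules import crontab

# Illustrative beat entry only -- the task path below is hypothetical.
CELERYBEAT_SCHEDULE = {
    'nightly_cleanup_example': {
        'task': 'osf.management.commands.nightly_cleanup_example',
        'schedule': crontab(minute=0, hour=5),  # 05:00 UTC; the schedule comments above label this "Daily 12 a.m"
        'kwargs': {'dry_run': False, 'batch_size': 10000},
    },
}
```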
    From d109be9fa55900d45d418c2dc41ce096ae79e58c Mon Sep 17 00:00:00 2001 From: Bohdan Odintsov Date: Tue, 3 Jun 2025 00:18:11 +0300 Subject: [PATCH 016/176] fixed tests --- api_tests/wb/views/test_wb_hooks.py | 1 - 1 file changed, 1 deletion(-) diff --git a/api_tests/wb/views/test_wb_hooks.py b/api_tests/wb/views/test_wb_hooks.py index c36bdef4722..1de111b9b82 100644 --- a/api_tests/wb/views/test_wb_hooks.py +++ b/api_tests/wb/views/test_wb_hooks.py @@ -1,6 +1,5 @@ import pytest -from addons.osfstorage.models import OsfStorageFolder from framework.auth import signing from osf_tests.factories import ( From 41b3a72f3b0e4044872dd769f625579d9db234de Mon Sep 17 00:00:00 2001 From: Bohdan Odintsov Date: Tue, 3 Jun 2025 12:54:10 +0300 Subject: [PATCH 017/176] flake8 --- osf/management/commands/export_user_account.py | 1 - 1 file changed, 1 deletion(-) diff --git a/osf/management/commands/export_user_account.py b/osf/management/commands/export_user_account.py index 85d3671266d..027b421ec67 100644 --- a/osf/management/commands/export_user_account.py +++ b/osf/management/commands/export_user_account.py @@ -242,7 +242,6 @@ def export_account(user_id, path, only_private=False, only_admin=False, export_f .get_roots() ) - export_resources(projects_to_export, user, projects_dir, 'projects') export_resources(preprints_to_export, user, preprints_dir, 'preprints') export_resources(registrations_to_export, user, registrations_dir, 'registrations') From 22c62df393bda0be3219379126743202b620a86d Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 5 Jun 2025 11:16:35 -0400 Subject: [PATCH 018/176] ignore Django maintenance state outside block --- website/maintenance.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/website/maintenance.py b/website/maintenance.py index 98359540cfb..2424651d758 100644 --- a/website/maintenance.py +++ b/website/maintenance.py @@ -42,11 +42,19 @@ def set_maintenance(message, level=1, start=None, end=None): return {'start': state.start, 'end': state.end} + +class InFailedSqlTransaction: + pass + + def get_maintenance(): """Get the current start and end times for the maintenance state. Return None if there is no current maintenance state. 
""" - maintenance = MaintenanceState.objects.all().first() + try: + maintenance = MaintenanceState.objects.all().first() + except InFailedSqlTransaction: + return None return MaintenanceStateSerializer(maintenance).data if maintenance else None def unset_maintenance(): From 1c7e2d2bd59a582f33961dfc14ea33eb891a28b3 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 5 Jun 2025 21:59:39 +0300 Subject: [PATCH 019/176] Add MailHog email testing functionality --- docker-compose.yml | 9 ++++++++ osf/features.yaml | 6 +++++ website/mails/mails.py | 40 ++++++++++++++++++++++++++++++++++ website/settings/defaults.py | 3 +++ website/settings/local-ci.py | 3 +++ website/settings/local-dist.py | 3 +++ 6 files changed, 64 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 14ed365e611..e6d22e86318 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -511,6 +511,7 @@ services: - rabbitmq - elasticsearch - redis + - mailhog environment: DJANGO_SETTINGS_MODULE: api.base.settings env_file: @@ -607,3 +608,11 @@ services: stdin_open: true volumes: - /srv + + mailhog: + image: mailhog/mailhog + container_name: mailhog + ports: + - "1025:1025" # SMTP + - "8025:8025" # Web UI + restart: unless-stopped diff --git a/osf/features.yaml b/osf/features.yaml index 792244a7edc..2f192b0fcb1 100644 --- a/osf/features.yaml +++ b/osf/features.yaml @@ -236,3 +236,9 @@ switches: name: countedusage_unified_metrics_2024 note: use only `osf.metrics.counted_usage`-based metrics where possible; un-use PageCounter, PreprintView, PreprintDownload, etc active: false + + - flag_name: ENABLE_MAILHOG + name: enable + note: This is used to enable the MailHog email testing service, this will allow emails to be sent to the + MailHog service before sending them to real email addresses. + active: false diff --git a/website/mails/mails.py b/website/mails/mails.py index ab632e780ec..b98b7c37b87 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -26,6 +26,7 @@ from framework.email import tasks from osf import features from website import settings +from django.core.mail import EmailMessage, get_connection logger = logging.getLogger(__name__) @@ -75,6 +76,34 @@ def render_message(tpl_name, **context): return tpl.render(**context) +def send_to_mailhog(subject, message, from_email, to_email, attachment_name=None, attachment_content=None): + email = EmailMessage( + subject=subject, + body=message, + from_email=from_email, + to=[to_email], + connection=get_connection( + backend='django.core.mail.backends.smtp.EmailBackend', + host=settings.MAILHOG_HOST, + port=settings.MAILHOG_PORT, + username='', + password='', + use_tls=False, + use_ssl=False, + ) + ) + email.content_subtype = 'html' + + if attachment_name and attachment_content: + email.attach(attachment_name, attachment_content) + + try: + email.send() + except ConnectionRefusedError: + logger.debug('Mailhog is not running. 
Please start it to send emails.') + return + + def send_mail( to_addr, mail, @@ -119,6 +148,17 @@ def send_mail( logger.debug('Sending email...') logger.debug(f'To: {to_addr}\nFrom: {from_addr}\nSubject: {subject}\nMessage: {message}') + if waffle.switch_is_active(features.ENABLE_MAILHOG): + logger.debug('Intercepting email: sending via MailHog') + send_to_mailhog( + subject=subject, + message=message, + from_email=from_addr, + to_email=to_addr, + attachment_name=attachment_name, + attachment_content=attachment_content + ) + kwargs = dict( from_addr=from_addr, to_addr=to_addr, diff --git a/website/settings/defaults.py b/website/settings/defaults.py index 8140d3b353c..b5c954eb2b2 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -160,6 +160,9 @@ def parent_dir(path): MAIL_USERNAME = 'osf-smtp' MAIL_PASSWORD = '' # Set this in local.py +MAILHOG_HOST = 'mailhog' +MAILHOG_PORT = 1025 + # OR, if using Sendgrid's API # WARNING: If `SENDGRID_WHITELIST_MODE` is True, # `tasks.send_email` would only email recipients included in `SENDGRID_EMAIL_WHITELIST` diff --git a/website/settings/local-ci.py b/website/settings/local-ci.py index 8bf283b6338..230be0b2123 100644 --- a/website/settings/local-ci.py +++ b/website/settings/local-ci.py @@ -52,6 +52,9 @@ MAIL_USERNAME = 'osf-smtp' MAIL_PASSWORD = 'CHANGEME' +MAILHOG_HOST = 'mailhog' +MAILHOG_PORT = 1025 + # Session COOKIE_NAME = 'osf' SECRET_KEY = 'CHANGEME' diff --git a/website/settings/local-dist.py b/website/settings/local-dist.py index 3c91142dcb0..88c8cf6adbd 100644 --- a/website/settings/local-dist.py +++ b/website/settings/local-dist.py @@ -62,6 +62,9 @@ MAIL_USERNAME = 'osf-smtp' MAIL_PASSWORD = 'CHANGEME' +MAILHOG_HOST = 'mailhog' +MAILHOG_PORT = 1025 + # Mailchimp email subscriptions ENABLE_EMAIL_SUBSCRIPTIONS = False From 09cf6e8c7d3a226e0f32ca61f041d8cb75dc223d Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Sun, 8 Jun 2025 08:40:17 -0400 Subject: [PATCH 020/176] fix new and noteworthy nodes bug going to Sentry --- scripts/populate_new_and_noteworthy_projects.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/scripts/populate_new_and_noteworthy_projects.py b/scripts/populate_new_and_noteworthy_projects.py index 9f12abc17e9..843193a6868 100644 --- a/scripts/populate_new_and_noteworthy_projects.py +++ b/scripts/populate_new_and_noteworthy_projects.py @@ -108,12 +108,15 @@ def main(dry_run=True): update_node_links(new_and_noteworthy_links_node, new_and_noteworthy_node_ids, 'new and noteworthy') - try: - new_and_noteworthy_links_node.save() - logger.info(f'Node links on {new_and_noteworthy_links_node._id} updated.') - except (KeyError, RuntimeError) as error: - logger.error('Could not migrate new and noteworthy nodes due to error') - logger.exception(error) + if new_and_noteworthy_node_ids: + try: + new_and_noteworthy_links_node.save() + logger.info(f'Node links on {new_and_noteworthy_links_node._id} updated.') + except (KeyError, RuntimeError) as error: + logger.error('Could not migrate new and noteworthy nodes due to error') + logger.exception(error) + else: + logger.error('No new and noteworthy node ids found.') if dry_run: raise RuntimeError('Dry run -- transaction rolled back.') From 97b97ab16fba1d9dba125fee87499af10c3c8042 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 9 Jun 2025 12:12:48 -0400 Subject: [PATCH 021/176] remove superfluous `groups` from serializer --- api/logs/serializers.py | 5 ----- 1 file changed, 5 deletions(-) diff --git
a/api/logs/serializers.py b/api/logs/serializers.py index 85e7a8058c6..5d5df3fc882 100644 --- a/api/logs/serializers.py +++ b/api/logs/serializers.py @@ -257,11 +257,6 @@ class Meta: related_view_kwargs={'node_id': ''}, ) - group = RelationshipField( - related_view='groups:group-detail', - related_view_kwargs={'group_id': ''}, - ) - def get_absolute_url(self, obj): return obj.absolute_url From e0a07653f60cdb5dc05ae73661ec2245534a55f8 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Tue, 10 Jun 2025 15:23:56 +0300 Subject: [PATCH 022/176] add mailhog unit tests --- .github/workflows/test-build.yml | 33 ++++++++++++ api_tests/mailhog/test_mailhog.py | 83 +++++++++++++++++++++++++++++++ docker-compose.yml | 1 - osf/features.yaml | 2 +- pytest.ini | 2 +- tasks/__init__.py | 17 +++++++ website/settings/defaults.py | 1 + website/settings/local-ci.py | 3 +- website/settings/local-dist.py | 1 + 9 files changed, 139 insertions(+), 4 deletions(-) create mode 100644 api_tests/mailhog/test_mailhog.py diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index c0bca56e661..cd7909e480b 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -167,3 +167,36 @@ jobs: - name: Upload report if: (github.event_name != 'pull_request') && (success() || failure()) # run this step even if previous step failed uses: ./.github/actions/gen-report + + mailhog: + runs-on: ubuntu-22.04 + permissions: + checks: write + needs: build-cache + services: + postgres: + image: postgres + + env: + POSTGRES_PASSWORD: ${{ env.OSF_DB_PASSWORD }} + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # Maps tcp port 5432 on service container to the host + - 5432:5432 + mailhog: + image: mailhog/mailhog + ports: + - 1025:1025 + - 8025:8025 + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/start-build + - name: Run tests + run: poetry run python3 -m invoke test-ci-mailhog -n 1 --junit + - name: Upload report + if: (github.event_name != 'pull_request') && (success() || failure()) # run this step even if previous step failed + uses: ./.github/actions/gen-report diff --git a/api_tests/mailhog/test_mailhog.py b/api_tests/mailhog/test_mailhog.py new file mode 100644 index 00000000000..e7720e96afa --- /dev/null +++ b/api_tests/mailhog/test_mailhog.py @@ -0,0 +1,83 @@ +import requests +import pytest +from website.mails import send_mail, TEST +from waffle.testutils import override_switch +from osf import features +from website import settings +from osf_tests.factories import ( + fake_email, + AuthUserFactory, +) +from tests.base import ( + capture_signals, + fake +) +from framework import auth +from unittest import mock +from osf.models import OSFUser +from tests.base import ( + OsfTestCase, +) +from website.util import api_url_for +from conftest import start_mock_send_grid + + +@pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') +class TestMailHog: + + def test_mailhog_recived_mail(self, mock_send_grid): + with override_switch(features.ENABLE_MAILHOG, active=True): + mailhog_v1 = f'{settings.MAILHOG_API_HOST}/api/v1/messages' + mailhog_v2 = f'{settings.MAILHOG_API_HOST}/api/v2/messages' + requests.delete(mailhog_v1) + + send_mail('to_addr@mail.com', TEST, name='Mailhog') + res = requests.get(mailhog_v2).json() + assert res['count'] == 1 + assert res['items'][0]['Content']['Headers']['To'][0] == 'to_addr@mail.com' + assert res['items'][0]['Content']['Headers']['Subject'][0] == 'A test 
email to Mailhog' + mock_send_grid.assert_called() + requests.delete(mailhog_v1) + + +@pytest.mark.django_db +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) +class TestAuthMailhog(OsfTestCase): + + def setUp(self): + super().setUp() + self.user = AuthUserFactory() + self.auth = self.user.auth + + self.mock_send_grid = start_mock_send_grid(self) + + def test_recived_confirmation(self): + url = api_url_for('register_user') + name, email, password = fake.name(), fake_email(), 'underpressure' + mailhog_v1 = f'{settings.MAILHOG_API_HOST}/api/v1/messages' + mailhog_v2 = f'{settings.MAILHOG_API_HOST}/api/v2/messages' + requests.delete(mailhog_v1) + + with override_switch(features.ENABLE_MAILHOG, active=True): + with capture_signals() as mock_signals: + self.app.post( + url, + json={ + 'fullName': name, + 'email1': email, + 'email2': email, + 'password': password, + } + ) + res = requests.get(mailhog_v2).json() + + assert mock_signals.signals_sent() == {auth.signals.user_registered, auth.signals.unconfirmed_user_created} + assert self.mock_send_grid.called + + user = OSFUser.objects.get(username=email) + user_token = list(user.email_verifications.keys())[0] + ideal_link_path = f'/confirm/{user._id}/{user_token}/' + + assert ideal_link_path in res['items'][0]['Content']['Body'] diff --git a/docker-compose.yml b/docker-compose.yml index e6d22e86318..e9ba66bc37e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -511,7 +511,6 @@ services: - rabbitmq - elasticsearch - redis - - mailhog environment: DJANGO_SETTINGS_MODULE: api.base.settings env_file: diff --git a/osf/features.yaml b/osf/features.yaml index 2f192b0fcb1..7247bc871c7 100644 --- a/osf/features.yaml +++ b/osf/features.yaml @@ -238,7 +238,7 @@ switches: active: false - flag_name: ENABLE_MAILHOG - name: enable + name: enable_mailhog note: This is used to enable the MailHog email testing service, this will allow emails to be sent to the MailHog service before sending them to real email addresses. 
active: false diff --git a/pytest.ini b/pytest.ini index f0126e4dfd5..4417f537dd0 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,5 @@ [pytest] -addopts = --ds=osf_tests.settings --tb=short --reuse-db --allow-hosts=127.0.0.1,192.168.168.167,localhost +addopts = --ds=osf_tests.settings --tb=short --reuse-db --allow-hosts=127.0.0.1,192.168.168.167,localhost,mailhog filterwarnings = once::UserWarning ignore:.*U.*mode is deprecated:DeprecationWarning diff --git a/tasks/__init__.py b/tasks/__init__.py index 1490638ac7f..c605b7490b1 100755 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -395,6 +395,9 @@ def test_module(ctx, module=None, numprocesses=None, nocapture=False, params=Non ADMIN_TESTS = [ 'admin_tests', ] +MAILHOG_TESTS = [ + 'api_tests/mailhog', +] @task @@ -431,6 +434,13 @@ def test_api3(ctx, numprocesses=None, coverage=False, testmon=False, junit=False test_module(ctx, module=API_TESTS3 + OSF_TESTS, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) +@task +def test_mailhog(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): + """Run the MAILHOG test suite.""" + print(f'Testing modules "{MAILHOG_TESTS}"') + test_module(ctx, module=MAILHOG_TESTS, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) + + @task def test_admin(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): """Run the Admin test suite.""" @@ -463,6 +473,7 @@ def test(ctx, all=False, lint=False): test_addons(ctx) # TODO: Enable admin tests test_admin(ctx) + test_mailhog(ctx) @task def remove_failures_from_testmon(ctx, db_path=None): @@ -512,6 +523,12 @@ def test_ci_api3_and_osf(ctx, numprocesses=None, coverage=False, testmon=False, #ci_setup(ctx) test_api3(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) + +@task +def test_ci_mailhog(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): + #ci_setup(ctx) + test_mailhog(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) + @task def wheelhouse(ctx, addons=False, release=False, dev=False, pty=True): """Build wheels for python dependencies. 
diff --git a/website/settings/defaults.py b/website/settings/defaults.py index b5c954eb2b2..cc685af9d1b 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -162,6 +162,7 @@ def parent_dir(path): MAILHOG_HOST = 'mailhog' MAILHOG_PORT = 1025 +MAILHOG_API_HOST = 'http://localhost:8025' # OR, if using Sendgrid's API # WARNING: If `SENDGRID_WHITELIST_MODE` is True, diff --git a/website/settings/local-ci.py b/website/settings/local-ci.py index 230be0b2123..c63fce5a86a 100644 --- a/website/settings/local-ci.py +++ b/website/settings/local-ci.py @@ -52,8 +52,9 @@ MAIL_USERNAME = 'osf-smtp' MAIL_PASSWORD = 'CHANGEME' -MAILHOG_HOST = 'mailhog' +MAILHOG_HOST = 'localhost' MAILHOG_PORT = 1025 +MAILHOG_API_HOST = 'http://localhost:8025' # Session COOKIE_NAME = 'osf' diff --git a/website/settings/local-dist.py b/website/settings/local-dist.py index 88c8cf6adbd..8676afd496a 100644 --- a/website/settings/local-dist.py +++ b/website/settings/local-dist.py @@ -64,6 +64,7 @@ MAILHOG_HOST = 'mailhog' MAILHOG_PORT = 1025 +MAILHOG_API_HOST = 'http://localhost:8025' # Mailchimp email subscriptions ENABLE_EMAIL_SUBSCRIPTIONS = False From 1b70ff25c37353855fbf518054e8067792315446 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Tue, 10 Jun 2025 18:52:14 +0300 Subject: [PATCH 023/176] update CI --- pytest.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 4417f537dd0..f0126e4dfd5 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,5 @@ [pytest] -addopts = --ds=osf_tests.settings --tb=short --reuse-db --allow-hosts=127.0.0.1,192.168.168.167,localhost,mailhog +addopts = --ds=osf_tests.settings --tb=short --reuse-db --allow-hosts=127.0.0.1,192.168.168.167,localhost filterwarnings = once::UserWarning ignore:.*U.*mode is deprecated:DeprecationWarning From 62f0fd9f4ff6eef22217782d11ff193cf1355a98 Mon Sep 17 00:00:00 2001 From: Bohdan Odintsov Date: Wed, 11 Jun 2025 14:49:13 +0300 Subject: [PATCH 024/176] fixed bug with contributors --- website/templates/project/contributors.mako | 39 +++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/website/templates/project/contributors.mako b/website/templates/project/contributors.mako index ff3a5194183..d58a04f7bd7 100644 --- a/website/templates/project/contributors.mako +++ b/website/templates/project/contributors.mako @@ -63,6 +63,45 @@

    Drag and drop contributors to change listing order.

    % endif +
    + +
    +
    No contributors found
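The MailHog patches above (PATCH 019 and PATCH 022) intercept outgoing mail over plain SMTP on port 1025 and read it back through MailHog's HTTP API on port 8025. A self-contained sketch of that round trip, outside the OSF test harness, is below; it assumes a MailHog instance on localhost and uses a hypothetical sender address, but the `/api/v1/messages` and `/api/v2/messages` endpoints are the same ones the new tests hit.

```python
import smtplib
from email.message import EmailMessage

import requests

MAILHOG_SMTP_HOST, MAILHOG_SMTP_PORT = 'localhost', 1025  # SMTP listener
MAILHOG_API = 'http://localhost:8025'                     # web UI / REST API


def send_via_mailhog(to_addr, subject, body):
    """Deliver one message to MailHog's SMTP listener (no auth, no TLS)."""
    msg = EmailMessage()
    msg['From'] = 'noreply@example.test'  # hypothetical sender
    msg['To'] = to_addr
    msg['Subject'] = subject
    msg.set_content(body)
    with smtplib.SMTP(MAILHOG_SMTP_HOST, MAILHOG_SMTP_PORT) as smtp:
        smtp.send_message(msg)


if __name__ == '__main__':
    requests.delete(f'{MAILHOG_API}/api/v1/messages')  # start from an empty inbox
    send_via_mailhog('to_addr@mail.com', 'A test email to Mailhog', 'hello')
    captured = requests.get(f'{MAILHOG_API}/api/v2/messages').json()
    assert captured['count'] == 1
    assert captured['items'][0]['Content']['Headers']['To'][0] == 'to_addr@mail.com'
```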
    From 2911fb3aed4f0d5555e5219a9dbff45fefca3d06 Mon Sep 17 00:00:00 2001 From: Anton Krytskyi Date: Mon, 16 Jun 2025 16:52:16 +0300 Subject: [PATCH 025/176] handle django core validation error in drf view --- api/actions/views.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/api/actions/views.py b/api/actions/views.py index 5a9f8803781..f137487ebc6 100644 --- a/api/actions/views.py +++ b/api/actions/views.py @@ -1,7 +1,8 @@ +from django.core.exceptions import ValidationError as DjangoValidationError from guardian.shortcuts import get_objects_for_user from rest_framework import generics from rest_framework import permissions -from rest_framework.exceptions import NotFound, PermissionDenied +from rest_framework.exceptions import NotFound, PermissionDenied, ValidationError from api.actions.permissions import ReviewActionPermission from api.actions.serializers import NodeRequestActionSerializer, ReviewActionSerializer, PreprintRequestActionSerializer @@ -186,7 +187,10 @@ def perform_create(self, serializer): ), ) - serializer.save(user=self.request.user) + try: + serializer.save(user=self.request.user) + except DjangoValidationError as exc: + raise ValidationError(str(exc)) from exc # overrides ListFilterMixin def get_default_queryset(self): From f550c61146c82a7ba835c1746d2cb44ca6ccc08a Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 16 Jun 2025 10:46:40 -0400 Subject: [PATCH 026/176] fix absolute url issue --- api/subscriptions/serializers.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/api/subscriptions/serializers.py b/api/subscriptions/serializers.py index 2bb1041d227..17065db6029 100644 --- a/api/subscriptions/serializers.py +++ b/api/subscriptions/serializers.py @@ -7,7 +7,7 @@ from website.util import api_v2_url -from api.base.serializers import JSONAPISerializer +from api.base.serializers import JSONAPISerializer, LinksField from .fields import FrequencyField class SubscriptionSerializer(JSONAPISerializer): @@ -24,6 +24,13 @@ class SubscriptionSerializer(JSONAPISerializer): class Meta: type_ = 'subscription' + links = LinksField({ + 'self': 'get_absolute_url', + }) + + def get_absolute_url(self, obj): + return obj.absolute_api_v2_url + def update(self, instance, validated_data): user = self.context['request'].user frequency = validated_data.get('frequency') From 458fbfdf3e79f1e0de6c8d8b44fe03ee3c8fdb50 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 16 Jun 2025 13:14:46 -0400 Subject: [PATCH 027/176] fix up unit test issues --- api/subscriptions/fields.py | 5 ++--- api/subscriptions/serializers.py | 2 +- osf/models/notification.py | 24 ++++++++++++++++++++++++ 3 files changed, 27 insertions(+), 4 deletions(-) diff --git a/api/subscriptions/fields.py b/api/subscriptions/fields.py index c26ffaf5d4e..ddbcd4f4aa5 100644 --- a/api/subscriptions/fields.py +++ b/api/subscriptions/fields.py @@ -1,12 +1,11 @@ from rest_framework import serializers as ser -from osf.models import NotificationSubscription class FrequencyField(ser.ChoiceField): def __init__(self, **kwargs): super().__init__(choices=['none', 'instantly', 'daily', 'weekly', 'monthly'], **kwargs) - def to_representation(self, obj: NotificationSubscription): - return obj.message_frequency + def to_representation(self, frequency: str): + return frequency or 'none' def to_internal_value(self, freq): return super().to_internal_value(freq) diff --git a/api/subscriptions/serializers.py b/api/subscriptions/serializers.py index 17065db6029..ceb6d602db7 100644 --- 
a/api/subscriptions/serializers.py +++ b/api/subscriptions/serializers.py @@ -19,7 +19,7 @@ class SubscriptionSerializer(JSONAPISerializer): id = ser.CharField(read_only=True) event_name = ser.CharField(read_only=True) - frequency = FrequencyField(source='*', required=True) + frequency = FrequencyField(source='message_frequency', required=True) class Meta: type_ = 'subscription' diff --git a/osf/models/notification.py b/osf/models/notification.py index 6f0fae57067..d2e4244cb0a 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -301,6 +301,30 @@ def absolute_api_v2_url(self): from api.base.utils import absolute_reverse return absolute_reverse('institutions:institution-detail', kwargs={'institution_id': self._id, 'version': 'v2'}) + from django.contrib.contenttypes.models import ContentType + + @property + def _id(self): + """ + Legacy subscription id for API compatibility. + Provider: _ + User/global: _global_ + Node/etc: _ + """ + # Safety checks + event = self.notification_type.name + ct = self.notification_type.object_content_type + match getattr(ct, 'model', None): + case 'preprintprovider' | 'collectionprovider' | 'registrationprovider': + # Providers: use subscribed_object._id (which is the provider short name, e.g. 'mindrxiv') + return f'{self.subscribed_object._id}_new_pending_submissions' + case 'node' | 'collection' | 'preprint': + # Node-like objects: use object_id (guid) + return f'{self.subscribed_object._id}_{event}' + case 'osfuser' | 'user', _: + # Global: _global + return f'{self.subscribed_object._id}_global_{event}' + class Notification(models.Model): subscription = models.ForeignKey( From b11c15c62312d2a1f4f181f3aa50df81054a9204 Mon Sep 17 00:00:00 2001 From: Anton Krytskyi Date: Wed, 18 Jun 2025 18:17:59 +0300 Subject: [PATCH 028/176] add unit tests --- api/actions/views.py | 2 +- api_tests/actions/views/test_action_list.py | 35 +++++++++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/api/actions/views.py b/api/actions/views.py index f137487ebc6..f28ba99c435 100644 --- a/api/actions/views.py +++ b/api/actions/views.py @@ -189,7 +189,7 @@ def perform_create(self, serializer): try: serializer.save(user=self.request.user) - except DjangoValidationError as exc: + except (ValueError, DjangoValidationError) as exc: raise ValidationError(str(exc)) from exc # overrides ListFilterMixin diff --git a/api_tests/actions/views/test_action_list.py b/api_tests/actions/views/test_action_list.py index c33048656f1..b9254bb3337 100644 --- a/api_tests/actions/views/test_action_list.py +++ b/api_tests/actions/views/test_action_list.py @@ -366,3 +366,38 @@ def test_invalid_target_id(self, app, moderator): expect_errors=True ) assert res.status_code == 404 + + def test_submit_preprint_without_files_returns_400(self, app, url, preprint, node_admin): + # Ensure preprint has no files + preprint.primary_file = None + preprint.save() + + submit_payload = self.create_payload( + preprint._id, + trigger='submit' + ) + + res = app.post_json_api( + url, + submit_payload, + auth=node_admin.auth, + expect_errors=True + ) + assert res.status_code == 400 + + def test_provider_not_reviewed_returns_409(self, app, url, preprint, node_admin): + preprint.provider = PreprintProviderFactory(reviews_workflow=None) + preprint.save() + + submit_payload = self.create_payload( + preprint._id, + trigger='submit' + ) + + res = app.post_json_api( + url, + submit_payload, + auth=node_admin.auth, + expect_errors=True + ) + assert res.status_code == 409 From 
d7f8c7a8e3b6c27fa23d67f1764a0e12ffe1d95b Mon Sep 17 00:00:00 2001 From: Anton Krytskyi Date: Mon, 30 Jun 2025 17:09:41 +0300 Subject: [PATCH 029/176] add registration_word for registration provider --- api/providers/serializers.py | 1 + .../0031_abstractprovider_registration_word.py | 18 ++++++++++++++++++ osf/models/provider.py | 1 + 3 files changed, 20 insertions(+) create mode 100644 osf/migrations/0031_abstractprovider_registration_word.py diff --git a/api/providers/serializers.py b/api/providers/serializers.py index ef89388e281..3de618d4a39 100644 --- a/api/providers/serializers.py +++ b/api/providers/serializers.py @@ -170,6 +170,7 @@ class Meta: reviews_comments_anonymous = ser.BooleanField(read_only=True) allow_updates = ser.BooleanField(read_only=True) allow_bulk_uploads = ser.BooleanField(read_only=True) + registration_word = ser.CharField(read_only=True, allow_null=True) registrations = ReviewableCountsRelationshipField( related_view='providers:registration-providers:registrations-list', diff --git a/osf/migrations/0031_abstractprovider_registration_word.py b/osf/migrations/0031_abstractprovider_registration_word.py new file mode 100644 index 00000000000..eccdf38a9fd --- /dev/null +++ b/osf/migrations/0031_abstractprovider_registration_word.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.15 on 2025-06-30 14:02 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0030_abstractnode__manual_guid'), + ] + + operations = [ + migrations.AddField( + model_name='abstractprovider', + name='registration_word', + field=models.CharField(default='registration', max_length=50), + ), + ] diff --git a/osf/models/provider.py b/osf/models/provider.py index 2ee920a77e5..c78e2f52c94 100644 --- a/osf/models/provider.py +++ b/osf/models/provider.py @@ -305,6 +305,7 @@ class RegistrationProvider(AbstractProvider): bulk_upload_auto_approval = models.BooleanField(null=True, blank=True, default=False) allow_updates = models.BooleanField(null=True, blank=True, default=False) allow_bulk_uploads = models.BooleanField(null=True, blank=True, default=False) + registration_word = models.CharField(max_length=50, default='registration') def __init__(self, *args, **kwargs): self._meta.get_field('share_publish_type').default = 'Registration' From 42b382901f7850f97c18002cd60d126a9b1877be Mon Sep 17 00:00:00 2001 From: antkryt Date: Mon, 30 Jun 2025 17:15:42 +0300 Subject: [PATCH 030/176] [ENG-8290] Allow collection search POST with token scope (#11201) Purpose Add scope for POST collection search --- api/search/views.py | 1 + api_tests/search/views/test_views.py | 23 +++++++++++++++++++++++ framework/auth/oauth_scopes.py | 5 ++++- 3 files changed, 28 insertions(+), 1 deletion(-) diff --git a/api/search/views.py b/api/search/views.py index 2af2afd79a3..958c5907f3b 100644 --- a/api/search/views.py +++ b/api/search/views.py @@ -656,6 +656,7 @@ class SearchCollections(BaseSearchView): doc_type = 'collectionSubmission' view_category = 'search' view_name = 'search-collected-metadata' + required_write_scopes = [CoreScopes.ADVANCED_SEARCH] @property def search_fields(self): diff --git a/api_tests/search/views/test_views.py b/api_tests/search/views/test_views.py index 675381668dc..5259e76d6ed 100644 --- a/api_tests/search/views/test_views.py +++ b/api_tests/search/views/test_views.py @@ -1,8 +1,10 @@ import pytest import uuid +from unittest import mock from api.base.settings.defaults import API_BASE from api_tests import utils +from framework.auth.cas import 
CasResponse from framework.auth.core import Auth from osf.models import RegistrationSchema from osf_tests.factories import ( @@ -1025,3 +1027,24 @@ def test_POST_search_collections_disease_data_type( assert res.status_code == 200 assert res.json['links']['meta']['total'] == 2 assert len(res.json['data']) == 2 + + def test_POST_search_collections_scope(self, app, url_collection_search, user): + payload = self.post_payload(q='Collection') + + token_invalid = CasResponse( + authenticated=True, + user=user._id, + attributes={'accessTokenScope': ['osf.full_read']} + ) + with mock.patch('framework.auth.cas.CasClient.profile', return_value=token_invalid): + res = app.post_json_api(url_collection_search, payload, auth='some-invalid-token', expect_errors=True, auth_type='jwt') + assert res.status_code == 403 + + token_valid = CasResponse( + authenticated=True, + user=user._id, + attributes={'accessTokenScope': ['osf.full_read', 'osf.full_write']} + ) + with mock.patch('framework.auth.cas.CasClient.profile', return_value=token_valid): + res = app.post_json_api(url_collection_search, payload, auth='some-valid-token', auth_type='jwt') + assert res.status_code == 200 diff --git a/framework/auth/oauth_scopes.py b/framework/auth/oauth_scopes.py index 65811b9a981..65a852f058b 100644 --- a/framework/auth/oauth_scopes.py +++ b/framework/auth/oauth_scopes.py @@ -210,6 +210,8 @@ class CoreScopes: READ_COLLECTION_SUBMISSION = 'read_collection_submission' WRITE_COLLECTION_SUBMISSION = 'write_collection_submission' + ADVANCED_SEARCH = 'advanced_search' + class ComposedScopes: """ @@ -370,7 +372,8 @@ class ComposedScopes: CoreScopes.CEDAR_METADATA_RECORD_WRITE, CoreScopes.WRITE_COLLECTION_SUBMISSION_ACTION, CoreScopes.WRITE_COLLECTION_SUBMISSION, - CoreScopes.USERS_MESSAGE_WRITE_EMAIL + CoreScopes.USERS_MESSAGE_WRITE_EMAIL, + CoreScopes.ADVANCED_SEARCH ) # Admin permissions- includes functionality not intended for third-party use From ef9c076c948b0a307baaa0dce4eaabeed701c2b4 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Mon, 30 Jun 2025 17:25:10 +0300 Subject: [PATCH 031/176] [ENG-8247] Ability to delete draft preprints from database (#11191) ## Purpose User should be able to delete draft preprints, so that we don't save them in database ## Changes Added `delete` method. User can delete a preprint only if it's in initial state, so it means this preprint is a draft ## Ticket https://openscience.atlassian.net/browse/ENG-8247 --- api/preprints/views.py | 7 ++- .../preprints/views/test_preprint_detail.py | 44 +++++++++++++++---- 2 files changed, 41 insertions(+), 10 deletions(-) diff --git a/api/preprints/views.py b/api/preprints/views.py index cd5dba8ba8d..ebd6dabfb97 100644 --- a/api/preprints/views.py +++ b/api/preprints/views.py @@ -279,7 +279,7 @@ def create(self, request, *args, **kwargs): return super().create(request, *args, **kwargs) -class PreprintDetail(PreprintOldVersionsImmutableMixin, PreprintMetricsViewMixin, JSONAPIBaseView, generics.RetrieveUpdateAPIView, PreprintMixin, WaterButlerMixin): +class PreprintDetail(PreprintOldVersionsImmutableMixin, PreprintMetricsViewMixin, JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, PreprintMixin, WaterButlerMixin): """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprints_read). 
""" permission_classes = ( @@ -326,6 +326,11 @@ def get_parser_context(self, http_request): res['legacy_type_allowed'] = True return res + def delete(self, request, *args, **kwargs): + if self.get_preprint().machine_state == 'initial': + return super().delete(request, *args, **kwargs) + + raise ValidationError('You cannot delete created preprint') class PreprintNodeRelationship(PreprintOldVersionsImmutableMixin, JSONAPIBaseView, generics.RetrieveUpdateAPIView, PreprintMixin): permission_classes = ( diff --git a/api_tests/preprints/views/test_preprint_detail.py b/api_tests/preprints/views/test_preprint_detail.py index a706bb05105..4b25ec7bf83 100644 --- a/api_tests/preprints/views/test_preprint_detail.py +++ b/api_tests/preprints/views/test_preprint_detail.py @@ -195,15 +195,41 @@ def published_preprint(self, user): def url(self, user): return f'/{API_BASE}preprints/{{}}/' - def test_cannot_delete_preprints( - self, app, user, url, unpublished_preprint, published_preprint): - res = app.delete(url.format(unpublished_preprint._id), auth=user.auth, expect_errors=True) - assert res.status_code == 405 - assert unpublished_preprint.deleted is None - - res = app.delete(url.format(published_preprint._id), auth=user.auth, expect_errors=True) - assert res.status_code == 405 - assert published_preprint.deleted is None + def test_can_delete_draft_preprints( + self, app, user, url, unpublished_preprint + ): + + url = f'/{API_BASE}preprints/{unpublished_preprint._id}/' + res = app.delete(url, auth=user.auth) + assert res.status_code == 204 + + res = app.get(url, auth=user.auth, expect_errors=True) + assert res.status_code == 404 + + def test_cannot_delete_published_preprints(self, app, user, url, published_preprint): + url = f'/{API_BASE}preprints/{published_preprint._id}/' + + res = app.delete(url, auth=user.auth, expect_errors=True) + assert res.json['errors'][0]['detail'] == 'You cannot delete created preprint' + res = app.get(url, auth=user.auth) + assert res.status_code == 200 + + def test_cannot_delete_in_moderation_preprints(self, app, user, url): + pre_moderation_preprint = PreprintFactory(creator=user, reviews_workflow='pre-moderation') + post_moderation_preprint = PreprintFactory(creator=user, reviews_workflow='post-moderation') + + url = f'/{API_BASE}preprints/{pre_moderation_preprint._id}/' + res = app.delete(url, auth=user.auth, expect_errors=True) + assert res.json['errors'][0]['detail'] == 'You cannot delete created preprint' + res = app.get(url, auth=user.auth) + assert res.status_code == 200 + + url = f'/{API_BASE}preprints/{post_moderation_preprint._id}/' + + res = app.delete(url, auth=user.auth, expect_errors=True) + assert res.json['errors'][0]['detail'] == 'You cannot delete created preprint' + res = app.get(url, auth=user.auth) + assert res.status_code == 200 @pytest.mark.django_db From 9aa22ee58c0936a2c1b0dd3fb0c67cb3333d97c7 Mon Sep 17 00:00:00 2001 From: antkryt Date: Mon, 30 Jun 2025 17:26:57 +0300 Subject: [PATCH 032/176] [ENG-8193] Fix issues with Preprint submission via API (#11185) ## Purpose handle django core ValidationError in drf view ## Ticket https://openscience.atlassian.net/browse/ENG-8193 --- api/actions/views.py | 8 +++-- api_tests/actions/views/test_action_list.py | 35 +++++++++++++++++++++ 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/api/actions/views.py b/api/actions/views.py index 5a9f8803781..f28ba99c435 100644 --- a/api/actions/views.py +++ b/api/actions/views.py @@ -1,7 +1,8 @@ +from django.core.exceptions import ValidationError as 
DjangoValidationError from guardian.shortcuts import get_objects_for_user from rest_framework import generics from rest_framework import permissions -from rest_framework.exceptions import NotFound, PermissionDenied +from rest_framework.exceptions import NotFound, PermissionDenied, ValidationError from api.actions.permissions import ReviewActionPermission from api.actions.serializers import NodeRequestActionSerializer, ReviewActionSerializer, PreprintRequestActionSerializer @@ -186,7 +187,10 @@ def perform_create(self, serializer): ), ) - serializer.save(user=self.request.user) + try: + serializer.save(user=self.request.user) + except (ValueError, DjangoValidationError) as exc: + raise ValidationError(str(exc)) from exc # overrides ListFilterMixin def get_default_queryset(self): diff --git a/api_tests/actions/views/test_action_list.py b/api_tests/actions/views/test_action_list.py index c33048656f1..b9254bb3337 100644 --- a/api_tests/actions/views/test_action_list.py +++ b/api_tests/actions/views/test_action_list.py @@ -366,3 +366,38 @@ def test_invalid_target_id(self, app, moderator): expect_errors=True ) assert res.status_code == 404 + + def test_submit_preprint_without_files_returns_400(self, app, url, preprint, node_admin): + # Ensure preprint has no files + preprint.primary_file = None + preprint.save() + + submit_payload = self.create_payload( + preprint._id, + trigger='submit' + ) + + res = app.post_json_api( + url, + submit_payload, + auth=node_admin.auth, + expect_errors=True + ) + assert res.status_code == 400 + + def test_provider_not_reviewed_returns_409(self, app, url, preprint, node_admin): + preprint.provider = PreprintProviderFactory(reviews_workflow=None) + preprint.save() + + submit_payload = self.create_payload( + preprint._id, + trigger='submit' + ) + + res = app.post_json_api( + url, + submit_payload, + auth=node_admin.auth, + expect_errors=True + ) + assert res.status_code == 409 From 3e609ac7c44f2af4edbe8947c5fe71d410a0b1d4 Mon Sep 17 00:00:00 2001 From: antkryt Date: Mon, 30 Jun 2025 17:28:17 +0300 Subject: [PATCH 033/176] [ENG-8192] Ability to force archive registrations when OSFS Folders have changed (#11194) ## Purpose fix force archive for some actions ## Changes - divide revert_log_actions into separate functions - include trashed children when build a file tree - add additional retrieval logic for osf_storage_folder_created and osf_storage_file_removed - add generic retrieval fallback ## Ticket https://openscience.atlassian.net/browse/ENG-8192 --- osf/management/commands/force_archive.py | 106 +++++++--- .../management_commands/test_force_archive.py | 194 ++++++++++++++++++ 2 files changed, 266 insertions(+), 34 deletions(-) create mode 100644 osf_tests/management_commands/test_force_archive.py diff --git a/osf/management/commands/force_archive.py b/osf/management/commands/force_archive.py index 0d12d12bfdf..1f5612a2f91 100644 --- a/osf/management/commands/force_archive.py +++ b/osf/management/commands/force_archive.py @@ -37,6 +37,7 @@ from framework import sentry from framework.exceptions import HTTPError from osf.models import Node, NodeLog, Registration, BaseFileNode +from osf.models.files import TrashedFileNode from osf.exceptions import RegistrationStuckRecoverableException, RegistrationStuckBrokenException from api.base.utils import waterbutler_api_url_for from scripts import utils as script_utils @@ -280,48 +281,72 @@ def get_logs_to_revert(reg): Q(node=reg.registered_from) | (Q(params__source__nid=reg.registered_from._id) | 
Q(params__destination__nid=reg.registered_from._id))).order_by('-date') +def get_file_obj_from_log(log, reg): + try: + return BaseFileNode.objects.get(_id=log.params['urls']['view'].split('/')[4]) + except KeyError: + if log.action == 'osf_storage_folder_created': + return OsfStorageFolder.objects.get( + target_object_id=reg.registered_from.id, + name=log.params['path'].split('/')[-2] + ) + elif log.action == 'osf_storage_file_removed': + path = log.params['path'].split('/') + return TrashedFileNode.objects.get( + target_object_id=reg.registered_from.id, + name=path[-1] or path[-2] # file name or folder name + ) + elif log.action in ['addon_file_moved', 'addon_file_renamed']: + try: + return BaseFileNode.objects.get(_id=log.params['source']['path'].rstrip('/').split('/')[-1]) + except (KeyError, BaseFileNode.DoesNotExist): + return BaseFileNode.objects.get(_id=log.params['destination']['path'].rstrip('/').split('/')[-1]) + else: + # Generic fallback + path = log.params.get('path', '').split('/') + if len(path) >= 2: + name = path[-1] or path[-2] # file name or folder name + return BaseFileNode.objects.get( + target_object_id=reg.registered_from.id, + name=name + ) + + raise ValueError(f'Cannot determine file obj for log {log._id} [Registration id {reg._id}]: {log.action}') + + +def handle_file_operation(file_tree, reg, file_obj, log, obj_cache): + logger.info(f'Reverting {log.action} {file_obj._id}:{file_obj.name} from {log.date}') + + if log.action in ['osf_storage_file_added', 'osf_storage_folder_created']: + return modify_file_tree_recursive(reg._id, file_tree, file_obj, deleted=True, cached=bool(file_obj._id in obj_cache)) + elif log.action == 'osf_storage_file_removed': + return modify_file_tree_recursive(reg._id, file_tree, file_obj, deleted=False, cached=bool(file_obj._id in obj_cache)) + elif log.action == 'osf_storage_file_updated': + return modify_file_tree_recursive(reg._id, file_tree, file_obj, cached=bool(file_obj._id in obj_cache), revert=True) + elif log.action == 'addon_file_moved': + parent = BaseFileNode.objects.get(_id__in=obj_cache, name='/{}'.format(log.params['source']['materialized']).rstrip('/').split('/')[-2]) + return modify_file_tree_recursive(reg._id, file_tree, file_obj, cached=bool(file_obj._id in obj_cache), move_under=parent) + elif log.action == 'addon_file_renamed': + return modify_file_tree_recursive(reg._id, file_tree, file_obj, cached=bool(file_obj._id in obj_cache), rename=log.params['source']['name']) + else: + raise ValueError(f'Unexpected log action: {log.action}') + def revert_log_actions(file_tree, reg, obj_cache, permissible_addons): logs_to_revert = get_logs_to_revert(reg) + if len(permissible_addons) > 1: logs_to_revert = logs_to_revert.exclude(action__in=PERMISSIBLE_BLACKLIST) + for log in list(logs_to_revert): - try: - file_obj = BaseFileNode.objects.get(_id=log.params['urls']['view'].split('/')[4]) - except KeyError: - try: - file_obj = BaseFileNode.objects.get(_id=log.params['source']['path'].rstrip('/').split('/')[-1]) - except BaseFileNode.DoesNotExist: - # Bad log data - file_obj = BaseFileNode.objects.get(_id=log.params['destination']['path'].rstrip('/').split('/')[-1]) + file_obj = get_file_obj_from_log(log, reg) assert file_obj.target in reg.registered_from.root.node_and_primary_descendants() - if log.action == 'osf_storage_file_added': - # Find and mark deleted - logger.info(f'Reverting add {file_obj._id}:{file_obj.name} from {log.date}') - file_tree, noop = modify_file_tree_recursive(reg._id, file_tree, file_obj, deleted=True, 
cached=bool(file_obj._id in obj_cache)) - elif log.action == 'osf_storage_file_removed': - # Find parent and add to children, or undelete - logger.info(f'Reverting delete {file_obj._id}:{file_obj.name} from {log.date}') - file_tree, noop = modify_file_tree_recursive(reg._id, file_tree, file_obj, deleted=False, cached=bool(file_obj._id in obj_cache)) - elif log.action == 'osf_storage_file_updated': - # Find file and revert version - logger.info(f'Reverting update {file_obj._id}:{file_obj.name} from {log.date}') - file_tree, noop = modify_file_tree_recursive(reg._id, file_tree, file_obj, cached=bool(file_obj._id in obj_cache), revert=True) - elif log.action == 'osf_storage_folder_created': - # Find folder and mark deleted - logger.info(f'Reverting folder {file_obj._id}:{file_obj.name} from {log.date}') - file_tree, noop = modify_file_tree_recursive(reg._id, file_tree, file_obj, deleted=True, cached=bool(file_obj._id in obj_cache)) - elif log.action == 'addon_file_moved': - logger.info(f'Reverting move {file_obj._id}:{file_obj.name} from {log.date}') - parent = BaseFileNode.objects.get(_id__in=obj_cache, name='/{}'.format(log.params['source']['materialized']).rstrip('/').split('/')[-2]) - file_tree, noop = modify_file_tree_recursive(reg._id, file_tree, file_obj, cached=bool(file_obj._id in obj_cache), move_under=parent) - elif log.action == 'addon_file_renamed': - logger.info('Reverting rename {}:{} -> {} from {}'.format(file_obj._id, log.params['source']['name'], file_obj.name, log.date)) - file_tree, noop = modify_file_tree_recursive(reg._id, file_tree, file_obj, cached=bool(file_obj._id in obj_cache), rename=log.params['source']['name']) - else: - raise Exception(f'Unexpected log action: {log.action}') + + file_tree, noop = handle_file_operation(file_tree, reg, file_obj, log, obj_cache) assert not noop, f'{reg._id}: Failed to revert action for NodeLog {log._id}' + if file_obj._id not in obj_cache: obj_cache.add(file_obj._id) + return file_tree def build_file_tree(reg, node_settings, *args, **kwargs): @@ -337,7 +362,20 @@ def _recurse(file_obj, node): 'version': int(file_obj.versions.latest('created').identifier) if file_obj.versions.exists() else None } if not file_obj.is_file: - serialized['children'] = [_recurse(child, node) for child in OsfStorageFileNode.objects.filter(target_object_id=node.id, target_content_type_id=ct_id, parent_id=file_obj.id)] + nonlocal reg + all_children = OsfStorageFileNode.objects.filter( + target_object_id=node.id, + target_content_type_id=ct_id, + parent_id=file_obj.id + ).union( + TrashedFileNode.objects.filter( + target_object_id=node.id, + target_content_type_id=ct_id, + parent_id=file_obj.id, + modified__gt=reg.archive_job.created, + ) + ) + serialized['children'] = [_recurse(child, node) for child in all_children] return serialized current_tree = _recurse(node_settings.get_root(), n) diff --git a/osf_tests/management_commands/test_force_archive.py b/osf_tests/management_commands/test_force_archive.py new file mode 100644 index 00000000000..cdd134a02d3 --- /dev/null +++ b/osf_tests/management_commands/test_force_archive.py @@ -0,0 +1,194 @@ +import pytest +from django.utils import timezone + +from addons.osfstorage.models import OsfStorageFile, OsfStorageFolder +from osf.models import NodeLog, BaseFileNode +from osf.models.files import TrashedFileNode, TrashedFolder +from osf.management.commands.force_archive import get_file_obj_from_log, build_file_tree, DEFAULT_PERMISSIBLE_ADDONS +from osf_tests.factories import NodeFactory, RegistrationFactory + + 
+class TestGetFileObjFromLog: + + @pytest.fixture + def node(self): + return NodeFactory(title='Test Node', category='project') + + @pytest.fixture + def reg(self, node): + return RegistrationFactory(project=node, registered_date=timezone.now()) + + @pytest.mark.django_db + def test_file_added(self, node, reg): + file = OsfStorageFile.objects.create(target=node, name='file1.txt') + file.save() + log = NodeLog.objects.create( + node=node, + action='osf_storage_file_added', + params={'urls': {'view': f'/{node._id}/files/osfstorage/{file._id}/'}}, + date=timezone.now(), + ) + file_obj = get_file_obj_from_log(log, reg) + assert isinstance(file_obj, BaseFileNode) + assert file_obj == file + + @pytest.mark.django_db + def test_file_removed(self, node, reg): + file = OsfStorageFile.create(target=node, name='trashed.txt') + file.delete() + log = NodeLog.objects.create( + node=node, + action='osf_storage_file_removed', + params={'path': '/folder1/trashed.txt'}, + date=timezone.now(), + ) + file_obj = get_file_obj_from_log(log, reg) + assert isinstance(file_obj, TrashedFileNode) + assert file_obj == file + + folder = OsfStorageFolder.create(target=node, name='folder1') + folder.delete() + log = NodeLog.objects.create( + node=node, + action='osf_storage_file_removed', + params={'path': '/folder1/'}, + date=timezone.now(), + ) + file_obj = get_file_obj_from_log(log, reg) + assert isinstance(file_obj, TrashedFolder) + assert file_obj == folder + + @pytest.mark.django_db + def test_folder_created(self, node, reg): + folder = OsfStorageFolder.create(target=node, name='folder1') + folder.save() + log = NodeLog.objects.create( + node=node, + action='osf_storage_folder_created', + params={'path': '/folder1/'}, + date=timezone.now(), + ) + file_obj = get_file_obj_from_log(log, reg) + assert isinstance(file_obj, OsfStorageFolder) + assert file_obj == folder + + @pytest.mark.django_db + def test_move_rename(self, node, reg): + file = OsfStorageFile.create(target=node, name='file2.txt') + file.save() + log = NodeLog.objects.create( + node=node, + action='addon_file_renamed', + params={ + 'source': {'path': f'/{file._id}', 'name': 'file1.txt'}, + 'destination': {'path': f'/{file._id}', 'name': 'file2.txt'} + }, + date=timezone.now(), + ) + file_obj = get_file_obj_from_log(log, reg) + assert isinstance(file_obj, BaseFileNode) + assert file_obj == file + + @pytest.mark.django_db + def test_generic_fallback(self, node, reg): + file = OsfStorageFile.create(target=node, name='fallback.txt') + file.save() + log = NodeLog.objects.create( + node=node, + action='some_other_action', + params={'path': '/fallback.txt'}, + date=timezone.now(), + ) + file_obj = get_file_obj_from_log(log, reg) + assert file_obj == file + + +class TestBuildFileTree: + + @pytest.fixture + def node(self): + return NodeFactory(title='Test Node', category='project') + + @pytest.fixture + def reg(self, node): + return RegistrationFactory(project=node, registered_date=timezone.now()) + + @pytest.fixture + def permissible_addons(self): + return DEFAULT_PERMISSIBLE_ADDONS + + @pytest.mark.django_db + def test_empty_folder(self, node, reg, permissible_addons): + folder = OsfStorageFolder.create(target=node, name='empty') + folder.save() + + class DummyNodeSettings: + def get_root(self): + return folder + + node_settings = DummyNodeSettings() + tree = build_file_tree(reg, node_settings, permissible_addons=permissible_addons) + assert tree['object'] == folder + assert tree['children'] == [] + + @pytest.mark.django_db + def test_nested_folders(self, node, 
reg, permissible_addons): + parent = OsfStorageFolder.create(target=node, name='parent') + parent.save() + + child = OsfStorageFolder.create(target=node, name='child') + child.save() + child.move_under(parent) + + file = OsfStorageFile.objects.create(target=node, name='file1.txt') + file.save() + file.move_under(child) + + class DummyNodeSettings: + def get_root(self): + return parent + + node_settings = DummyNodeSettings() + tree = build_file_tree(reg, node_settings, permissible_addons=permissible_addons) + assert tree['object'] == parent + + child_node = next((c for c in tree['children'] if c['object'] == child), None) + assert child_node is not None + assert any(grandchild['object'] == file for grandchild in child_node['children']) + + @pytest.mark.django_db + def test_active_and_trashed_children(self, node, reg, permissible_addons): + folder = OsfStorageFolder.create(target=node, name='parent') + folder.save() + + file = OsfStorageFile.create(target=node, name='file1.txt') + file.save() + file.move_under(folder) + + deleted_file = OsfStorageFile.create(target=node, name='file2.txt') + deleted_file.save() + deleted_file.move_under(folder) + deleted_file.delete() + + class DummyNodeSettings: + def get_root(self): + return folder + + node_settings = DummyNodeSettings() + tree = build_file_tree(reg, node_settings, permissible_addons=permissible_addons) + assert tree['object'] == folder + + names = [child['object'].name for child in tree['children']] + assert 'file1.txt' in names + assert 'file2.txt' in names + + # make sure only valid thrashed file nodes are included + new_reg = RegistrationFactory(project=node, registered_date=timezone.now()) + file.delete() + + tree = build_file_tree(new_reg, node_settings, permissible_addons=permissible_addons) + assert tree['object'] == folder + + names = [child['object'].name for child in tree['children']] + assert 'file1.txt' in names + assert 'file2.txt' not in names From ee1aeb453c0cd3c8c7c7a7d2e350fb27b8f735c2 Mon Sep 17 00:00:00 2001 From: antkryt Date: Mon, 30 Jun 2025 17:49:43 +0300 Subject: [PATCH 034/176] add additional information to user admin (#11184) ## Purpose add more information to user admin ## Changes Added fields: - id - Deactivation request - Change password last attempt - Change password invalid attempts - Socials - User is staff - Groups Added dates to: - Disabled - Registered - Confirmed ## Ticket https://openscience.atlassian.net/browse/ENG-8190 --- admin/templates/users/user.html | 66 +++++++++++++++++++++++++++++++-- 1 file changed, 63 insertions(+), 3 deletions(-) diff --git a/admin/templates/users/user.html b/admin/templates/users/user.html index 453d8f232d0..71bb25a5989 100644 --- a/admin/templates/users/user.html +++ b/admin/templates/users/user.html @@ -70,6 +70,10 @@

    User: {{ user.username }} ({{user. + + id + {{ user.id }} + Full Name {{ user.fullname }} @@ -86,20 +90,42 @@

    User: {{ user.username }} ({{user. Disabled - {{ user.is_disabled }} + {{ user.is_disabled }} [{{ user.date_disabled }}] + + + Deactivation request + + {% if user.requested_deactivation %} + {% if user.contacted_deactivation %} + Contacted + {% else %} + Requested + {% endif %} + {% else %} + Not requested + {% endif %} + Registered - {{ user.is_registered }} + {{ user.is_registered }} [{{ user.date_registered }}] Confirmed - {{ user.is_confirmed }} + {{ user.is_confirmed }} [{{ user.date_confirmed }}] Last login {{ user.last_login }} + + Change password last attempt + {{ user.change_password_last_attempt }} + + + Change password invalid attempts + {{ user.old_password_invalid_attempts }} + Emails @@ -116,6 +142,20 @@

    User: {{ user.username }} ({{user. {% endfor %} + + Socials + + {% if user.social_links %} + {% for name, link in user.social_links.items %} +
  • + {{ name }}: {{ link }} +
  • + {% endfor %} + {% else %} + None + {% endif %} + + {% include "users/two_factor.html" with user=user %} @@ -124,6 +164,12 @@

    User: {{ user.username }} ({{user. {{ user.is_spammy }} + + User is staff + + {{ user.is_staff }} + + {% include "users/add_system_tags.html" with user=user %} {% include "nodes/spam_status.html" with resource=user %} @@ -139,6 +185,20 @@

    User: {{ user.username }} ({{user. {% include "util/node_preprint_paginated_list.html" with items=nodes items_paginator=node_page resource_type="node" current_other_param=current_preprint %} + + Groups + + {% if user.groups %} + {% for group in user.groups.all %} +
  • + {{ group.name }} +
  • + {% endfor %} + {% else %} + None + {% endif %} + +

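A recurring pattern in PATCH 025 and PATCH 032 above is translating model-layer validation failures raised during `serializer.save()` into DRF errors, so clients get a 400 response instead of an unhandled 500. A stripped-down sketch of that pattern, using a hypothetical create view rather than the actual OSF action view:

```python
from django.core.exceptions import ValidationError as DjangoValidationError
from rest_framework import generics
from rest_framework.exceptions import ValidationError


class ActionCreateSketch(generics.CreateAPIView):
    """Hypothetical view illustrating the exception translation used in the patches above."""

    def perform_create(self, serializer):
        try:
            serializer.save(user=self.request.user)
        except (ValueError, DjangoValidationError) as exc:
            # Model- or machine-level failures (e.g. submitting a preprint with
            # no primary file) surface as a 400 with a readable message.
            raise ValidationError(str(exc)) from exc
```

The tests added in PATCH 028 exercise exactly this path: a submit action on a preprint without files now returns 400 rather than a server error.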
    From 33e86b381d10ab47292c710d0c93ffa71412e949 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Mon, 30 Jun 2025 18:14:04 +0300 Subject: [PATCH 035/176] upgrade django to 4.2.17 (#11173) ## Purpose Because Django 4.2.15 version has a vulnerability, it was upgraded to 4.2.17 ## Changes Updated pyproject.toml and lock file ## Ticket https://openscience.atlassian.net/browse/ENG-8176 --- poetry.lock | 11 ++++++----- pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 27061024317..28d99cf780e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "amqp" @@ -926,13 +926,13 @@ files = [ [[package]] name = "django" -version = "4.2.15" +version = "4.2.17" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.8" files = [ - {file = "Django-4.2.15-py3-none-any.whl", hash = "sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30"}, - {file = "Django-4.2.15.tar.gz", hash = "sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a"}, + {file = "Django-4.2.17-py3-none-any.whl", hash = "sha256:3a93350214ba25f178d4045c0786c61573e7dbfa3c509b3551374f1e11ba8de0"}, + {file = "Django-4.2.17.tar.gz", hash = "sha256:6b56d834cc94c8b21a8f4e775064896be3b4a4ca387f2612d4406a5927cd2fdc"}, ] [package.dependencies] @@ -1113,6 +1113,7 @@ python-versions = "*" files = [ {file = "django-sendgrid-v5-1.2.3.tar.gz", hash = "sha256:3887aafbb10d5b808efc2c1031dcd96fd357d542eb5affe38fef07cc0f3cfae9"}, {file = "django_sendgrid_v5-1.2.3-py2.py3-none-any.whl", hash = "sha256:2d2fa8a085d21c95e5f97fc60b61f199ccc57a27df8da90cd3f29a5702346dc6"}, + {file = "django_sendgrid_v5-1.2.3-py3-none-any.whl", hash = "sha256:f6a44ee37c1c3cc7d683a43c55ead530417be1849a8a41bde02b158009559d9d"}, ] [package.dependencies] @@ -4492,4 +4493,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "81b3fc071f1be070d1072d4cfe1a45c8c44815e803c4ba17cf6da85a6b7b3894" +content-hash = "97027b7b20e0909d572fa21fc49a80fc5d67cfc61f40262928e239086a6c46cf" diff --git a/pyproject.toml b/pyproject.toml index 5e74a6defb3..dab80daf6da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,7 @@ requests-oauthlib = "1.3.1" sentry-sdk = {version= "2.2.0", extras = ["django", "flask", "celery"]} django-redis = "5.4.0" # API requirements -Django = "4.2.15" +Django = "4.2.17" djangorestframework = "3.15.1" django-cors-headers = "4.3.1" djangorestframework-bulk = "0.2.1" From 6c23802d6579f0181bf956194a15b13ccf35d093 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Mon, 30 Jun 2025 20:05:58 +0300 Subject: [PATCH 036/176] added retry to avoid race condition (#11179) ## Purpose User received an email of archive failure regardless of it was successful. It may happen because `archive_success` is run asynchronously https://github.com/CenterForOpenScience/osf.io/blob/2328dd60f55e9c1281dcb29dcd45a78a7fd2cc5f/website/archiver/listeners.py#L33-L49 and may be finished before the main thread finishes archiving process. So at first `archive_success` is processed and no files found, thus email is sent, then the main thread finishes files processing and this archiving is successful actually. 
Also it's possible that the celery queue had too many tasks to process and when the main thread finishes archiving, user sees his registration and when celery processes `archive_success` tasks that fails, user receives this email. ## Changes Added a one-time retry. ## Ticket https://openscience.atlassian.net/browse/ENG-8175?atlOrigin=eyJpIjoiMjg4MWM1YWI1ZTE3NDMyZmEyODk2Y2QxZjlhNjFlOGQiLCJwIjoiaiJ9 --- website/archiver/tasks.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/website/archiver/tasks.py b/website/archiver/tasks.py index f8c3b18feb1..42e5bfb568b 100644 --- a/website/archiver/tasks.py +++ b/website/archiver/tasks.py @@ -118,7 +118,7 @@ def on_failure(self, exc, task_id, args, kwargs, einfo): dst.save() sentry.log_message( - 'An error occured while archiving node', + f'An error occured while archiving node: {src._id} and registration: {dst._id}', extra_data={ 'source node guid': src._id, 'registration node guid': dst._id, @@ -325,9 +325,9 @@ def archive(job_pk): ) -@celery_app.task(base=ArchiverTask, ignore_result=False) +@celery_app.task(bind=True, base=ArchiverTask, ignore_result=False, max_retries=1, default_retry_delay=60 * 5, acks_late=True) @logged('archive_success') -def archive_success(dst_pk, job_pk): +def archive_success(self, dst_pk, job_pk): """Archiver's final callback. For the time being the use case for this task is to rewrite references to files selected in a registration schema (the Prereg Challenge being the first to expose this feature). The created references point @@ -352,7 +352,17 @@ def archive_success(dst_pk, job_pk): # Update file references in the Registration's responses to point to the archived # file on the Registration instead of the "live" version on the backing project - utils.migrate_file_metadata(dst) + try: + utils.migrate_file_metadata(dst) + except ArchivedFileNotFound as err: + sentry.log_message( + f'Some files were not found while archiving the node {dst_pk}', + extra_data={ + 'missing_files': err.missing_files, + }, + ) + self.retry(exc=err) + job = ArchiveJob.load(job_pk) if not job.sent: job.sent = True From ffaef7abe724e7a42f8ecb6001f7c4a523da4138 Mon Sep 17 00:00:00 2001 From: antkryt Date: Mon, 30 Jun 2025 20:11:16 +0300 Subject: [PATCH 037/176] [ENG-8096] Admins on projects are unable to reject user access requests (#11163) ## Purpose Add default value when reject access request ## Ticket https://openscience.atlassian.net/browse/ENG-8096 --- website/static/js/accessRequestManager.js | 2 ++ website/templates/project/contributors.mako | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/website/static/js/accessRequestManager.js b/website/static/js/accessRequestManager.js index 96446cd3a34..bed7b65696f 100644 --- a/website/static/js/accessRequestManager.js +++ b/website/static/js/accessRequestManager.js @@ -62,6 +62,8 @@ var AccessRequestModel = function(accessRequest, pageOwner, isRegistration, isPa self.respondToAccessRequest = function(trigger, data, event) { $osf.trackClick('button', 'click', trigger + '-project-access'); $osf.block(); + data = data || {}; + var requestUrl = $osf.apiV2Url('actions/requests/nodes/'); var payload = self.requestAccessPayload(trigger, data.permissions, data.visible); var request = $osf.ajaxJSON( diff --git a/website/templates/project/contributors.mako b/website/templates/project/contributors.mako index d58a04f7bd7..1736d73cf53 100644 --- a/website/templates/project/contributors.mako +++ b/website/templates/project/contributors.mako @@ -433,7 
+433,10 @@ visible: visible() })}" > Add - +
    From 2beb44ef08c291b6ce1268117f84e08a664afb66 Mon Sep 17 00:00:00 2001 From: antkryt Date: Mon, 30 Jun 2025 20:13:46 +0300 Subject: [PATCH 038/176] fix content overflow for node page (#11182) ## Purpose fix content overflow on the node admin page ## Ticket https://openscience.atlassian.net/browse/ENG-8063 --- admin/templates/nodes/contributors.html | 98 +++++++++++++------------ admin/templates/nodes/node.html | 2 +- 2 files changed, 51 insertions(+), 49 deletions(-) diff --git a/admin/templates/nodes/contributors.html b/admin/templates/nodes/contributors.html index 373f121cb2a..945e0bf7578 100644 --- a/admin/templates/nodes/contributors.html +++ b/admin/templates/nodes/contributors.html @@ -3,58 +3,60 @@ Contributors - - - - - - - - {% if perms.osf.change_node %} - - {% endif %} - - - - {% for user in node.contributors %} +
    +
    EmailNamePermissionsActions
    + - - - + + + + {% if perms.osf.change_node %} + + {% endif %} + + + + {% for user in node.contributors %} + + + + {% if perms.osf.change_node %} + - {% endif %} - - {% endfor %} - -
    - - {{ user }} - - {{ user.fullname }}{% get_permissions user node %}EmailNamePermissionsActions
    - Remove - {{ user.fullname }}{% get_permissions user node %} + Remove + - -
    + + {% endif %} + + {% endfor %} + + + \ No newline at end of file diff --git a/admin/templates/nodes/node.html b/admin/templates/nodes/node.html index 33b5731e32c..dba2973116b 100644 --- a/admin/templates/nodes/node.html +++ b/admin/templates/nodes/node.html @@ -27,7 +27,7 @@ -
    +

    {{ node.type|cut:'osf.'|title }}: {{ node.title }} ({{node.guid}})

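The retry added in PATCH 036 above is worth spelling out: `archive_success` can run before the main archiving thread has finished writing files, so instead of immediately emailing a failure it now retries once after a delay. Below is a minimal sketch of that Celery retry pattern — it is not the OSF task itself; the app instance, broker URL, and the `find_archived_files` helper are illustrative placeholders, while the task options mirror those in the diff (`bind=True`, `max_retries=1`, `default_retry_delay=60 * 5`, `acks_late=True`).

```python
# Minimal sketch of the retry-on-race-condition pattern from PATCH 036.
# Placeholder names: `app`, `find_archived_files`; not actual OSF code.
from celery import Celery

app = Celery('archiver_sketch', broker='memory://')


class ArchivedFileNotFound(Exception):
    """Raised when archived copies of the files are not visible yet."""


def find_archived_files(registration_id):
    # Stand-in for the metadata migration that can run before the
    # main archiving thread has finished copying files.
    raise ArchivedFileNotFound(registration_id)


@app.task(bind=True, max_retries=1, default_retry_delay=60 * 5, acks_late=True)
def archive_success(self, registration_id):
    try:
        return find_archived_files(registration_id)
    except ArchivedFileNotFound as err:
        # Retry once after five minutes instead of reporting failure right
        # away; by then the files are usually in place.
        raise self.retry(exc=err)
```

With `max_retries=1`, a second failure re-raises the original exception, so a genuinely missing file still surfaces as an error rather than being silently swallowed.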
    From 689aa782ba5b7e418821fb7a223fb7fafdfbb3d6 Mon Sep 17 00:00:00 2001 From: antkryt Date: Mon, 30 Jun 2025 20:15:17 +0300 Subject: [PATCH 039/176] don't add multiple group perms for preprint provider (#11159) ## Purpose don't add multiple group perms for preprint provider ## Changes - check if user already belongs to some provider group - change checkbox to radio select ## Ticket https://openscience.atlassian.net/browse/ENG-8016 --- admin/preprint_providers/forms.py | 4 ++-- admin/preprint_providers/views.py | 8 ++++++-- admin/providers/views.py | 3 +-- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/admin/preprint_providers/forms.py b/admin/preprint_providers/forms.py index 1393aae41ef..cb7a0f1b1e9 100644 --- a/admin/preprint_providers/forms.py +++ b/admin/preprint_providers/forms.py @@ -116,10 +116,10 @@ def __init__(self, *args, provider_groups=None, **kwargs): super().__init__(*args, **kwargs) provider_groups = provider_groups or Group.objects.none() - self.fields['group_perms'] = forms.ModelMultipleChoiceField( + self.fields['group_perms'] = forms.ModelChoiceField( queryset=provider_groups, required=False, - widget=forms.CheckboxSelectMultiple + widget=forms.RadioSelect ) user_id = forms.CharField(required=True, max_length=5, min_length=5) diff --git a/admin/preprint_providers/views.py b/admin/preprint_providers/views.py index 4c7439f4554..d841981fe84 100644 --- a/admin/preprint_providers/views.py +++ b/admin/preprint_providers/views.py @@ -481,10 +481,14 @@ def form_valid(self, form): if not osf_user: raise Http404(f'OSF user with id "{user_id}" not found. Please double check.') - for group in form.cleaned_data.get('group_perms'): - self.target_provider.add_to_group(osf_user, group) + if osf_user.has_groups(self.target_provider.group_names): + messages.error(self.request, f'User with guid: {user_id} is already a moderator or admin') + return super().form_invalid(form) + group = form.cleaned_data.get('group_perms') + self.target_provider.add_to_group(osf_user, group) osf_user.save() + messages.success(self.request, f'Permissions update successful for OSF User {osf_user.username}!') return super().form_valid(form) diff --git a/admin/providers/views.py b/admin/providers/views.py index e42d25bb5c9..d21cd65a93b 100644 --- a/admin/providers/views.py +++ b/admin/providers/views.py @@ -29,8 +29,7 @@ def post(self, request, *args, **kwargs): messages.error(request, f'User for guid: {data["add-moderators-form"][0]} could not be found') return redirect(f'{self.url_namespace}:add_admin_or_moderator', provider_id=provider.id) - groups = [provider.format_group(name) for name in provider.groups.keys()] - if target_user.has_groups(groups): + if target_user.has_groups(provider.group_names): messages.error(request, f'User with guid: {data["add-moderators-form"][0]} is already a moderator or admin') return redirect(f'{self.url_namespace}:add_admin_or_moderator', provider_id=provider.id) From a02120f639a3e3a20fd1531c64f0bdc1903b134f Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Mon, 30 Jun 2025 20:17:25 +0300 Subject: [PATCH 040/176] fixed children/parent fields in admin templates (#11156) ## Purpose Admin templates used nonexistent fields to display children and parent of a node (potentially new fields were added but the old fields weren't replaced by new ones). The templates used `parent` and `children` fields. 
However an endpoint that adds children and parent to a node uses fields `descendants` (or through `NodeRelation` using `get_nodes` method) and `parent_node` property through `_parents` field ## Changes Use correct fields ## Notes Deletion of children nodes that are displayed now is broken. Together with Mark decided to create a separate ticket for this issue ## Ticket https://openscience.atlassian.net/browse/ENG-7969 --- admin/nodes/views.py | 1 + admin/templates/nodes/node.html | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/admin/nodes/views.py b/admin/nodes/views.py index 40cf261945d..c2bc48774bf 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -107,6 +107,7 @@ def get_context_data(self, **kwargs): 'SPAM_STATUS': SpamStatus, 'STORAGE_LIMITS': settings.StorageLimits, 'node': node, + 'children': node.get_nodes(is_node_link=False), 'duplicates': detailed_duplicates }) diff --git a/admin/templates/nodes/node.html b/admin/templates/nodes/node.html index dba2973116b..9cba3a4255c 100644 --- a/admin/templates/nodes/node.html +++ b/admin/templates/nodes/node.html @@ -85,10 +85,10 @@

    {{ node.type|cut:'osf.'|title }}: {{ node.title }} {{ node.parent }} + {{ node.parent_node.title }} {% endif %} @@ -102,7 +102,7 @@

    {{ node.type|cut:'osf.'|title }}: {{ node.title }} Date: Mon, 30 Jun 2025 13:19:08 -0400 Subject: [PATCH 041/176] [ENG-7962] Fix User Setting Response Payload async mailchimp perference change issues (#11136) ## Purpose Correct issues where PATCH responses wouldn't show preferences as updated due to async mailchimp task execution. Overrides instance model to return changed value, even though db value is unchanged until mailchimp confirms. ## Changes - adds special case for returning desired value synchronously - removes `update_osf_help_mails_subscription` because it's not used, but tested. - adds test cases ## Ticket https://openscience.atlassian.net/browse/ENG-7962 --- api/users/serializers.py | 21 +++++- .../users/views/test_user_settings_detail.py | 9 ++- .../commands/update_mailchimp_email.py | 2 +- osf/models/user.py | 2 +- scripts/fix_user_mailchimp.py | 4 +- tests/test_configure_mailing_list_view.py | 16 ---- tests/test_mailchimp.py | 7 +- tests/test_user_profile_view.py | 75 +------------------ website/mailchimp_utils.py | 48 ++++++------ website/profile/views.py | 11 +-- 10 files changed, 56 insertions(+), 139 deletions(-) diff --git a/api/users/serializers.py b/api/users/serializers.py index f31130d0420..522fa556aec 100644 --- a/api/users/serializers.py +++ b/api/users/serializers.py @@ -38,7 +38,7 @@ from osf.models.user_message import MessageTypes from osf.models.provider import AbstractProviderGroupObjectPermission from osf.utils.requests import string_type_request_headers -from website.profile.views import update_osf_help_mails_subscription, update_mailchimp_subscription +from website import settings, mailchimp_utils from website.settings import MAILCHIMP_GENERAL_LIST, OSF_HELP_LIST, CONFIRM_REGISTRATIONS_BY_EMAIL from website.util import api_v2_url @@ -568,10 +568,24 @@ class UserSettingsUpdateSerializer(UserSettingsSerializer): def update_email_preferences(self, instance, attr, value): if self.MAP_MAIL[attr] == OSF_HELP_LIST: - update_osf_help_mails_subscription(user=instance, subscribe=value) + instance.osf_mailing_lists[settings.OSF_HELP_LIST] = value + elif self.MAP_MAIL[attr] == MAILCHIMP_GENERAL_LIST: + if value: + mailchimp_utils.subscribe_mailchimp( + self.MAP_MAIL[attr], + instance._id, + ) + else: + mailchimp_utils.unsubscribe_mailchimp( + self.MAP_MAIL[attr], + instance._id, + username=instance.username, + ) else: - update_mailchimp_subscription(instance, self.MAP_MAIL[attr], value) + raise exceptions.ValidationError(detail='Invalid email preference.') + instance.save() + return instance def update_two_factor(self, instance, value, two_factor_addon): if value: @@ -606,6 +620,7 @@ def to_representation(self, instance): Overriding to_representation allows using different serializers for the request and response. 
""" context = self.context + return UserSettingsSerializer(instance=instance, context=context).data def update(self, instance, validated_data): diff --git a/api_tests/users/views/test_user_settings_detail.py b/api_tests/users/views/test_user_settings_detail.py index cf9194409f6..50da5e6cbfa 100644 --- a/api_tests/users/views/test_user_settings_detail.py +++ b/api_tests/users/views/test_user_settings_detail.py @@ -4,7 +4,7 @@ from osf_tests.factories import ( AuthUserFactory, ) -from website.settings import MAILCHIMP_GENERAL_LIST, OSF_HELP_LIST +from website.settings import OSF_HELP_LIST @pytest.fixture() @@ -200,14 +200,15 @@ def bad_payload(self, user_one): } } - @mock.patch('api.users.serializers.update_mailchimp_subscription') - def test_authorized_patch_200(self, mailchimp_mock, app, user_one, payload, url): + @mock.patch('website.mailchimp_utils.get_mailchimp_api') + def test_authorized_patch_200(self, mock_mailchimp_client, app, user_one, payload, url): res = app.patch_json_api(url, payload, auth=user_one.auth) assert res.status_code == 200 user_one.refresh_from_db() + assert res.json['data']['attributes']['subscribe_osf_help_email'] is False assert user_one.osf_mailing_lists[OSF_HELP_LIST] is False - mailchimp_mock.assert_called_with(user_one, MAILCHIMP_GENERAL_LIST, True) + mock_mailchimp_client.assert_called_with() def test_bad_payload_patch_400(self, app, user_one, bad_payload, url): res = app.patch_json_api(url, bad_payload, auth=user_one.auth, expect_errors=True) diff --git a/osf/management/commands/update_mailchimp_email.py b/osf/management/commands/update_mailchimp_email.py index 8c39a9b8edf..6b742d4706e 100644 --- a/osf/management/commands/update_mailchimp_email.py +++ b/osf/management/commands/update_mailchimp_email.py @@ -13,7 +13,7 @@ def update_mailchimp_email(): for user in OSFUser.objects.filter(deleted__isnull=True): for list_name, subscription in user.mailchimp_mailing_lists.items(): if subscription: - mailchimp_utils.subscribe_mailchimp(list_name, user._id) + mailchimp_utils.subscribe_mailchimp_async(list_name, user._id) users_updated += 1 return users_updated diff --git a/osf/models/user.py b/osf/models/user.py index 008f2affe60..28711f03e68 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -1069,7 +1069,7 @@ def save(self, *args, **kwargs): if 'username' in dirty_fields: for list_name, subscription in self.mailchimp_mailing_lists.items(): if subscription: - mailchimp_utils.subscribe_mailchimp(list_name, self._id) + mailchimp_utils.subscribe_mailchimp_async(list_name, self._id) return ret # Legacy methods diff --git a/scripts/fix_user_mailchimp.py b/scripts/fix_user_mailchimp.py index d8937f8d7a7..a687725cca2 100644 --- a/scripts/fix_user_mailchimp.py +++ b/scripts/fix_user_mailchimp.py @@ -9,7 +9,7 @@ setup_django() from osf.models import OSFUser from scripts import utils as script_utils -from website.mailchimp_utils import subscribe_mailchimp +from website.mailchimp_utils import subscribe_mailchimp_async from website import settings logger = logging.getLogger(__name__) @@ -31,7 +31,7 @@ def main(): for user in users: if settings.MAILCHIMP_GENERAL_LIST not in user.mailchimp_mailing_lists: if not dry: - subscribe_mailchimp(settings.MAILCHIMP_GENERAL_LIST, user._id) + subscribe_mailchimp_async(settings.MAILCHIMP_GENERAL_LIST, user._id) logger.info(f'User {user._id} has been subscribed to OSF general mailing list') count += 1 diff --git a/tests/test_configure_mailing_list_view.py b/tests/test_configure_mailing_list_view.py index e36afec5e02..1f6960a451b 
100644 --- a/tests/test_configure_mailing_list_view.py +++ b/tests/test_configure_mailing_list_view.py @@ -11,7 +11,6 @@ OsfTestCase, ) from website import mailchimp_utils, settings -from website.profile.views import update_osf_help_mails_subscription from website.settings import MAILCHIMP_GENERAL_LIST from website.util import api_url_for @@ -46,21 +45,6 @@ def test_get_notifications(self): res = self.app.get(url, auth=user.auth) assert mailing_lists == res.json['mailing_lists'] - def test_osf_help_mails_subscribe(self): - user = UserFactory() - user.osf_mailing_lists[settings.OSF_HELP_LIST] = False - user.save() - update_osf_help_mails_subscription(user, True) - assert user.osf_mailing_lists[settings.OSF_HELP_LIST] - - def test_osf_help_mails_unsubscribe(self): - user = UserFactory() - user.osf_mailing_lists[settings.OSF_HELP_LIST] = True - user.save() - update_osf_help_mails_subscription(user, False) - assert not user.osf_mailing_lists[settings.OSF_HELP_LIST] - - @unittest.skipIf(settings.USE_CELERY, 'Subscription must happen synchronously for this test') @mock.patch('website.mailchimp_utils.get_mailchimp_api') def test_user_choose_mailing_lists_updates_user_dict(self, mock_get_mailchimp_api): user = AuthUserFactory() diff --git a/tests/test_mailchimp.py b/tests/test_mailchimp.py index ac30e2763d4..fb269c09797 100644 --- a/tests/test_mailchimp.py +++ b/tests/test_mailchimp.py @@ -36,7 +36,7 @@ def test_subscribe_called(self, mock_get_mailchimp_api): mock_get_mailchimp_api.return_value = mock_client mock_client.lists.get.return_value = {'id': 1, 'list_name': list_name} list_id = mailchimp_utils.get_list_id_from_name(list_name) - mailchimp_utils.subscribe_mailchimp(list_name, user._id) + mailchimp_utils.subscribe_mailchimp_async(list_name, user._id) handlers.celery_teardown_request() mock_client.lists.members.create_or_update.assert_called() @@ -48,10 +48,9 @@ def test_subscribe_fake_email_does_not_throw_validation_error(self, mock_get_mai mock_client = mock.MagicMock() mock_get_mailchimp_api.return_value = mock_client mock_client.lists.members.create_or_update.side_effect = MailChimpError - mailchimp_utils.subscribe_mailchimp(list_name, user._id) - handlers.celery_teardown_request() + mailchimp_utils.subscribe_mailchimp_async(list_name, user._id) user.reload() - assert not user.mailchimp_mailing_lists[list_name] + assert not user.mailchimp_mailing_lists @mock.patch('website.mailchimp_utils.get_mailchimp_api') def test_unsubscribe_called_with_correct_arguments(self, mock_get_mailchimp_api): diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index 3e1c455c078..4350bbcd714 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -1,103 +1,30 @@ #!/usr/bin/env python3 """Views tests for the OSF.""" -from unittest.mock import MagicMock, ANY -from urllib import parse - -import datetime as dt -import time -import unittest from hashlib import md5 -from http.cookies import SimpleCookie from unittest import mock -from urllib.parse import quote_plus - import pytest -from django.core.exceptions import ValidationError -from django.utils import timezone -from flask import request, g -from lxml import html -from pytest import approx from rest_framework import status as http_status from addons.github.tests.factories import GitHubAccountFactory -from addons.osfstorage import settings as osfstorage_settings -from addons.wiki.models import WikiPage -from framework import auth -from framework.auth import Auth, authenticate, cas, core -from 
framework.auth.campaigns import ( - get_campaigns, - is_institution_login, - is_native_login, - is_proxy_login, - campaign_url_for -) -from framework.auth.exceptions import InvalidTokenError -from framework.auth.utils import impute_names_model, ensure_external_identity_uniqueness -from framework.auth.views import login_and_register_handler from framework.celery_tasks import handlers -from framework.exceptions import HTTPError, TemplateHTTPError -from framework.flask import redirect -from framework.transactions.handlers import no_auto_transaction from osf.external.spam import tasks as spam_tasks from osf.models import ( - Comment, - AbstractNode, - OSFUser, - Tag, - SpamStatus, - NodeRelation, NotableDomain ) -from osf.utils import permissions from osf_tests.factories import ( fake_email, ApiOAuth2ApplicationFactory, ApiOAuth2PersonalTokenFactory, AuthUserFactory, - CollectionFactory, - CommentFactory, - NodeFactory, - OSFGroupFactory, - PreprintFactory, - PreprintProviderFactory, - PrivateLinkFactory, - ProjectFactory, - ProjectWithAddonFactory, - RegistrationProviderFactory, - UserFactory, - UnconfirmedUserFactory, - UnregUserFactory, RegionFactory, - DraftRegistrationFactory, ) from tests.base import ( - assert_is_redirect, - capture_signals, fake, - get_default_metaschema, OsfTestCase, - assert_datetime_equal, - test_app -) -from tests.test_cas_authentication import generate_external_user_with_resp -from tests.utils import run_celery_tasks -from website import mailchimp_utils, mails, settings, language -from website.profile.utils import add_contributor_json, serialize_unregistered -from website.profile.views import update_osf_help_mails_subscription -from website.project.decorators import check_can_access -from website.project.model import has_anonymous_link -from website.project.signals import contributor_added -from website.project.views.contributor import ( - deserialize_contributors, - notify_added_contributor, - send_claim_email, - send_claim_registered_email, ) -from website.project.views.node import _should_show_wiki_widget, abbrev_authors +from website import mailchimp_utils from website.settings import MAILCHIMP_GENERAL_LIST from website.util import api_url_for, web_url_for -from website.util import rubeus -from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag @pytest.mark.enable_enqueue_task diff --git a/website/mailchimp_utils.py b/website/mailchimp_utils.py index c14df5d865e..becc770541a 100644 --- a/website/mailchimp_utils.py +++ b/website/mailchimp_utils.py @@ -39,37 +39,35 @@ def get_list_name_from_id(list_id): @queued_task @app.task @transaction.atomic +def subscribe_mailchimp_async(list_name, user_id): + return subscribe_mailchimp(list_name=list_name, user_id=user_id) + def subscribe_mailchimp(list_name, user_id): user = OSFUser.load(user_id) - user_hash = hashlib.md5(user.username.lower().encode()).hexdigest() - m = get_mailchimp_api() - list_id = get_list_id_from_name(list_name=list_name) + if not user: + raise OSFError('User not found.') if user.mailchimp_mailing_lists is None: user.mailchimp_mailing_lists = {} - try: - m.lists.members.create_or_update( - list_id=list_id, - subscriber_hash=user_hash, - data={ - 'status': 'subscribed', - 'status_if_new': 'subscribed', - 'email_address': user.username, - 'merge_fields': { - 'FNAME': user.given_name, - 'LNAME': user.family_name - } + get_mailchimp_api().lists.members.create_or_update( + list_id=get_list_id_from_name(list_name=list_name), + subscriber_hash=hashlib.md5( 
+ user.username.lower().encode() + ).hexdigest(), + data={ + 'status': 'subscribed', + 'status_if_new': 'subscribed', + 'email_address': user.username, + 'merge_fields': { + 'FNAME': user.given_name, + 'LNAME': user.family_name } - ) - except MailChimpError as error: - sentry.log_exception(error) - sentry.log_message(error) - user.mailchimp_mailing_lists[list_name] = False - else: - user.mailchimp_mailing_lists[list_name] = True - finally: - user.save() + } + ) + + user.mailchimp_mailing_lists[list_name] = True + user.save() def unsubscribe_mailchimp(list_name, user_id, username=None): @@ -120,4 +118,4 @@ def unsubscribe_mailchimp_async(list_name, user_id, username=None): def subscribe_on_confirm(user): # Subscribe user to general OSF mailing list upon account confirmation if settings.ENABLE_EMAIL_SUBSCRIPTIONS: - subscribe_mailchimp(settings.MAILCHIMP_GENERAL_LIST, user._id) + subscribe_mailchimp_async(settings.MAILCHIMP_GENERAL_LIST, user._id) diff --git a/website/profile/views.py b/website/profile/views.py index c4306b92125..c4d49147454 100644 --- a/website/profile/views.py +++ b/website/profile/views.py @@ -496,7 +496,7 @@ def user_choose_mailing_lists(auth, **kwargs): for list_name, subscribe in json_data.items(): # TO DO: change this to take in any potential non-mailchimp, something like try: update_subscription(), except IndexNotFound: update_mailchimp_subscription() if list_name == settings.OSF_HELP_LIST: - update_osf_help_mails_subscription(user=user, subscribe=subscribe) + user.osf_mailing_lists[settings.OSF_HELP_LIST] = subscribe else: update_mailchimp_subscription(user, list_name, subscribe) else: @@ -519,10 +519,7 @@ def update_mailchimp_subscription(user, list_name, subscription): :param boolean subscription: true if user is subscribed """ if subscription: - try: - mailchimp_utils.subscribe_mailchimp(list_name, user._id) - except (MailChimpError, OSFError): - pass + mailchimp_utils.subscribe_mailchimp_async(list_name, user._id) else: try: mailchimp_utils.unsubscribe_mailchimp_async(list_name, user._id, username=user.username) @@ -602,10 +599,6 @@ def impute_names(**kwargs): return auth_utils.impute_names(name) -def update_osf_help_mails_subscription(user, subscribe): - user.osf_mailing_lists[settings.OSF_HELP_LIST] = subscribe - user.save() - @must_be_logged_in def serialize_names(**kwargs): user = kwargs['auth'].user From a035dbfcf2a56b55ddd60089c70536aa507c2232 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Mon, 30 Jun 2025 20:22:10 +0300 Subject: [PATCH 042/176] improved displaying of stashed urls and approval state in admin (#11193) ## Purpose Admins feel discomfort while looking for tokens in stashed urls and approval state fields on a registration page in admin ## Changes Format fields in a json-like way ## Ticket https://openscience.atlassian.net/browse/ENG-6614?atlOrigin=eyJpIjoiMTdkMTM0YzBmNjljNDcxYTkxNjYzZDRjYTY3NjEyODMiLCJwIjoiaiJ9 --- admin/templates/nodes/embargo.html | 4 ++-- admin/templates/nodes/embargo_termination_approval.html | 4 ++-- admin/templates/nodes/registration_approval.html | 4 ++-- admin/templates/nodes/retraction.html | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/admin/templates/nodes/embargo.html b/admin/templates/nodes/embargo.html index 054f6236776..0721f2befa3 100644 --- a/admin/templates/nodes/embargo.html +++ b/admin/templates/nodes/embargo.html @@ -51,11 +51,11 @@

    - + - + diff --git a/admin/templates/nodes/embargo_termination_approval.html b/admin/templates/nodes/embargo_termination_approval.html index c8b949368be..698f3c56f4c 100644 --- a/admin/templates/nodes/embargo_termination_approval.html +++ b/admin/templates/nodes/embargo_termination_approval.html @@ -44,11 +44,11 @@ - + - +
    Approval State

    {{ embargo.approval_state }}

    {{ embargo.approval_state | pprint }}
    Stashed Urls

    {{ embargo.stashed_urls }}

    {{ embargo.stashed_urls | pprint }}
    For Existing Registration
    Approval State
    {{ embargo_termination_approval.approval_state }}
    {{ embargo_termination_approval.approval_state | pprint }}
    Stashed Urls
    {{ embargo_termination_approval.stashed_urls }}
    {{ embargo_termination_approval.stashed_urls | pprint }}
    diff --git a/admin/templates/nodes/registration_approval.html b/admin/templates/nodes/registration_approval.html index 54475cce439..b99370010ae 100644 --- a/admin/templates/nodes/registration_approval.html +++ b/admin/templates/nodes/registration_approval.html @@ -40,11 +40,11 @@ Approval State -
    {{ registration_approval.approval_state }}
    +
    {{ registration_approval.approval_state | pprint }}
    Stashed Urls -
    {{ registration_approval.stashed_urls }}
    +
    {{ registration_approval.stashed_urls | pprint }}
    notify_initiator_on_complete diff --git a/admin/templates/nodes/retraction.html b/admin/templates/nodes/retraction.html index c0468e399b1..67d8ad15327 100644 --- a/admin/templates/nodes/retraction.html +++ b/admin/templates/nodes/retraction.html @@ -46,11 +46,11 @@ Approval State -
    {{ retraction.approval_state }}
    +
    {{ retraction.approval_state | pprint }}
    Stashed Urls -
    {{ retraction.stashed_urls }}
    +
    {{ retraction.stashed_urls | pprint }}
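The template change in PATCH 042 above swaps raw `{{ ... }}` output for Django's built-in `pprint` filter, which wraps `pprint.pformat` and renders nested dicts one key per line instead of a single long repr. A small, self-contained demonstration follows — the minimal settings, the sample `stashed_urls` dict, and the throwaway templates are assumptions for illustration, not OSF code.

```python
# Demonstrates why `| pprint` is easier to scan than the default rendering.
# The settings and data here are illustrative only.
import django
from django.conf import settings
from django.template import Context, Template

settings.configure(TEMPLATES=[{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'OPTIONS': {'autoescape': False},  # keep quotes readable when printing
}])
django.setup()

stashed_urls = {  # stand-in for stashed_urls / approval_state payloads
    'user1': {'approve': 'https://osf.io/project/?token=abc123',
              'reject': 'https://osf.io/project/?token=def456'},
    'user2': {'approve': 'https://osf.io/project/?token=ghi789',
              'reject': 'https://osf.io/project/?token=jkl012'},
}

print(Template('{{ value }}').render(Context({'value': stashed_urls})))
print(Template('{{ value|pprint }}').render(Context({'value': stashed_urls})))
# The first print is one long dict repr; the second is pprint.pformat output,
# wrapped onto multiple lines so individual tokens are easy to find.
```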
    From 5af4c80d179d937146606864e1abfd85e3ef40b2 Mon Sep 17 00:00:00 2001 From: antkryt Date: Mon, 30 Jun 2025 20:23:26 +0300 Subject: [PATCH 043/176] [ENG-5862] SPAM - Fix Wiki Spamming (#11171) ## Purpose verify if spammy domains are detected ## Changes - merge check_resource_for_domains_postcommit and check_resource_with_spam_services tasks to avoid race condition - compare note to value not enum - log detected domains to sentry ## QA Notes You can test it with domain [xakw1.com](https://admin.staging3.osf.io/admin/osf/notabledomain/1180/change/?_changelist_filters=q%3Dxakw1.com) on staging3. Currently project won't be banned with this domain, regardless of whether it's public or not. ## Ticket https://openscience.atlassian.net/browse/ENG-5862 --- osf/external/spam/tasks.py | 83 +++++++++++------- osf/models/spam.py | 28 ++----- osf_tests/test_node.py | 135 ++++++++++++++++++++++++++++++ osf_tests/test_notable_domains.py | 82 ++++++++++-------- 4 files changed, 246 insertions(+), 82 deletions(-) diff --git a/osf/external/spam/tasks.py b/osf/external/spam/tasks.py index e00df54d237..9c2a348cdcd 100644 --- a/osf/external/spam/tasks.py +++ b/osf/external/spam/tasks.py @@ -1,12 +1,14 @@ import re import logging import requests +from framework import sentry from framework.celery_tasks import app as celery_app from framework.postcommit_tasks.handlers import run_postcommit from django.contrib.contenttypes.models import ContentType from django.db import transaction from osf.external.askismet.client import AkismetClient from osf.external.oopspam.client import OOPSpamClient +from osf.utils.fields import ensure_str from website import settings logger = logging.getLogger(__name__) @@ -38,11 +40,8 @@ def reclassify_domain_references(notable_domain_id, current_note, previous_note) item.referrer.save() -def _check_resource_for_domains(guid, content): - from osf.models import Guid, NotableDomain, DomainReference - resource, _ = Guid.load_referent(guid) - if not resource: - return f'{guid} not found' +def _check_resource_for_domains(resource, content): + from osf.models import NotableDomain, DomainReference spammy_domains = [] referrer_content_type = ContentType.objects.get_for_model(resource) @@ -51,7 +50,7 @@ def _check_resource_for_domains(guid, content): domain=domain, defaults={'note': note} ) - if notable_domain.note == NotableDomain.Note.EXCLUDE_FROM_ACCOUNT_CREATION_AND_CONTENT: + if notable_domain.note == NotableDomain.Note.EXCLUDE_FROM_ACCOUNT_CREATION_AND_CONTENT.value: spammy_domains.append(notable_domain.domain) DomainReference.objects.get_or_create( domain=notable_domain, @@ -61,19 +60,8 @@ def _check_resource_for_domains(guid, content): 'is_triaged': notable_domain.note not in (NotableDomain.Note.UNKNOWN, NotableDomain.Note.UNVERIFIED) } ) - if spammy_domains: - resource.confirm_spam(save=True, domains=list(spammy_domains)) - - -@run_postcommit(once_per_request=False, celery=True) -@celery_app.task(ignore_results=False, max_retries=5, default_retry_delay=60) -def check_resource_for_domains_postcommit(guid, content): - _check_resource_for_domains(guid, content) - -@celery_app.task(ignore_results=False, max_retries=5, default_retry_delay=60) -def check_resource_for_domains_async(guid, content): - _check_resource_for_domains(guid, content) + return spammy_domains def _extract_domains(content): @@ -111,16 +99,11 @@ def _extract_domains(content): yield domain, note -@run_postcommit(once_per_request=False, celery=True) -@celery_app.task(ignore_results=False, max_retries=5, 
default_retry_delay=60) -def check_resource_with_spam_services(guid, content, author, author_email, request_kwargs): +def check_resource_with_spam_services(resource, content, author, author_email, request_kwargs): """ Return statements used only for debugging and recording keeping """ any_is_spam = False - from osf.models import Guid, OSFUser - guid = Guid.load(guid) - resource = guid.referent kwargs = dict( user_ip=request_kwargs.get('remote_addr'), @@ -163,10 +146,54 @@ def check_resource_with_spam_services(guid, content, author, author_email, reque resource.spam_data['author_email'] = author_email resource.flag_spam() - if hasattr(resource, 'check_spam_user'): - user = OSFUser.objects.get(username=author_email) - resource.check_spam_user(user) + return any_is_spam + + +@run_postcommit(once_per_request=False, celery=True) +@celery_app.task(ignore_results=False, max_retries=5, default_retry_delay=60) +def check_resource_for_spam_postcommit(guid, content, author, author_email, request_headers): + from osf.models import Guid, OSFUser + + resource, _ = Guid.load_referent(guid) + if not resource: + return f'{guid} not found' + + spammy_domains = _check_resource_for_domains(resource, content) + if spammy_domains: + sentry.log_message(f"Spammy domains detected for {guid}: {spammy_domains}") + resource.confirm_spam(save=False, domains=list(spammy_domains)) + elif settings.SPAM_SERVICES_ENABLED: + request_kwargs = { + 'remote_addr': request_headers.get('Remote-Addr') or request_headers.get('Host'), # for local testing + 'user_agent': request_headers.get('User-Agent'), + 'referer': request_headers.get('Referer'), + } + for key, value in request_kwargs.items(): + request_kwargs[key] = ensure_str(value) + + check_resource_with_spam_services( + resource, + content, + author, + author_email, + request_kwargs, + ) resource.save() - return f'{resource} is spam: {any_is_spam} {resource.spam_data.get("content")}' + if hasattr(resource, 'check_spam_user'): + user = OSFUser.objects.get(username=author_email) + resource.check_spam_user(user) + + +@celery_app.task(ignore_results=False, max_retries=5, default_retry_delay=60) +def check_resource_for_domains_async(guid, content): + from osf.models import Guid + + resource, _ = Guid.load_referent(guid) + if not resource: + return f'{guid} not found' + + spammy_domains = _check_resource_for_domains(resource, content) + if spammy_domains: + resource.confirm_spam(save=True, domains=list(spammy_domains)) diff --git a/osf/models/spam.py b/osf/models/spam.py index cc7d86474e8..d2f5946533c 100644 --- a/osf/models/spam.py +++ b/osf/models/spam.py @@ -6,10 +6,9 @@ from osf.exceptions import ValidationValueError, ValidationTypeError from osf.external.askismet import tasks as akismet_tasks -from osf.external.spam.tasks import check_resource_for_domains_postcommit, check_resource_with_spam_services +from osf.external.spam.tasks import check_resource_for_spam_postcommit from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField -from osf.utils.fields import ensure_str, NonNaiveDateTimeField -from website import settings +from osf.utils.fields import NonNaiveDateTimeField logger = logging.getLogger(__name__) @@ -197,28 +196,15 @@ def do_check_spam(self, author, author_email, content, request_headers): if self.is_spammy: return - request_kwargs = { - 'remote_addr': request_headers.get('Remote-Addr') or request_headers.get('Host'), # for local testing - 'user_agent': request_headers.get('User-Agent'), - 'referer': request_headers.get('Referer'), - } if 
isinstance(self, Preprint): guid__id = self._id else: guid__id = self.guids.first()._id - check_resource_for_domains_postcommit( + + check_resource_for_spam_postcommit( guid__id, content, + author, + author_email, + request_headers, ) - - if settings.SPAM_SERVICES_ENABLED: - for key, value in request_kwargs.items(): - request_kwargs[key] = ensure_str(value) - - check_resource_with_spam_services( - guid__id, - content, - author, - author_email, - request_kwargs, - ) diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index a84a9565356..e5bae2d866a 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -67,6 +67,8 @@ from .factories import get_default_metaschema from addons.wiki.tests.factories import WikiVersionFactory, WikiFactory from osf_tests.utils import capture_signals, assert_datetime_equal, mock_archive +from osf.models.spam import SpamStatus +from osf.external.spam import tasks as spam_tasks pytestmark = pytest.mark.django_db @@ -2335,6 +2337,38 @@ def test_check_spam_only_public_node_by_default(self, project, user): project.check_spam(user, None, None) assert not project.is_public + @mock.patch('osf.models.node.get_request_and_user_id') + def test_do_check_spam_called_on_set_public(self, mock_get_request, project, user): + mock_request = { + 'headers': { + 'Remote-Addr': '1.2.3.4', + 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)', + 'Referer': 'https://osf.io' + } + } + mock_get_request.return_value = (mock_request, user._id) + + project.title = 'Spam' + project.description = 'spammy content' + project.is_public = False + project.save() + + wiki = WikiFactory(node=project, user=user) + WikiVersionFactory(wiki_page=wiki, content='Some wiki content') + + with mock.patch.object(Node, 'do_check_spam') as mock_do_check_spam: + mock_do_check_spam.return_value = False + project.set_privacy('public', auth=Auth(user)) + + mock_do_check_spam.assert_called_once() + args = mock_do_check_spam.call_args[0] + assert args[0] == user.fullname # author + assert args[1] == user.username # author email + # content + assert 'Some wiki content' in args[2] + assert project.title in args[2] + assert project.description in args[2] + @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) def test_check_spam_skips_ham_user(self, project, user): with mock.patch('osf.models.AbstractNode._get_spam_content', mock.Mock(return_value='some content!')): @@ -2464,6 +2498,107 @@ def test_multiple_privacy_changing(self, project): project.confirm_ham() assert not project.is_public + def test_get_spam_content_includes_wiki_content(self, project, user): + wiki = WikiFactory(node=project, user=user) + WikiVersionFactory(wiki_page=wiki, content='Some wiki content') + + content = project._get_spam_content() + assert 'Some wiki content' in content + + project.title = 'Test Title' + project.save() + content = project._get_spam_content() + assert 'Test Title' in content + assert 'Some wiki content' in content + + +class TestCheckResourceForSpamPostcommit: + + @pytest.fixture() + def user(self): + return UserFactory() + + @pytest.fixture() + def project(self, user): + return ProjectFactory(creator=user) + + @pytest.fixture() + def request_headers(self): + return { + 'Remote-Addr': '1.2.3.4', + 'User-Agent': 'Mozilla/5.0', + 'Referer': 'https://osf.io' + } + + @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) + @mock.patch('osf.external.spam.tasks._check_resource_for_domains') + def test_check_resource_for_spam_postcommit_with_spammy_domains(self, mock_check_domains, project, 
user): + mock_check_domains.return_value = ['spam.com'] + with mock.patch('osf.external.spam.tasks.check_resource_with_spam_services') as mock_check_services: + spam_tasks.check_resource_for_spam_postcommit( + guid=project._id, + content='Check me for spam at spam.com', + author=user.fullname, + author_email=user.username, + request_headers={} + ) + project.reload() + assert project.spam_status == SpamStatus.SPAM + assert project.spam_data['domains'] == ['spam.com'] + mock_check_services.assert_not_called() + + @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) + @mock.patch('osf.external.spam.tasks._check_resource_for_domains') + def test_check_resource_for_spam_postcommit_no_spammy_domains_checks_services(self, mock_check_domains, project, user, request_headers): + mock_check_domains.return_value = [] + with mock.patch('osf.external.spam.tasks.check_resource_with_spam_services') as mock_check_services: + mock_check_services.return_value = True + spam_tasks.check_resource_for_spam_postcommit( + guid=project._id, + content='Check me for spam', + author=user.fullname, + author_email=user.username, + request_headers=request_headers + ) + mock_check_services.assert_called_once_with( + project, + 'Check me for spam', + user.fullname, + user.username, + { + 'remote_addr': '1.2.3.4', + 'user_agent': 'Mozilla/5.0', + 'referer': 'https://osf.io' + } + ) + + @mock.patch('osf.external.spam.tasks._check_resource_for_domains') + @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', False) + def test_check_resource_for_spam_postcommit_no_spammy_domains_services_disabled(self, mock_check_domains, project, user): + mock_check_domains.return_value = [] + with mock.patch('osf.external.spam.tasks.check_resource_with_spam_services') as mock_check_services: + spam_tasks.check_resource_for_spam_postcommit( + guid=project._id, + content='Check me for spam', + author=user.fullname, + author_email=user.username, + request_headers={} + ) + mock_check_services.assert_not_called() + + @mock.patch('osf.external.spam.tasks._check_resource_for_domains') + def test_check_resource_for_spam_postcommit_checks_user(self, mock_check_domains, project, user, request_headers): + mock_check_domains.return_value = [] + with mock.patch.object(Node, 'check_spam_user') as mock_check_user: + spam_tasks.check_resource_for_spam_postcommit( + guid=project._id, + content='Check me for spam', + author=user.fullname, + author_email=user.username, + request_headers=request_headers + ) + mock_check_user.assert_called_once_with(user) + # copied from tests/test_models.py class TestPrivateLinks: diff --git a/osf_tests/test_notable_domains.py b/osf_tests/test_notable_domains.py index 68e39912a65..256cb56767f 100644 --- a/osf_tests/test_notable_domains.py +++ b/osf_tests/test_notable_domains.py @@ -153,7 +153,7 @@ def test_check_resource_for_domains_moderation_queue(self, spam_domain, factory) obj = factory() with mock.patch.object(spam_tasks.requests, 'head'): spam_tasks._check_resource_for_domains( - guid=obj.guids.first()._id, + resource=obj, content=spam_domain.geturl(), ) @@ -169,19 +169,17 @@ def test_check_resource_for_domains_moderation_queue(self, spam_domain, factory) def test_check_resource_for_domains_spam(self, spam_domain, marked_as_spam_domain, factory): obj = factory() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj, content=spam_domain.geturl(), ) - obj.reload() + assert 
spammy_domains == [spam_domain.netloc] + assert NotableDomain.objects.filter( domain=spam_domain.netloc, note=NotableDomain.Note.EXCLUDE_FROM_ACCOUNT_CREATION_AND_CONTENT ).count() == 1 - obj.reload() - assert obj.spam_status == SpamStatus.SPAM - assert obj.spam_data['domains'] == [spam_domain.netloc] assert DomainReference.objects.filter( referrer_object_id=obj.id, referrer_content_type=ContentType.objects.get_for_model(obj), @@ -233,19 +231,17 @@ def test_check_resource_for_duplicate_spam_domains(self, factory, spam_domain, m obj.spam_data['domains'] = [spam_domain.netloc] obj.save() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj, content=f'{spam_domain.geturl()}', ) - obj.reload() + assert spammy_domains == [spam_domain.netloc] + assert NotableDomain.objects.filter( domain=spam_domain.netloc, note=NotableDomain.Note.EXCLUDE_FROM_ACCOUNT_CREATION_AND_CONTENT ).count() == 1 - obj.reload() - assert obj.spam_status == SpamStatus.SPAM - assert obj.spam_data['domains'] == [spam_domain.netloc] assert DomainReference.objects.filter( referrer_object_id=obj.id, referrer_content_type=ContentType.objects.get_for_model(obj), @@ -326,10 +322,12 @@ def ignored_notable_domain(self): def test_from_spam_to_unknown_one_spam_domain(self, factory, spam_notable_domain_one, spam_notable_domain_two, unknown_notable_domain, ignored_notable_domain): obj_one = factory() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_one.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_one, content=f'{self.spam_domain_one.geturl()} {self.unknown_domain.geturl()} {self.ignored_domain.geturl()}', ) + if spammy_domains: + obj_one.confirm_spam(save=True, domains=spammy_domains) obj_one.reload() assert obj_one.spam_status == SpamStatus.SPAM @@ -344,10 +342,12 @@ def test_from_spam_to_unknown_one_spam_domain(self, factory, spam_notable_domain def test_from_spam_to_unknown_two_spam_domains(self, factory, spam_notable_domain_one, spam_notable_domain_two, unknown_notable_domain, ignored_notable_domain): obj_two = factory() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_two.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_two, content=f'{self.spam_domain_one.geturl()} {self.spam_domain_two.geturl()} {self.unknown_domain.geturl()} {self.ignored_domain.geturl()}', ) + if spammy_domains: + obj_two.confirm_spam(save=True, domains=spammy_domains) obj_two.reload() assert obj_two.spam_status == SpamStatus.SPAM @@ -364,10 +364,12 @@ def test_from_spam_to_unknown_marked_by_external(self, factory, spam_notable_dom obj_three.spam_data['who_flagged'] = 'some external spam checker' obj_three.save() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_three.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_three, content=f'{self.spam_domain_one.geturl()} {self.unknown_domain.geturl()} {self.ignored_domain.geturl()}', ) + if spammy_domains: + obj_three.confirm_spam(save=True, domains=spammy_domains) obj_three.reload() assert obj_three.spam_status == SpamStatus.SPAM @@ -382,10 +384,12 @@ def test_from_spam_to_unknown_marked_by_external(self, factory, spam_notable_dom def 
test_from_spam_to_ignored_one_spam_domain(self, factory, spam_notable_domain_one, spam_notable_domain_two, unknown_notable_domain, ignored_notable_domain): obj_one = factory() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_one.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_one, content=f'{self.spam_domain_one.geturl()} {self.unknown_domain.geturl()} {self.ignored_domain.geturl()}', ) + if spammy_domains: + obj_one.confirm_spam(save=True, domains=spammy_domains) obj_one.reload() assert obj_one.spam_status == SpamStatus.SPAM @@ -400,10 +404,12 @@ def test_from_spam_to_ignored_one_spam_domain(self, factory, spam_notable_domain def test_from_spam_to_ignored_two_spam_domains(self, factory, spam_notable_domain_one, spam_notable_domain_two, unknown_notable_domain, ignored_notable_domain): obj_two = factory() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_two.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_two, content=f'{self.spam_domain_one.geturl()} {self.spam_domain_two.geturl()} {self.unknown_domain.geturl()} {self.ignored_domain.geturl()}', ) + if spammy_domains: + obj_two.confirm_spam(save=True, domains=spammy_domains) obj_two.reload() assert obj_two.spam_status == SpamStatus.SPAM @@ -420,10 +426,12 @@ def test_from_spam_to_ignored_makred_by_external(self, factory, spam_notable_dom obj_three.spam_data['who_flagged'] = 'some external spam checker' obj_three.save() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_three.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_three, content=f'{self.spam_domain_one.geturl()} {self.unknown_domain.geturl()} {self.ignored_domain.geturl()}', ) + if spammy_domains: + obj_three.confirm_spam(save=True, domains=spammy_domains) obj_three.reload() assert obj_three.spam_status == SpamStatus.SPAM @@ -438,10 +446,12 @@ def test_from_spam_to_ignored_makred_by_external(self, factory, spam_notable_dom def test_from_unknown_to_spam_unknown_plus_ignored(self, factory, unknown_notable_domain, ignored_notable_domain): obj_one = factory() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_one.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_one, content=f'{self.unknown_domain.geturl()} {self.ignored_domain.geturl()}', ) + if spammy_domains: + obj_one.confirm_spam(save=True, domains=spammy_domains) obj_one.reload() assert obj_one.spam_status == SpamStatus.UNKNOWN @@ -456,10 +466,12 @@ def test_from_unknown_to_spam_unknown_plus_ignored(self, factory, unknown_notabl def test_from_unknown_to_spam_unknown_only(self, factory, unknown_notable_domain, ignored_notable_domain): obj_two = factory() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_two.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_two, content=f'{self.unknown_domain.geturl()}', ) + if spammy_domains: + obj_two.confirm_spam(save=True, domains=spammy_domains) obj_two.reload() assert obj_two.spam_status == SpamStatus.UNKNOWN @@ -474,10 +486,12 @@ def test_from_unknown_to_spam_unknown_only(self, factory, unknown_notable_domain def test_from_ignored_to_spam_unknown_plus_ignored(self, factory, unknown_notable_domain, 
ignored_notable_domain): obj_one = factory() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_one.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_one, content=f'{self.unknown_domain.geturl()} {self.ignored_domain.geturl()}', ) + if spammy_domains: + obj_one.confirm_spam(save=True, domains=spammy_domains) obj_one.reload() assert obj_one.spam_status == SpamStatus.UNKNOWN @@ -492,10 +506,12 @@ def test_from_ignored_to_spam_unknown_plus_ignored(self, factory, unknown_notabl def test_from_ignored_to_spam_ignored_only(self, factory, unknown_notable_domain, ignored_notable_domain): obj_two = factory() with mock.patch.object(spam_tasks.requests, 'head'): - spam_tasks._check_resource_for_domains( - guid=obj_two.guids.first()._id, + spammy_domains = spam_tasks._check_resource_for_domains( + resource=obj_two, content=f'{self.ignored_domain.geturl()}', ) + if spammy_domains: + obj_two.confirm_spam(save=True, domains=spammy_domains) obj_two.reload() assert obj_two.spam_status == SpamStatus.UNKNOWN From fdfbcc4b628c2e10a1b62a85d35c70780a59bad2 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Mon, 30 Jun 2025 20:25:34 +0300 Subject: [PATCH 044/176] switch to new UI when user views draft registration file (#11144) ## Purpose Throughout registration creation user can view attached files. However `resolve_guid` doesn't handle this case and renders the legacy UI that shows error because `get_rdf_type` raises `NotImplementedError`. So this case has no be handled by ember. However when we switch to the new UI, we get `draftnode is not a supported target type`. Also there will be some work for FE because for now ember relies on `node` relationship that draft node/registration don't have. Taking into account Futa's words, it's an unusual flow for ember ## Changes Added redirect to ember, updated `view_map` that referenced to the non-existing view `draft_nodes:node-detail` to use `draft_nodes:draft-node-detail` view ## Notes 1. I'm not confident that `draft-node` key is still used in `view_map`. From the `resolve` method of `TargetField` I see that keys are either model name in lowercase or `referent._name` that I couldn't find how is created (maybe automatically). However if it existed, there would be some errors that this view does not exist. So I left both keys but with the correct view name. 
In case you know the answer, I can remove the original key and leave only the correct one ## Ticket https://openscience.atlassian.net/jira/software/c/projects/ENG/boards/145?selectedIssue=ENG-5810 --- api/base/serializers.py | 6 +++++- website/views.py | 3 +++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/api/base/serializers.py b/api/base/serializers.py index 3c8c518ea16..60afedc1876 100644 --- a/api/base/serializers.py +++ b/api/base/serializers.py @@ -1041,7 +1041,11 @@ class TargetField(ser.Field): 'lookup_kwarg': 'preprint_id', }, 'draft-node': { - 'view': 'draft_nodes:node-detail', + 'view': 'draft_nodes:draft-node-detail', + 'lookup_kwarg': 'node_id', + }, + 'draftnode': { + 'view': 'draft_nodes:draft-node-detail', 'lookup_kwarg': 'node_id', }, 'comment': { diff --git a/website/views.py b/website/views.py index aa523f80fd1..dbe0cc1655e 100644 --- a/website/views.py +++ b/website/views.py @@ -342,6 +342,9 @@ def resolve_guid(guid, suffix=None): elif isinstance(resource, Node) and clean_suffix and (clean_suffix.startswith('metadata') or clean_suffix.startswith('components')): return use_ember_app() + elif isinstance(resource, OsfStorageFile) and isinstance(resource.target, DraftNode): + return use_ember_app() + elif isinstance(resource, BaseFileNode) and resource.is_file and not isinstance(resource.target, Preprint): if isinstance(resource.target, Registration) and flag_is_active(request, features.EMBER_FILE_REGISTRATION_DETAIL): return use_ember_app() From 87d32768688d12bf4ac1a8a02b652df233bcaaa0 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Tue, 1 Jul 2025 17:06:48 +0300 Subject: [PATCH 045/176] mailhog ci update --- .github/workflows/test-build.yml | 1 + conftest.py | 4 ++++ tasks/__init__.py | 1 - website/settings/defaults.py | 2 +- website/settings/local-dist.py | 2 +- 5 files changed, 7 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index cd7909e480b..f81196e943e 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -11,6 +11,7 @@ env: ELASTICSEARCH6_ARCHIVE: elasticsearch-6.3.1.tar.gz OSF_DB_PORT: 5432 OSF_DB_PASSWORD: postgres + GITHUB_ACTIONS: true jobs: build-cache: diff --git a/conftest.py b/conftest.py index 2270f7e7d16..7186f78f9a0 100644 --- a/conftest.py +++ b/conftest.py @@ -19,6 +19,10 @@ from osf.external.spam import tasks as spam_tasks from website import settings as website_settings +def pytest_configure(config): + if not os.getenv('GITHUB_ACTIONS') == 'true': + config.option.allow_hosts += ',mailhog' + logger = logging.getLogger(__name__) diff --git a/tasks/__init__.py b/tasks/__init__.py index c605b7490b1..103c05e0143 100755 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -526,7 +526,6 @@ def test_ci_api3_and_osf(ctx, numprocesses=None, coverage=False, testmon=False, @task def test_ci_mailhog(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): - #ci_setup(ctx) test_mailhog(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) @task diff --git a/website/settings/defaults.py b/website/settings/defaults.py index cc685af9d1b..4331f4fbd59 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -162,7 +162,7 @@ def parent_dir(path): MAILHOG_HOST = 'mailhog' MAILHOG_PORT = 1025 -MAILHOG_API_HOST = 'http://localhost:8025' +MAILHOG_API_HOST = 'http://mailhog:8025' # OR, if using Sendgrid's API # WARNING: If `SENDGRID_WHITELIST_MODE` is True, diff --git 
a/website/settings/local-dist.py b/website/settings/local-dist.py index 8676afd496a..212b9926f7e 100644 --- a/website/settings/local-dist.py +++ b/website/settings/local-dist.py @@ -64,7 +64,7 @@ MAILHOG_HOST = 'mailhog' MAILHOG_PORT = 1025 -MAILHOG_API_HOST = 'http://localhost:8025' +MAILHOG_API_HOST = 'http://mailhog:8025' # Mailchimp email subscriptions ENABLE_EMAIL_SUBSCRIPTIONS = False From 6062cefcd60b95d3c022eacd2cce4144152cd7f4 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Tue, 1 Jul 2025 18:59:39 +0300 Subject: [PATCH 046/176] [ENG-7929] Ability to move registrations to draft state (#11153) ## Purpose Admins should be able to revert registration to draft state for the support purposes ## Changes Added functionality to revert registration back to draft state Added a button in admin to revert a registration Added tests ## Notes: Currently we are discussing with Mark what to do with a registration after admin reverts it, so this ticket cannot be merged. The issue is that when admin reverts registration, it's still displayed for admin, just with deletion date and when user re-registers a draft, the system creates a new registration and the draft is linked to this newest version and admin sees both registrations. So if admin tries to revert the previous version, he'll get an error because the version doesn't have the linked draft. What I've suggested: 1. Fully delete previous registered version so that you won't be able to see it in admin. In this way if the registered version had guid 123ab and user registered the restored draft again, the draft would have another registration with another guid, like 999ss . Basically for user it'll be the same registration, just with another url to access it 2. Add another (or the same) hint for Revert to Draft button that you can't revert this previous version (the easiest solution) 3. One more solution, that I think will be the most complicated between these three, is that we can try to save the guid of an original registered version and whenever admin reverts registration and user registers it again from the same draft, we assign the saved guid to a new registration **DECISION**: Option 2 is the most reliable. 
In case we want #1 or #3 behavior, a new ticket will be created

## Ticket
https://openscience.atlassian.net/browse/ENG-7929

--- admin/nodes/urls.py | 1 + admin/nodes/views.py | 9 + admin/templates/nodes/node.html | 1 + .../templates/nodes/revert_registration.html | 26 ++ admin_tests/nodes/test_views.py | 357 +++++++++++++++++- osf/models/registrations.py | 59 +++ 6 files changed, 445 insertions(+), 8 deletions(-) create mode 100644 admin/templates/nodes/revert_registration.html diff --git a/admin/nodes/urls.py b/admin/nodes/urls.py index cb8942ebb85..1d5f6e0bac9 100644 --- a/admin/nodes/urls.py +++ b/admin/nodes/urls.py @@ -45,4 +45,5 @@ re_path(r'^(?P<guid>[a-z0-9]+)/remove_notifications/$', views.NodeRemoveNotificationView.as_view(), name='node-remove-notifications'), re_path(r'^(?P<guid>[a-z0-9]+)/update_moderation_state/$', views.NodeUpdateModerationStateView.as_view(), name='node-update-mod-state'), re_path(r'^(?P<guid>[a-z0-9]+)/resync_datacite/$', views.NodeResyncDataCiteView.as_view(), name='resync-datacite'), + re_path(r'^(?P<guid>[a-z0-9]+)/revert/$', views.NodeRevertToDraft.as_view(), name='revert-to-draft'), ] diff --git a/admin/nodes/views.py b/admin/nodes/views.py index c2bc48774bf..1789ca773c0 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -801,3 +801,12 @@ def post(self, request, *args, **kwargs): registration = self.get_object() registration.request_identifier_update('doi', create=True) return redirect(self.get_success_url()) + + +class NodeRevertToDraft(NodeMixin, View): + permission_required = 'osf.change_node' + + def post(self, request, *args, **kwargs): + registration = self.get_object() + registration.to_draft() + return redirect(self.get_success_url()) diff --git a/admin/templates/nodes/node.html b/admin/templates/nodes/node.html index 9cba3a4255c..caf8bd5ebc6 100644 --- a/admin/templates/nodes/node.html +++ b/admin/templates/nodes/node.html @@ -24,6 +24,7 @@ {% include "nodes/reindex_node_share.html" with node=node %} {% include "nodes/reindex_node_elastic.html" with node=node %} {% include "nodes/resync_datacite.html" with node=node %} + {% include "nodes/revert_registration.html" with node=node %}
    diff --git a/admin/templates/nodes/revert_registration.html b/admin/templates/nodes/revert_registration.html new file mode 100644 index 00000000000..f8760056a16 --- /dev/null +++ b/admin/templates/nodes/revert_registration.html @@ -0,0 +1,26 @@ +{% if node.is_registration %} + {% if node.can_be_reverted %} + Revert to Draft + + {% else %} + Revert to Draft + {% endif %} +{% endif %} diff --git a/admin_tests/nodes/test_views.py b/admin_tests/nodes/test_views.py index 9f978e75268..ef93ac04881 100644 --- a/admin_tests/nodes/test_views.py +++ b/admin_tests/nodes/test_views.py @@ -4,7 +4,22 @@ import pytz import datetime -from osf.models import AdminLogEntry, NodeLog, AbstractNode, RegistrationApproval +from django.utils import timezone +from django.test import RequestFactory +from django.urls import reverse +from django.core.exceptions import PermissionDenied, ValidationError +from django.contrib.auth.models import Permission +from django.contrib.contenttypes.models import ContentType + +from osf.models import ( + AdminLogEntry, + NodeLog, + AbstractNode, + RegistrationApproval, + Embargo, + SchemaResponse, + DraftRegistration, +) from admin.nodes.views import ( NodeConfirmSpamView, NodeDeleteView, @@ -26,16 +41,23 @@ from admin_tests.utilities import setup_log_view, setup_view, handle_post_view_request from api_tests.share._utils import mock_update_share from website import settings -from django.utils import timezone -from django.test import RequestFactory -from django.urls import reverse -from django.core.exceptions import PermissionDenied -from django.contrib.auth.models import Permission -from django.contrib.contenttypes.models import ContentType from framework.auth.core import Auth from tests.base import AdminTestCase -from osf_tests.factories import UserFactory, AuthUserFactory, ProjectFactory, RegistrationFactory, RegistrationApprovalFactory +from osf_tests.factories import ( + UserFactory, + AuthUserFactory, + ProjectFactory, + RegistrationFactory, + RegistrationApprovalFactory, + RegistrationProviderFactory, + DraftRegistrationFactory, + get_default_metaschema +) +from osf.utils.workflows import ApprovalStates, RegistrationModerationStates +from osf.utils import permissions +from osf.exceptions import NodeStateError + from website.settings import REGISTRATION_APPROVAL_TIME @@ -557,3 +579,322 @@ def test_request_approval_is_approved(self): view = setup_log_view(ConfirmApproveBacklogView(), request) view.post(request) assert RegistrationApproval.objects.first().state == RegistrationApproval.APPROVED + + +class TestRegistrationRevertToDraft(AdminTestCase): + + def _add_contributor(self, registration, permission, contributor): + registration.add_contributor( + contributor, + permissions=permission, + auth=self.auth, + save=True + ) + + def setUp(self): + super().setUp() + self.user = AuthUserFactory() + self.auth = Auth(self.user) + self.node = ProjectFactory(creator=self.user) + + self.contr1 = UserFactory() + self.contr2 = UserFactory() + self.contr3 = UserFactory() + + pre_moderation_draft = DraftRegistrationFactory( + title='pre-moderation-registration', + description='some description', + registration_schema=get_default_metaschema(), + provider=RegistrationProviderFactory(reviews_workflow='pre-moderation'), + creator=self.user + ) + self._add_contributor(pre_moderation_draft, permissions.ADMIN, self.contr1) + self._add_contributor(pre_moderation_draft, permissions.ADMIN, self.contr2) + self._add_contributor(pre_moderation_draft, permissions.ADMIN, self.contr3) + 
pre_moderation_draft.register(auth=self.auth, save=True) + self.pre_moderation_registration = pre_moderation_draft.registered_node + + post_moderation_draft = DraftRegistrationFactory( + title='post-moderation-registration', + description='some description', + registration_schema=get_default_metaschema(), + provider=RegistrationProviderFactory(reviews_workflow='post-moderation'), + creator=self.user + ) + self._add_contributor(post_moderation_draft, permissions.ADMIN, self.contr1) + self._add_contributor(post_moderation_draft, permissions.ADMIN, self.contr2) + self._add_contributor(post_moderation_draft, permissions.ADMIN, self.contr3) + post_moderation_draft.register(auth=self.auth, save=True) + self.post_moderation_registration = post_moderation_draft.registered_node + + self.no_moderation_draft = DraftRegistrationFactory( + title='no-moderation-registration', + description='some description', + registration_schema=get_default_metaschema(), + creator=self.user + ) + self._add_contributor(self.no_moderation_draft, permissions.ADMIN, self.contr1) + self._add_contributor(self.no_moderation_draft, permissions.ADMIN, self.contr2) + self._add_contributor(self.no_moderation_draft, permissions.ADMIN, self.contr3) + self.no_moderation_draft.add_tag('tag1', auth=self.auth, save=True) + self.no_moderation_draft.add_tag('tag2', auth=self.auth, save=True) + self.no_moderation_draft.register(auth=self.auth, save=True) + self.registration = self.no_moderation_draft.registered_node + + def get_current_version(self, registration): + return registration.schema_responses.order_by('-created').first() + + def create_new_version(self, registration, justification=None): + SchemaResponse.create_from_previous_response( + initiator=registration.creator, + previous_response=self.get_current_version(registration), + justification=justification or 'new update' + ) + + def approve_version(self, version): + version.approvals_state_machine.set_state(ApprovalStates.APPROVED) + version.save() + + def test_cannot_revert_updated_and_approved_registration_new_version(self): + self.approve_version(self.get_current_version(self.registration)) + self.create_new_version(self.registration) + self.approve_version(self.get_current_version(self.registration)) + + # registration has a few versions including the root + assert self.registration.schema_responses.count() == 2 + with self.assertRaisesMessage(NodeStateError, 'Registration has an approved update thus cannot be reverted to draft'): + self.registration.to_draft() + + def test_cannot_revert_approved_by_moderator_registration_in_pre_moderation(self): + self.pre_moderation_registration.moderation_state = RegistrationModerationStates.ACCEPTED.db_name + self.pre_moderation_registration.save() + + with self.assertRaisesMessage(NodeStateError, 'Registration was approved by moderator thus cannot be reverted to draft'): + self.pre_moderation_registration.to_draft() + + def test_cannot_revert_approved_by_moderator_registration_in_post_moderation(self): + self.post_moderation_registration.moderation_state = RegistrationModerationStates.ACCEPTED.db_name + self.post_moderation_registration.save() + + with self.assertRaisesMessage(NodeStateError, 'Registration was approved by moderator thus cannot be reverted to draft'): + self.post_moderation_registration.to_draft() + + def test_cannot_revert_registration_with_minted_doi(self): + self.registration.set_identifier_value('doi', value='some_doi') + with self.assertRaisesMessage(ValidationError, 'Registration with minted DOI cannot be 
reverted to draft state'): + self.registration.to_draft() + + def test_cannot_revert_registration_after_some_updates_but_allow_updates_is_false(self): + # registration provider has allow_updates attribute that either allows users update registration or not + # so if user created a new version while allow_updates=True and this attribute was updated to False + # we still consider this registration as updated + + self.registration.provider.allow_updates = True + self.registration.provider.save() + + assert self.registration.provider.allow_updates + + self.approve_version(self.get_current_version(self.registration)) + self.create_new_version(self.registration) + self.approve_version(self.get_current_version(self.registration)) + + self.registration.provider.allow_updates = False + self.registration.provider.save() + + with self.assertRaisesMessage(NodeStateError, 'Registration has an approved update thus cannot be reverted to draft'): + self.registration.to_draft() + + def test_cannot_revert_previous_registration_without_draft(self): + self.approve_version(self.get_current_version(self.registration)) + + # revert the initial registration + self.registration.to_draft() + + # re-register draft so that it's another registration + self.no_moderation_draft.register(auth=self.auth, save=True) + + assert self.registration.draft is None + + # revert the initial registration again without draft + with self.assertRaisesMessage(ValueError, 'This registration has not draft'): + self.registration.to_draft() + + def test_can_revert_registration_without_updates_to_draft(self): + self.approve_version(self.get_current_version(self.registration)) + from_draft = self.registration.draft + assert from_draft.deleted is None + assert from_draft.registered_node == self.registration + + self.registration.to_draft() + from_draft.reload() + + # draft instance isn't linked to the registered version + assert from_draft.registered_node is None + assert from_draft.deleted is None + # registration is deleted + assert self.registration.deleted is not None + + def test_can_revert_registration_with_unapproved_update_to_draft(self): + self.approve_version(self.get_current_version(self.registration)) + self.create_new_version(self.registration) + from_draft = self.registration.draft + + latest_version = self.registration.schema_responses.first() + assert latest_version.reviews_state == ApprovalStates.IN_PROGRESS.db_name + + self.registration.to_draft() + from_draft.reload() + + assert from_draft.deleted is None + assert from_draft.registered_node is None + + def test_all_previous_data_is_restored_after_revertion(self): + self.approve_version(self.get_current_version(self.registration)) + + draft = DraftRegistration.objects.get(registered_node=self.registration) + + assert draft.title == 'no-moderation-registration' + assert draft.description == 'some description' + assert draft.registration_schema == get_default_metaschema() + assert draft.creator == self.user + # 3 contributors + creator by default + assert draft.contributors.count() == 4 + assert draft.tags.count() == 2 + + self.registration.to_draft() + draft.reload() + self.registration.reload() + + assert draft.registered_node is None + assert self.registration.deleted is not None + assert draft.title == 'no-moderation-registration' + assert draft.description == 'some description' + assert draft.registration_schema == get_default_metaschema() + assert draft.creator == self.user + assert draft.contributors.count() == 4 + assert draft.tags.count() == 2 + + def 
test_contributors_approvals_are_reset_after_revertion(self): + contributors = self.pre_moderation_registration.contributors.all() + for contributor in contributors: + self.pre_moderation_registration.require_approval(contributor) + + assert self.pre_moderation_registration.sanction.approval_stage is ApprovalStates.UNAPPROVED + + for contributor in contributors: + self.pre_moderation_registration.sanction.approve( + user=contributor, + token=self.pre_moderation_registration.sanction.approval_state[contributor._id]['approval_token'] + ) + assert self.pre_moderation_registration.sanction.approval_state[contributor._id]['has_approved'] is True + + self.approve_version(self.get_current_version(self.pre_moderation_registration)) + + assert self.pre_moderation_registration.draft + assert self.pre_moderation_registration.sanction.approval_stage is ApprovalStates.PENDING_MODERATION + + draft = self.pre_moderation_registration.draft + self.pre_moderation_registration.to_draft() + draft.reload() + + # the original has no changes but deleted + assert self.pre_moderation_registration.sanction.approval_stage is ApprovalStates.PENDING_MODERATION + assert self.pre_moderation_registration.deleted is not None + + # it's unattached from its draft + assert draft.registered_node is None + + # draft version is shown and registered again + draft.register(auth=self.auth, save=True) + recreated_registration = draft.registered_node + + # ask approvals as it's pre-moderation + contributors = recreated_registration.contributors.all() + for contributor in contributors: + recreated_registration.require_approval(contributor) + + # the new version should have reset approvals and unapproved state + recreated_registration.sanction.approval_stage is ApprovalStates.UNAPPROVED + + for contributor in contributors: + recreated_registration.sanction.approval_state[contributor._id]['has_approved'] is False + + def test_revert_node_based_registration(self): + project = ProjectFactory( + title='node', + description='description', + creator=self.user + ) + pre_moderation_draft = DraftRegistrationFactory(branched_from=project) + self._add_contributor(pre_moderation_draft, permissions.ADMIN, self.contr1) + self._add_contributor(pre_moderation_draft, permissions.ADMIN, self.contr2) + self._add_contributor(pre_moderation_draft, permissions.ADMIN, self.contr3) + pre_moderation_draft.register(auth=self.auth, save=True) + pre_moderation_registration = pre_moderation_draft.registered_node + + assert pre_moderation_registration.branched_from_node + assert pre_moderation_draft.registered_node is not None + + pre_moderation_registration.to_draft() + pre_moderation_draft.reload() + + assert pre_moderation_draft.registered_node is None + assert pre_moderation_draft.title == 'node' + assert pre_moderation_draft.description == 'description' + + def test_can_revert_embargo_registration_to_draft(self): + self.no_moderation_draft = DraftRegistrationFactory( + title='embargo-registration', + description='some description', + registration_schema=get_default_metaschema(), + creator=self.user + ) + self.no_moderation_draft.register(auth=self.auth, save=True) + self.registration = self.no_moderation_draft.registered_node + + # embargo is created when draft registration is registered, so it's possible to do that for + # registration only + self.registration._initiate_embargo( + user=self.user, + end_date=timezone.now() + datetime.timedelta(days=3) + ) + + assert isinstance(self.registration.sanction, Embargo) + + self.registration.to_draft() + 
self.registration.reload() + + # re-register draft, thus no embargo should be present + self.no_moderation_draft.register(auth=self.auth, save=True) + self.registration = self.no_moderation_draft.registered_node + + assert self.registration.sanction is None + + def test_embargo_is_reset_after_revertion(self): + self.no_moderation_draft = DraftRegistrationFactory( + title='embargo-registration', + description='some description', + registration_schema=get_default_metaschema(), + creator=self.user + ) + self.no_moderation_draft.register(auth=self.auth, save=True) + self.registration = self.no_moderation_draft.registered_node + + self.registration._initiate_embargo( + user=self.user, + end_date=timezone.now() + datetime.timedelta(days=3) + ) + + assert isinstance(self.registration.sanction, Embargo) + + self.registration.sanction.approvals_machine.set_state(ApprovalStates.COMPLETED) + assert self.registration.sanction.approvals_machine.get_current_state()._name == ApprovalStates.COMPLETED + + self.registration.to_draft() + self.registration.reload() + + # re-register draft, thus no embargo should be present + self.no_moderation_draft.register(auth=self.auth, save=True) + self.registration = self.no_moderation_draft.registered_node + + assert self.registration.sanction is None diff --git a/osf/models/registrations.py b/osf/models/registrations.py index e62bf5f14bf..3d3e967be30 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -6,6 +6,7 @@ from django.core.exceptions import ValidationError from django.db import models from django.db.models.signals import post_save +from django.contrib.contenttypes.models import ContentType from django.dispatch import receiver from django.utils import timezone from guardian.models import ( @@ -16,6 +17,7 @@ from framework.auth import Auth from framework.exceptions import PermissionsError +from osf.models import Identifier from osf.utils.fields import NonNaiveDateTimeField, LowercaseCharField from osf.utils.permissions import ADMIN, READ, WRITE from osf.exceptions import NodeStateError, DraftRegistrationStateError @@ -58,6 +60,7 @@ from api.caching.tasks import update_storage_usage from api.caching import settings as cache_settings from api.caching.utils import storage_usage_cache +from api.providers.workflows import Workflows as ModerationWorkflows from website import settings from website.archiver import ARCHIVER_INITIATED from website.identifiers.tasks import update_doi_metadata_on_change @@ -400,6 +403,19 @@ def provider_specific_metadata(self): return provider_supported_metadata + @property + def can_be_reverted(self): + try: + self.validate_draft_conditions() + except Exception: + return False + + return True + + @property + def draft(self): + return DraftRegistration.objects.filter(registered_node=self).first() + def update_provider_specific_metadata(self, updated_values): """Updates additional_metadata fields supported by the provider. 
@@ -891,6 +907,49 @@ def related_resource_updated(self, log_action=None, api_request=None, **log_para update_doi_metadata_on_change(target_guid=self._id) + + def validate_draft_conditions(self): + # Registration shouldn't have any approved updated versions besides the base one + if self.schema_responses.exclude( + previous_response=None + ).filter( + reviews_state=ApprovalStates.APPROVED.db_name + ).exists(): + raise NodeStateError('Registration has an approved update thus cannot be reverted to draft') + + # Registration shouldn't be approved by moderator in pre/post-moderation + if ( + self.provider.reviews_workflow in [ + ModerationWorkflows.PRE_MODERATION.value, + ModerationWorkflows.POST_MODERATION.value + ] and + self.moderation_state == RegistrationModerationStates.ACCEPTED.db_name + ): + raise NodeStateError('Registration was approved by moderator thus cannot be reverted to draft') + + # Registration shouldn't have minted DOI + doi_exists = Identifier.objects.filter( + category='doi', + content_type_id=ContentType.objects.get_for_model(Registration).id, + deleted__isnull=True, + object_id=self.id, + ).exists() + if doi_exists: + raise ValidationError('Registration with minted DOI cannot be reverted to draft state') + + if not self.draft: + raise ValueError('This registration has not draft') + + def to_draft(self): + self.validate_draft_conditions() + + # unattach registration object from draft version so that it's draft again + draft = self.draft + draft.registered_node = None + draft.save() + + self.deleted = timezone.now() + self.save() + class Meta: # custom permissions for use in the OSF Admin App permissions = (

From 4391db22f85a54c82c43cf7ca9cd1911ff919cb3 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Tue, 1 Jul 2025 20:00:16 +0300 Subject: [PATCH 047/176] added a route to download node metadata (#11215)

## Purpose
So right now, our firewall rules aren't advanced enough to let requests for `/<guid>/metadata` through to OSF on staging4. Instead, we have to start with a stable string and put the dynamic part afterward, hence `/metadata/<guid>/`, which we can match with `/metadata/*` in the firewall.
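For illustration, a request against the new path might look like the sketch below (the host and guid are placeholders; the `format` query parameter falls back to `datacite-json` in the view added by this patch):

```python
# Hypothetical client-side usage; 'staging.example.org' and 'abc12' are placeholders.
import requests

resp = requests.get(
    'https://staging.example.org/metadata/abc12/',  # stable '/metadata/' prefix comes first
    params={'format': 'datacite-json'},             # same default the view applies
)
resp.raise_for_status()
with open('abc12-datacite.json', 'wb') as f:
    f.write(resp.content)
```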
## Changes Added a corresponding route ## Ticket https://openscience.atlassian.net/browse/ENG-8261 --- website/routes.py | 7 ++++++- website/views.py | 6 ++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/website/routes.py b/website/routes.py index 7b0f325fa9f..f2bc4f607e9 100644 --- a/website/routes.py +++ b/website/routes.py @@ -412,7 +412,12 @@ def make_url_map(app): website_views.dashboard, notemplate ), - + Rule( + '/metadata//', + 'get', + website_views.metadata_download, + notemplate + ), Rule( '/myprojects/', 'get', diff --git a/website/views.py b/website/views.py index dbe0cc1655e..dd3b7b021ff 100644 --- a/website/views.py +++ b/website/views.py @@ -434,3 +434,9 @@ def guid_metadata_download(guid, resource, metadata_format): 'Content-Disposition': f'attachment; filename={result.filename}', }, ) + + +def metadata_download(guid): + format_arg = request.args.get('format', 'datacite-json') + resource = Guid.load(guid) + return guid_metadata_download(guid, resource, format_arg) From 300524c5d537640018a3fdbe34516b72ccb50585 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 2 Jul 2025 10:47:02 -0400 Subject: [PATCH 048/176] fix backward compat issues and remove old tests --- api/subscriptions/serializers.py | 6 +- api/subscriptions/views.py | 109 +- .../views/test_subscriptions_detail.py | 13 +- .../views/test_subscriptions_list.py | 46 +- osf/models/notification.py | 15 +- tests/test_notifications.py | 1587 ----------------- tests/test_user_profile_view.py | 73 +- 7 files changed, 158 insertions(+), 1691 deletions(-) delete mode 100644 tests/test_notifications.py diff --git a/api/subscriptions/serializers.py b/api/subscriptions/serializers.py index ceb6d602db7..d37a8342564 100644 --- a/api/subscriptions/serializers.py +++ b/api/subscriptions/serializers.py @@ -17,7 +17,11 @@ class SubscriptionSerializer(JSONAPISerializer): 'frequency', ]) - id = ser.CharField(read_only=True) + id = ser.CharField( + read_only=True, + source='legacy_id', + help_text='The id of the subscription fixed for backward compatibility', + ) event_name = ser.CharField(read_only=True) frequency = FrequencyField(source='message_frequency', required=True) diff --git a/api/subscriptions/views.py b/api/subscriptions/views.py index e8c48b421b9..8932b03ea67 100644 --- a/api/subscriptions/views.py +++ b/api/subscriptions/views.py @@ -1,8 +1,11 @@ -from pyasn1_modules.rfc5126 import ContentType +from django.db.models import Value, When, Case, F, Q, OuterRef, Subquery +from django.db.models.fields import CharField, IntegerField +from django.db.models.functions import Concat, Cast +from django.contrib.contenttypes.models import ContentType from rest_framework import generics from rest_framework import permissions as drf_permissions from rest_framework.exceptions import NotFound -from django.core.exceptions import ObjectDoesNotExist +from django.core.exceptions import ObjectDoesNotExist, PermissionDenied from framework.auth.oauth_scopes import CoreScopes from api.base.views import JSONAPIBaseView @@ -19,9 +22,9 @@ CollectionProvider, PreprintProvider, RegistrationProvider, - AbstractProvider, + AbstractProvider, AbstractNode, Preprint, OSFUser, ) -from osf.models.notification import NotificationSubscription +from osf.models.notification import NotificationSubscription, NotificationType class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin): @@ -38,8 +41,47 @@ class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin): required_write_scopes = [CoreScopes.NULL] 
def get_queryset(self): - return NotificationSubscription.objects.filter( - user=self.request.user, + user_guid = self.request.user._id + provider_ct = ContentType.objects.get(app_label='osf', model='abstractprovider') + + provider_subquery = AbstractProvider.objects.filter( + id=Cast(OuterRef('object_id'), IntegerField()), + ).values('_id')[:1] + + node_subquery = AbstractNode.objects.filter( + id=Cast(OuterRef('object_id'), IntegerField()), + ).values('guids___id')[:1] + + return NotificationSubscription.objects.filter(user=self.request.user).annotate( + event_name=Case( + When( + notification_type__name=NotificationType.Type.NODE_FILES_UPDATED.value, + then=Value('files_updated'), + ), + When( + notification_type__name=NotificationType.Type.USER_FILE_UPDATED.value, + then=Value('global_file_updated'), + ), + default=F('notification_type__name'), + output_field=CharField(), + ), + legacy_id=Case( + When( + notification_type__name=NotificationType.Type.NODE_FILES_UPDATED.value, + then=Concat(Subquery(node_subquery), Value('_file_updated')), + ), + When( + notification_type__name=NotificationType.Type.USER_FILE_UPDATED.value, + then=Value(f'{user_guid}_global'), + ), + When( + Q(notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS.value) & + Q(content_type=provider_ct), + then=Concat(Subquery(provider_subquery), Value('_new_pending_submissions')), + ), + default=F('notification_type__name'), + output_field=CharField(), + ), ) @@ -67,10 +109,63 @@ class SubscriptionDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView): def get_object(self): subscription_id = self.kwargs['subscription_id'] + user_guid = self.request.user._id + + provider_ct = ContentType.objects.get(app_label='osf', model='abstractprovider') + node_ct = ContentType.objects.get(app_label='osf', model='abstractnode') + + provider_subquery = AbstractProvider.objects.filter( + id=Cast(OuterRef('object_id'), IntegerField()), + ).values('_id')[:1] + + node_subquery = AbstractNode.objects.filter( + id=Cast(OuterRef('object_id'), IntegerField()), + ).values('guids___id')[:1] + + guid_id, *event_parts = subscription_id.split('_') + event = '_'.join(event_parts) if event_parts else '' + + subscription_obj = AbstractNode.load(guid_id) or Preprint.load(guid_id) or OSFUser.load(guid_id) + + if event != 'global': + obj_filter = Q( + object_id=getattr(subscription_obj, 'id', None), + content_type=ContentType.objects.get_for_model(subscription_obj.__class__), + notification_type__name__icontains=event, + ) + else: + obj_filter = Q() + try: - obj = NotificationSubscription.objects.get(id=subscription_id) + obj = NotificationSubscription.objects.annotate( + legacy_id=Case( + When( + notification_type__name=NotificationType.Type.NODE_FILES_UPDATED.value, + content_type=node_ct, + then=Concat(Subquery(node_subquery), Value('_file_updated')), + ), + When( + notification_type__name=NotificationType.Type.USER_FILE_UPDATED.value, + then=Value(f'{user_guid}_global'), + ), + When( + notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS.value, + content_type=provider_ct, + then=Concat(Subquery(provider_subquery), Value('_new_pending_submissions')), + ), + default=Value(f'{user_guid}_global'), + output_field=CharField(), + ), + ).filter(obj_filter) + except ObjectDoesNotExist: raise NotFound + + try: + obj = obj.filter(user=self.request.user).get() + except ObjectDoesNotExist: + raise PermissionDenied + self.check_object_permissions(self.request, obj) return obj diff --git 
a/api_tests/subscriptions/views/test_subscriptions_detail.py b/api_tests/subscriptions/views/test_subscriptions_detail.py index a9d880c687f..2d91e6b1083 100644 --- a/api_tests/subscriptions/views/test_subscriptions_detail.py +++ b/api_tests/subscriptions/views/test_subscriptions_detail.py @@ -19,13 +19,12 @@ def user_no_auth(self): @pytest.fixture() def notification(self, user): - return NotificationSubscriptionFactory( - user=user, - ) + return NotificationSubscriptionFactory(user=user) @pytest.fixture() def url(self, notification): - return f'/{API_BASE}subscriptions/{notification.id}/' + print('_id', notification._id) + return f'/{API_BASE}subscriptions/{notification._id}/' @pytest.fixture() def url_invalid(self): @@ -53,9 +52,7 @@ def payload_invalid(self): } } - def test_subscription_detail_invalid_user( - self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid - ): + def test_subscription_detail_invalid_user(self, app, user, user_no_auth, notification, url, payload): res = app.get( url, auth=user_no_auth.auth, @@ -79,7 +76,7 @@ def test_subscription_detail_valid_user( res = app.get(url, auth=user.auth) notification_id = res.json['data']['id'] assert res.status_code == 200 - assert notification_id == str(notification.id) + assert notification_id == f'{user._id}_global' def test_subscription_detail_invalid_notification_id_no_user( self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid diff --git a/api_tests/subscriptions/views/test_subscriptions_list.py b/api_tests/subscriptions/views/test_subscriptions_list.py index ad159e05a96..a0a01bf513c 100644 --- a/api_tests/subscriptions/views/test_subscriptions_list.py +++ b/api_tests/subscriptions/views/test_subscriptions_list.py @@ -1,8 +1,13 @@ import pytest from api.base.settings.defaults import API_BASE -from osf_tests.factories import AuthUserFactory, PreprintProviderFactory, ProjectFactory, \ - NotificationSubscriptionLegacyFactory, NotificationSubscriptionFactory +from osf.models import NotificationType +from osf_tests.factories import ( + AuthUserFactory, + PreprintProviderFactory, + ProjectFactory, + NotificationSubscriptionFactory +) @pytest.mark.django_db @@ -24,25 +29,42 @@ def node(self, user): @pytest.fixture() def global_user_notification(self, user): - notification = NotificationSubscriptionLegacyFactory(_id=f'{user._id}_global', user=user, event_name='global') - notification.add_user_to_subscription(user, 'email_transactional') - return notification + return NotificationSubscriptionFactory( + notification_type=NotificationType.Type.USER_FILE_UPDATED.instance, + user=user, + ) @pytest.fixture() def file_updated_notification(self, node, user): - notification = NotificationSubscriptionFactory( - _id=node._id + 'file_updated', - owner=node, - event_name='file_updated', + return NotificationSubscriptionFactory( + notification_type=NotificationType.Type.NODE_FILES_UPDATED.instance, + subscribed_object=node, + user=user, + ) + + @pytest.fixture() + def provider_notification(self, provider, user): + return NotificationSubscriptionFactory( + notification_type=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS.instance, + subscribed_object=provider, + user=user, ) - notification.add_user_to_subscription(user, 'email_transactional') - return notification @pytest.fixture() def url(self, user, node): return f'/{API_BASE}subscriptions/' - def test_list_complete(self, app, user, provider, node, global_user_notification, url): + def test_list_complete( + self, + app, + user, 
+ provider, + node, + global_user_notification, + provider_notification, + file_updated_notification, + url + ): res = app.get(url, auth=user.auth) notification_ids = [item['id'] for item in res.json['data']] # There should only be 3 notifications: users' global, node's file updates and provider's preprint added. diff --git a/osf/models/notification.py b/osf/models/notification.py index d2e4244cb0a..7f05742cb88 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -100,6 +100,7 @@ class Type(str, Enum): NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' # Provider notifications + PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION = 'provider_reviews_submission_confirmation' PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION = 'provider_reviews_moderator_submission_confirmation' PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED = 'preprint_request_withdrawal_requested' @@ -119,7 +120,6 @@ class Type(str, Enum): PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'preprint_contributor_added_preprint_node_from_osf' # Collections Submission notifications - NEW_PENDING_SUBMISSIONS = 'new_pending_submissions' COLLECTION_SUBMISSION_REMOVED_ADMIN = 'collection_submission_removed_admin' COLLECTION_SUBMISSION_REMOVED_MODERATOR = 'collection_submission_removed_moderator' COLLECTION_SUBMISSION_REMOVED_PRIVATE = 'collection_submission_removed_private' @@ -136,6 +136,11 @@ class Type(str, Enum): REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = 'registration_bulk_upload_failure_duplicates' + @property + def instance(self): + obj, created = NotificationType.objects.get_or_create(name=self.value) + return obj + @classmethod def user_types(cls): return [member for member in cls if member.name.startswith('USER_')] @@ -271,7 +276,7 @@ def clean(self): raise ValidationError(f'{self.message_frequency!r} is not allowed for {self.notification_type.name!r}.') def __str__(self) -> str: - return f'{self.user} subscribes to {self.notification_type.name} ({self.message_frequency})' + return f'<{self.user} via {self.subscribed_object} subscribes to {self.notification_type.name} ({self.message_frequency})>' class Meta: verbose_name = 'Notification Subscription' @@ -321,9 +326,11 @@ def _id(self): case 'node' | 'collection' | 'preprint': # Node-like objects: use object_id (guid) return f'{self.subscribed_object._id}_{event}' - case 'osfuser' | 'user', _: + case 'osfuser' | 'user' | None: # Global: _global - return f'{self.subscribed_object._id}_global_{event}' + return f'{self.user._id}_global' + case _: + raise NotImplementedError() class Notification(models.Model): diff --git a/tests/test_notifications.py b/tests/test_notifications.py deleted file mode 100644 index db542f4640d..00000000000 --- a/tests/test_notifications.py +++ /dev/null @@ -1,1587 +0,0 @@ -import collections -from unittest import mock - -import pytest -from babel import dates, Locale -from schema import Schema, And, Use, Or -from django.utils import timezone - -from framework.auth import Auth -from osf.models import ( - Comment, - NotificationDigest, - NotificationSubscription, - Guid, - OSFUser, - NotificationSubscriptionLegacy -) - -from website.notifications.tasks import get_users_emails, send_users_email, group_by_node, remove_notifications -from website.notifications.exceptions import InvalidSubscriptionError -from website.notifications import constants -from website.notifications import emails -from website.notifications import utils -from website 
import mails -from website.profile.utils import get_profile_image_url -from website.project.signals import contributor_removed, node_deleted -from website.reviews import listeners -from website.util import api_url_for -from website.util import web_url_for -from website import settings - -from osf_tests import factories -from osf.utils import permissions -from tests.base import capture_signals -from tests.base import OsfTestCase, NotificationTestCase - - - -class TestNotificationsModels(OsfTestCase): - - def setUp(self): - super().setUp() - # Create project with component - self.user = factories.UserFactory() - self.consolidate_auth = Auth(user=self.user) - self.parent = factories.ProjectFactory(creator=self.user) - self.node = factories.NodeFactory(creator=self.user, parent=self.parent) - - def test_has_permission_on_children(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - sub_component.add_contributor(contributor=non_admin_user) - sub_component.save() - sub_component2 = factories.NodeFactory(parent=node) - - assert node.has_permission_on_children(non_admin_user, permissions.READ) - - def test_check_user_has_permission_excludes_deleted_components(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - sub_component.add_contributor(contributor=non_admin_user) - sub_component.is_deleted = True - sub_component.save() - sub_component2 = factories.NodeFactory(parent=node) - - assert not node.has_permission_on_children(non_admin_user, permissions.READ) - - def test_check_user_does_not_have_permission_on_private_node_child(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - - assert not node.has_permission_on_children(non_admin_user,permissions.READ) - - def test_check_user_child_node_permissions_false_if_no_children(self): - non_admin_user = factories.UserFactory() - parent = factories.ProjectFactory() - parent.add_contributor(contributor=non_admin_user, permissions=permissions.READ) - parent.save() - node = factories.NodeFactory(parent=parent, category='project') - - assert not node.has_permission_on_children(non_admin_user,permissions.READ) - - def test_check_admin_has_permissions_on_private_component(self): - parent = factories.ProjectFactory() - node = factories.NodeFactory(parent=parent, category='project') - sub_component = factories.NodeFactory(parent=node) - - assert node.has_permission_on_children(parent.creator,permissions.READ) - - def test_check_user_private_node_child_permissions_excludes_pointers(self): - user = factories.UserFactory() - parent = factories.ProjectFactory() - pointed = factories.ProjectFactory(creator=user) - parent.add_pointer(pointed, Auth(parent.creator)) - parent.save() - - assert not parent.has_permission_on_children(user,permissions.READ) - - def test_new_project_creator_is_subscribed(self): - user = 
factories.UserFactory() - factories.ProjectFactory(creator=user) - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - assert len(user_subscriptions) == 1 # subscribed to file_updated - assert 'file_updated' in event_types - - def test_new_node_creator_is_not_subscribed(self): - user = factories.UserFactory() - factories.NodeFactory(creator=user) - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - - assert len(user_subscriptions) == 0 - - def test_new_project_creator_is_subscribed_with_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_digest') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'none') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_digest') - - node = factories.ProjectFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_comments') - - assert len(user_subscriptions) == 2 # subscribed to both node and user settings - assert 'file_updated' in event_types - assert 'global_file_updated' in event_types - assert file_updated_subscription.none.count() == 1 - assert file_updated_subscription.email_transactional.count() == 0 - - def test_new_node_creator_is_not_subscribed_with_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_digest') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'none') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.NodeFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - assert len(user_subscriptions) == 1 # subscribed to only user settings - assert 'global_file_updated' in event_types - - def test_subscribe_user_to_global_notfiications(self): - user = factories.UserFactory() - utils.subscribe_user_to_global_notifications(user) - subscription_event_names = list(user.notification_subscriptions.values_list('event_name', flat=True)) - for event_name in constants.USER_SUBSCRIPTIONS_AVAILABLE: - assert event_name in subscription_event_names - - def test_subscribe_user_to_registration_notifications(self): - registration = 
factories.RegistrationFactory() - with pytest.raises(InvalidSubscriptionError): - utils.subscribe_user_to_notifications(registration, self.user) - - def test_new_project_creator_is_subscribed_with_default_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.ProjectFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_file_updated') - comments_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_comments') - - assert len(user_subscriptions) == 2 # subscribed to both node and user settings - assert 'file_updated' in event_types - assert 'global_file_updated' in event_types - assert file_updated_subscription.email_transactional.count() == 1 - - def test_new_fork_creator_is_subscribed_with_default_global_settings(self): - user = factories.UserFactory() - project = factories.ProjectFactory(creator=user) - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.ForkFactory(project=project) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - node_file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_file_updated') - project_file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=project._id + '_file_updated') - - assert len(user_subscriptions) == 3 # subscribed to project, fork, and user settings - assert 'file_updated' in event_types - assert 'global_file_updated' in event_types - assert node_file_updated_subscription.email_transactional.count() == 1 - assert project_file_updated_subscription.email_transactional.count() == 1 - - def test_new_node_creator_is_not_subscribed_with_default_global_settings(self): - user = factories.UserFactory() - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_comments', - user=user, - event_name='global_comments' - ).add_user_to_subscription(user, 'email_transactional') - - 
factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_file_updated', - user=user, - event_name='global_file_updated' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_comment_replies', - user=user, - event_name='global_comment_replies' - ).add_user_to_subscription(user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_' + 'global_mentions', - user=user, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - - node = factories.NodeFactory(creator=user) - - user_subscriptions = list(utils.get_all_user_subscriptions(user)) - event_types = [sub.event_name for sub in user_subscriptions] - - assert len(user_subscriptions) == 1 # subscribed to only user settings - assert 'global_file_updated' in event_types - - - def test_contributor_subscribed_when_added_to_project(self): - user = factories.UserFactory() - contributor = factories.UserFactory() - project = factories.ProjectFactory(creator=user) - project.add_contributor(contributor=contributor) - contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor)) - event_types = [sub.event_name for sub in contributor_subscriptions] - - assert len(contributor_subscriptions) == 1 - assert 'file_updated' in event_types - - def test_contributor_subscribed_when_added_to_component(self): - user = factories.UserFactory() - contributor = factories.UserFactory() - - factories.NotificationSubscriptionLegacyFactory( - _id=contributor._id + '_' + 'global_comments', - user=contributor, - event_name='global_comments' - ).add_user_to_subscription(contributor, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=contributor._id + '_' + 'global_file_updated', - user=contributor, - event_name='global_file_updated' - ).add_user_to_subscription(contributor, 'email_transactional') - - node = factories.NodeFactory(creator=user) - node.add_contributor(contributor=contributor) - - contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor)) - event_types = [sub.event_name for sub in contributor_subscriptions] - - file_updated_subscription = NotificationSubscriptionLegacy.objects.get(_id=node._id + '_file_updated') - - assert len(contributor_subscriptions) == 2 # subscribed to both node and user settings - assert 'file_updated' in event_types - assert 'global_file_updated' in event_types - assert file_updated_subscription.email_transactional.count() == 1 - - def test_unregistered_contributor_not_subscribed_when_added_to_project(self): - user = factories.AuthUserFactory() - unregistered_contributor = factories.UnregUserFactory() - project = factories.ProjectFactory(creator=user) - project.add_unregistered_contributor( - unregistered_contributor.fullname, - unregistered_contributor.email, - Auth(user), - existing_user=unregistered_contributor - ) - - contributor_subscriptions = list(utils.get_all_user_subscriptions(unregistered_contributor)) - assert len(contributor_subscriptions) == 0 - - -class TestSubscriptionView(OsfTestCase): - - def setUp(self): - super().setUp() - self.node = factories.NodeFactory() - self.user = self.node.creator - self.registration = factories.RegistrationFactory(creator=self.user) - - def test_create_new_subscription(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - 
self.app.post(url, json=payload, auth=self.node.creator.auth) - - # check that subscription was created - event_id = self.node._id + '_' + 'comments' - s = NotificationSubscriptionLegacy.objects.get(_id=event_id) - - # check that user was added to notification_type field - assert payload['id'] == s.owner._id - assert payload['event'] == s.event_name - assert self.node.creator in getattr(s, payload['notification_type']).all() - - # change subscription - new_payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_digest' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=new_payload, auth=self.node.creator.auth) - s.reload() - assert not self.node.creator in getattr(s, payload['notification_type']).all() - assert self.node.creator in getattr(s, new_payload['notification_type']).all() - - def test_cannot_create_registration_subscription(self): - payload = { - 'id': self.registration._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - res = self.app.post(url, json=payload, auth=self.registration.creator.auth) - assert res.status_code == 400 - - def test_adopt_parent_subscription_default(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'adopt_parent' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - event_id = self.node._id + '_' + 'comments' - # confirm subscription was created because parent had default subscription - s = NotificationSubscriptionLegacy.objects.filter(_id=event_id).count() - assert 0 == s - - def test_change_subscription_to_adopt_parent_subscription_removes_user(self): - payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'email_transactional' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=self.node.creator.auth) - - # check that subscription was created - event_id = self.node._id + '_' + 'comments' - s = NotificationSubscriptionLegacy.objects.get(_id=event_id) - - # change subscription to adopt_parent - new_payload = { - 'id': self.node._id, - 'event': 'comments', - 'notification_type': 'adopt_parent' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=new_payload, auth=self.node.creator.auth) - s.reload() - - # assert that user is removed from the subscription entirely - for n in constants.NOTIFICATION_TYPES: - assert not self.node.creator in getattr(s, n).all() - - def test_configure_subscription_adds_node_id_to_notifications_configured(self): - project = factories.ProjectFactory(creator=self.user) - assert not project._id in self.user.notifications_configured - payload = { - 'id': project._id, - 'event': 'comments', - 'notification_type': 'email_digest' - } - url = api_url_for('configure_subscription') - self.app.post(url, json=payload, auth=project.creator.auth) - - self.user.reload() - - assert project._id in self.user.notifications_configured - - -class TestRemoveContributor(OsfTestCase): - - def setUp(self): - super(OsfTestCase, self).setUp() - self.project = factories.ProjectFactory() - self.contributor = factories.UserFactory() - self.project.add_contributor(contributor=self.contributor, permissions=permissions.READ) - self.project.save() - - self.subscription = NotificationSubscriptionLegacy.objects.get( - node=self.project, - _id=self.project._id + '_comments' - ) - - self.node = factories.NodeFactory(parent=self.project) - 
self.node.add_contributor(contributor=self.project.creator, permissions=permissions.ADMIN) - self.node.save() - - self.node_subscription = NotificationSubscriptionLegacy.objects.get( - _id=self.node._id + '_comments', - node=self.node - ) - self.node_subscription.add_user_to_subscription(self.node.creator, 'email_transactional') - - def test_removed_non_admin_contributor_is_removed_from_subscriptions(self): - assert self.contributor in self.subscription.email_transactional.all() - self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) - assert self.contributor not in self.project.contributors.all() - self.subscription.reload() - assert self.contributor not in self.subscription.email_transactional.all() - - def test_removed_non_parent_admin_contributor_is_removed_from_subscriptions(self): - assert self.node.creator in self.node_subscription.email_transactional.all() - self.node.remove_contributor(self.node.creator, auth=Auth(self.node.creator)) - assert self.node.creator not in self.node.contributors.all() - self.node_subscription.reload() - assert self.node.creator not in self.node_subscription.email_transactional.all() - - def test_removed_contributor_admin_on_parent_not_removed_from_node_subscription(self): - # Admin on parent project is removed as a contributor on a component. Check - # that admin is not removed from component subscriptions, as the admin - # now has read-only access. - assert self.project.creator in self.node_subscription.email_transactional.all() - self.node.remove_contributor(self.project.creator, auth=Auth(self.project.creator)) - assert self.project.creator not in self.node.contributors.all() - assert self.project.creator in self.node_subscription.email_transactional.all() - - def test_remove_contributor_signal_called_when_contributor_is_removed(self): - with capture_signals() as mock_signals: - self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator)) - assert mock_signals.signals_sent() == {contributor_removed} - - -class TestRemoveNodeSignal(OsfTestCase): - - def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self): - project = factories.ProjectFactory() - component = factories.NodeFactory(parent=project, creator=project.creator) - - s = NotificationSubscriptionLegacy.objects.filter(email_transactional=project.creator) - assert s.count() == 2 - - s = NotificationSubscriptionLegacy.objects.filter(email_transactional=component.creator) - assert s.count() == 2 - - with capture_signals() as mock_signals: - project.remove_node(auth=Auth(project.creator)) - project.reload() - component.reload() - - assert project.is_deleted - assert component.is_deleted - assert mock_signals.signals_sent() == {node_deleted} - - s = NotificationSubscriptionLegacy.objects.filter(email_transactional=project.creator) - assert s.count() == 0 - - s = NotificationSubscriptionLegacy.objects.filter(email_transactional=component.creator) - assert s.count() == 0 - - with pytest.raises(NotificationSubscriptionLegacy.DoesNotExist): - NotificationSubscriptionLegacy.objects.get(node=project) - - with pytest.raises(NotificationSubscriptionLegacy.DoesNotExist): - NotificationSubscriptionLegacy.objects.get(node=component) - - -def list_or_dict(data): - # Generator only returns lists or dicts from list or dict - if isinstance(data, dict): - for key in data: - if isinstance(data[key], dict) or isinstance(data[key], list): - yield data[key] - elif isinstance(data, list): - for item in data: - if isinstance(item, dict) or 
isinstance(item, list): - yield item - - -def has(data, sub_data): - # Recursive approach to look for a subset of data in data. - # WARNING: Don't use on huge structures - # :param data: Data structure - # :param sub_data: subset being checked for - # :return: True or False - try: - next(item for item in data if item == sub_data) - return True - except StopIteration: - lists_and_dicts = list_or_dict(data) - for item in lists_and_dicts: - if has(item, sub_data): - return True - return False - - -def subscription_schema(project, structure, level=0): - # builds a schema from a list of nodes and events - # :param project: validation type - # :param structure: list of nodes (another list) and events - # :return: schema - sub_list = [] - for item in list_or_dict(structure): - sub_list.append(subscription_schema(project, item, level=level+1)) - sub_list.append(event_schema(level)) - - node_schema = { - 'node': { - 'id': Use(type(project._id), error=f'node_id{level}'), - 'title': Use(type(project.title), error=f'node_title{level}'), - 'url': Use(type(project.url), error=f'node_{level}') - }, - 'kind': And(str, Use(lambda s: s in ('node', 'folder'), - error=f"kind didn't match node or folder {level}")), - 'nodeType': Use(lambda s: s in ('project', 'component'), error='nodeType not project or component'), - 'category': Use(lambda s: s in settings.NODE_CATEGORY_MAP, error='category not in settings.NODE_CATEGORY_MAP'), - 'permissions': { - 'view': Use(lambda s: s in (True, False), error='view permissions is not True/False') - }, - 'children': sub_list - } - if level == 0: - return Schema([node_schema]) - return node_schema - - -def event_schema(level=None): - return { - 'event': { - 'title': And(Use(str, error=f'event_title{level} not a string'), - Use(lambda s: s in constants.NOTIFICATION_TYPES, - error=f'event_title{level} not in list')), - 'description': And(Use(str, error=f'event_desc{level} not a string'), - Use(lambda s: s in constants.NODE_SUBSCRIPTIONS_AVAILABLE, - error=f'event_desc{level} not in list')), - 'notificationType': And(str, Or('adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES)), - 'parent_notification_type': Or(None, 'adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES) - }, - 'kind': 'event', - 'children': And(list, lambda l: len(l) == 0) - } - - -class TestNotificationUtils(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = factories.UserFactory() - self.project = factories.ProjectFactory(creator=self.user) - - self.user.notifications_configured[self.project._id] = True - self.user.save() - - self.node = factories.NodeFactory(parent=self.project, creator=self.user) - - self.user_subscription = [ - factories.NotificationSubscriptionFactory( - _id=self.user._id + '_' + 'global_file_updated', - user=self.user, - event_name='global_file_updated' - )] - - for x in self.user_subscription: - x.save() - for x in self.user_subscription: - x.email_transactional.add(self.user) - for x in self.user_subscription: - x.save() - - def test_to_subscription_key(self): - key = utils.to_subscription_key('xyz', 'comments') - assert key == 'xyz_comments' - - def test_from_subscription_key(self): - parsed_key = utils.from_subscription_key('xyz_comment_replies') - assert parsed_key == { - 'uid': 'xyz', - 'event': 'comment_replies' - } - - def test_get_configured_project_ids_does_not_return_user_or_node_ids(self): - configured_nodes = utils.get_configured_projects(self.user) - configured_ids = [n._id for n in configured_nodes] - # No duplicates! 
- assert len(configured_nodes) == 1 - - assert self.project._id in configured_ids - assert self.node._id not in configured_ids - assert self.user._id not in configured_ids - - def test_get_configured_project_ids_excludes_deleted_projects(self): - project = factories.ProjectFactory() - project.is_deleted = True - project.save() - assert project not in utils.get_configured_projects(self.user) - - def test_get_configured_project_ids_excludes_node_with_project_category(self): - node = factories.NodeFactory(parent=self.project, category='project') - assert node not in utils.get_configured_projects(self.user) - - def test_get_configured_project_ids_includes_top_level_private_projects_if_subscriptions_on_node(self): - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - node_comments_subscription = factories.NotificationSubscriptionLegacyFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - assert private_project in configured_project_nodes - - def test_get_configured_project_ids_excludes_private_projects_if_no_subscriptions_on_node(self): - user = factories.UserFactory() - - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - node.add_contributor(user) - - utils.remove_contributor_from_subscriptions(node, user) - - configured_project_nodes = utils.get_configured_projects(user) - assert private_project not in configured_project_nodes - - - def test_format_data_node_settings(self): - data = utils.format_data(self.user, [self.node]) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [] - } - schema = subscription_schema(self.project, ['event']) - assert schema.validate(data) - assert has(data, event) - - def test_format_includes_admin_view_only_component_subscriptions(self): - # Test private components in which parent project admins are not contributors still appear in their - # notifications settings. 
- node = factories.NodeFactory(parent=self.project) - data = utils.format_data(self.user, [self.project]) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'adopt_parent', - 'parent_notification_type': 'email_transactional' - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event'], ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_excludes_pointers(self): - project = factories.ProjectFactory() - pointed = factories.ProjectFactory() - project.add_pointer(pointed, Auth(project.creator)) - project.creator.notifications_configured[project._id] = True - project.creator.save() - configured_project_nodes = utils.get_configured_projects(project.creator) - data = utils.format_data(project.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event']) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_user_subscriptions_includes_private_parent_if_configured_children(self): - private_project = factories.ProjectFactory() - node = factories.NodeFactory(parent=private_project) - - node_comments_subscription = factories.NotificationSubscriptionLegacyFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - data = utils.format_data(node.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_data_user_subscriptions_if_children_points_to_parent(self): - private_project = factories.ProjectFactory(creator=self.user) - node = factories.NodeFactory(parent=private_project, creator=self.user) - node.save() - node_comments_subscription = factories.NotificationSubscriptionLegacyFactory( - _id=node._id + '_' + 'comments', - node=node, - event_name='comments' - ) - node_comments_subscription.save() - node_comments_subscription.email_transactional.add(node.creator) - node_comments_subscription.save() - - node.creator.notifications_configured[node._id] = True - node.creator.save() - configured_project_nodes = utils.get_configured_projects(node.creator) - data = utils.format_data(node.creator, configured_project_nodes) - event = { - 'event': { - 'title': 'comments', - 'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [], - } - schema = subscription_schema(self.project, ['event', ['event']]) - assert schema.validate(data) - assert has(data, event) - - def test_format_user_subscriptions(self): - data = 
utils.format_user_subscriptions(self.user) - expected = [ - { - 'event': { - 'title': 'global_file_updated', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_file_updated'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None, - }, - 'kind': 'event', - 'children': [] - }, { - 'event': { - 'title': 'global_reviews', - 'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_reviews'], - 'notificationType': 'email_transactional', - 'parent_notification_type': None - }, - 'kind': 'event', - 'children': [] - } - ] - - assert data == expected - - def test_format_data_user_settings(self): - data = utils.format_user_and_project_subscriptions(self.user) - expected = [ - { - 'node': { - 'id': self.user._id, - 'title': 'Default Notification Settings', - 'help': 'These are default settings for new projects you create or are added to. Modifying these settings will not modify settings on existing projects.' - }, - 'kind': 'heading', - 'children': utils.format_user_subscriptions(self.user) - }, - { - 'node': { - 'help': 'These are settings for each of your projects. Modifying these settings will only modify the settings for the selected project.', - 'id': '', - 'title': 'Project Notifications' - }, - 'kind': 'heading', - 'children': utils.format_data(self.user, utils.get_configured_projects(self.user)) - }] - assert data == expected - - -class TestCompileSubscriptions(NotificationTestCase): - def setUp(self): - super().setUp() - self.user_1 = factories.UserFactory() - self.user_2 = factories.UserFactory() - self.user_3 = factories.UserFactory() - self.user_4 = factories.UserFactory() - # Base project + 1 project shared with 3 + 1 project shared with 2 - self.base_project = factories.ProjectFactory(is_public=False, creator=self.user_1) - self.shared_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1) - self.private_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1) - # Adding contributors - for node in [self.base_project, self.shared_node, self.private_node]: - node.add_contributor(self.user_2, permissions=permissions.ADMIN) - self.base_project.add_contributor(self.user_3, permissions=permissions.WRITE) - self.shared_node.add_contributor(self.user_3, permissions=permissions.WRITE) - # Setting basic subscriptions - self.base_sub = factories.NotificationSubscriptionLegacyFactory( - _id=self.base_project._id + '_file_updated', - node=self.base_project, - event_name='file_updated' - ) - self.base_sub.save() - self.shared_sub = factories.NotificationSubscriptionLegacyFactory( - _id=self.shared_node._id + '_file_updated', - node=self.shared_node, - event_name='file_updated' - ) - self.shared_sub.save() - self.private_sub = factories.NotificationSubscriptionLegacyFactory( - _id=self.private_node._id + '_file_updated', - node=self.private_node, - event_name='file_updated' - ) - self.private_sub.save() - - def test_no_subscription(self): - node = factories.NodeFactory() - result = emails.compile_subscriptions(node, 'file_updated') - assert {'email_transactional': [], 'none': [], 'email_digest': []} == result - - def test_no_subscribers(self): - node = factories.NodeFactory() - node_sub = factories.NotificationSubscriptionLegacyFactory( - _id=node._id + '_file_updated', - node=node, - event_name='file_updated' - ) - node_sub.save() - result = emails.compile_subscriptions(node, 'file_updated') - assert {'email_transactional': [], 'none': [], 'email_digest': []} == result - - def 
test_creator_subbed_parent(self): - # Basic sub check - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - result = emails.compile_subscriptions(self.base_project, 'file_updated') - assert {'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []} == result - - def test_creator_subbed_to_parent_from_child(self): - # checks the parent sub is the one to appear without a child sub - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - result = emails.compile_subscriptions(self.shared_node, 'file_updated') - assert {'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []} == result - - def test_creator_subbed_to_both_from_child(self): - # checks that only one sub is in the list. - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - self.shared_sub.email_transactional.add(self.user_1) - self.shared_sub.save() - result = emails.compile_subscriptions(self.shared_node, 'file_updated') - assert {'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []} == result - - def test_creator_diff_subs_to_both_from_child(self): - # Check that the child node sub overrides the parent node sub - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - self.shared_sub.none.add(self.user_1) - self.shared_sub.save() - result = emails.compile_subscriptions(self.shared_node, 'file_updated') - assert {'email_transactional': [], 'none': [self.user_1._id], 'email_digest': []} == result - - def test_user_wo_permission_on_child_node_not_listed(self): - # Tests to see if a user without permission gets an Email about a node they cannot see. - self.base_sub.email_transactional.add(self.user_3) - self.base_sub.save() - result = emails.compile_subscriptions(self.private_node, 'file_updated') - assert {'email_transactional': [], 'none': [], 'email_digest': []} == result - - def test_several_nodes_deep(self): - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - node2 = factories.NodeFactory(parent=self.shared_node) - node3 = factories.NodeFactory(parent=node2) - node4 = factories.NodeFactory(parent=node3) - node5 = factories.NodeFactory(parent=node4) - subs = emails.compile_subscriptions(node5, 'file_updated') - assert subs == {'email_transactional': [self.user_1._id], 'email_digest': [], 'none': []} - - def test_several_nodes_deep_precedence(self): - self.base_sub.email_transactional.add(self.user_1) - self.base_sub.save() - node2 = factories.NodeFactory(parent=self.shared_node) - node3 = factories.NodeFactory(parent=node2) - node4 = factories.NodeFactory(parent=node3) - node4_subscription = factories.NotificationSubscriptionLegacyFactory( - _id=node4._id + '_file_updated', - node=node4, - event_name='file_updated' - ) - node4_subscription.save() - node4_subscription.email_digest.add(self.user_1) - node4_subscription.save() - node5 = factories.NodeFactory(parent=node4) - subs = emails.compile_subscriptions(node5, 'file_updated') - assert subs == {'email_transactional': [], 'email_digest': [self.user_1._id], 'none': []} - - -class TestMoveSubscription(NotificationTestCase): - def setUp(self): - super().setUp() - self.blank = {key: [] for key in constants.NOTIFICATION_TYPES} # For use where it is blank. 
- self.user_1 = factories.AuthUserFactory() - self.auth = Auth(user=self.user_1) - self.user_2 = factories.AuthUserFactory() - self.user_3 = factories.AuthUserFactory() - self.user_4 = factories.AuthUserFactory() - self.project = factories.ProjectFactory(creator=self.user_1) - self.private_node = factories.NodeFactory(parent=self.project, is_public=False, creator=self.user_1) - self.sub = factories.NotificationSubscriptionLegacyFactory( - _id=self.project._id + '_file_updated', - node=self.project, - event_name='file_updated' - ) - self.sub.email_transactional.add(self.user_1) - self.sub.save() - self.file_sub = factories.NotificationSubscriptionLegacyFactory( - _id=self.project._id + '_xyz42_file_updated', - node=self.project, - event_name='xyz42_file_updated' - ) - self.file_sub.save() - - def test_separate_users(self): - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - subbed, removed = utils.separate_users( - self.private_node, [self.user_2._id, self.user_3._id, self.user_4._id] - ) - assert [self.user_2._id, self.user_3._id] == subbed - assert [self.user_4._id] == removed - - def test_event_subs_same(self): - self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - assert {'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []} == results - - def test_event_nodes_same(self): - self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.project) - assert {'email_transactional': [], 'email_digest': [], 'none': []} == results - - def test_move_sub(self): - # Tests old sub is replaced with new sub. - utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) - self.file_sub.reload() - assert 'abc42_file_updated' == self.file_sub.event_name - assert self.private_node == self.file_sub.owner - assert self.private_node._id + '_abc42_file_updated' == self.file_sub._id - - def test_move_sub_with_none(self): - # Attempt to reproduce an error that is seen when moving files - self.project.add_contributor(self.user_2, permissions=permissions.WRITE, auth=self.auth) - self.project.save() - self.file_sub.none.add(self.user_2) - self.file_sub.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - assert {'email_transactional': [], 'email_digest': [], 'none': [self.user_2._id]} == results - - def test_remove_one_user(self): - # One user doesn't have permissions on the node the sub is moved to. Should be listed. 
- self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - assert {'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []} == results - - def test_remove_one_user_warn_another(self): - # Two users do not have permissions on new node, but one has a project sub. Both should be listed. - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.save() - self.project.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.project.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - self.file_sub.email_transactional.add(self.user_2, self.user_4) - - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) - assert {'email_transactional': [self.user_4._id], 'email_digest': [self.user_3._id], 'none': []} == results - assert self.sub.email_digest.filter(id=self.user_3.id).exists() # Is not removed from the project subscription. - - def test_warn_user(self): - # One user with a project sub does not have permission on new node. User should be listed. - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.save() - self.project.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.project.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - self.file_sub.email_transactional.add(self.user_2) - results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node) - utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) - assert {'email_transactional': [], 'email_digest': [self.user_3._id], 'none': []} == results - assert self.user_3 in self.sub.email_digest.all() # Is not removed from the project subscription. 
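
A note on the contract the removed tests above pin down: utils.users_to_remove reports, grouped by send type, the subscribers of a file event who would lose access when that subscription moves from self.project to self.private_node, and utils.move_subscription then re-points the subscription to the new node and event name. The sketch below is a minimal, plain-Python illustration of that permission check only; the function and variable names are hypothetical and it does not use the real OSF models or utilities.

NOTIFICATION_TYPES = ('email_transactional', 'email_digest', 'none')

def users_to_remove_sketch(subscribers_by_type, target_node_contributors):
    """Group subscribers who cannot read the target node by send type."""
    # subscribers_by_type: notification type -> set of user ids subscribed
    # to the event on the source node.
    # target_node_contributors: user ids with read access on the destination node.
    removed = {key: [] for key in NOTIFICATION_TYPES}
    for send_type, user_ids in subscribers_by_type.items():
        for user_id in user_ids:
            if user_id not in target_node_contributors:
                removed[send_type].append(user_id)
    return removed

# Mirrors test_remove_one_user above: user_4 is subscribed to the file event
# but is not a contributor on the private component, so only user_4 is listed.
assert users_to_remove_sketch(
    {'email_transactional': {'user_2', 'user_3', 'user_4'}, 'email_digest': set(), 'none': set()},
    {'user_1', 'user_2', 'user_3'},
) == {'email_transactional': ['user_4'], 'email_digest': [], 'none': []}
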
- - def test_user_node_subbed_and_not_removed(self): - self.project.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.project.save() - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node) - assert not self.file_sub.email_digest.filter().exists() - - # Regression test for commit ea15186 - def test_garrulous_event_name(self): - self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4) - self.file_sub.save() - self.private_node.add_contributor(self.user_2, permissions=permissions.ADMIN, auth=self.auth) - self.private_node.add_contributor(self.user_3, permissions=permissions.WRITE, auth=self.auth) - self.private_node.save() - results = utils.users_to_remove('complicated/path_to/some/file/ASDFASDF.txt_file_updated', self.project, self.private_node) - assert {'email_transactional': [], 'email_digest': [], 'none': []} == results - -class TestSendEmails(NotificationTestCase): - def setUp(self): - super().setUp() - self.user = factories.AuthUserFactory() - self.project = factories.ProjectFactory() - self.user_subscription = factories.NotificationSubscriptionLegacyFactory( - _id=self.user._id + '_' + 'global_comment_replies', - node=self.node, - event_name='global_comment_replies' - ) - self.user_subscription.email_transactional.add(self.user) - self.user_subscription.save() - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_mentions_does_not_send_to_mentioned_users_subscribed_to_none(self, mock_store): - node = factories.NodeFactory() - user = factories.UserFactory() - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_global_mentions', - node=self.node, - event_name='global_mentions' - ).add_user_to_subscription(user, 'none') - time_now = timezone.now() - sent = emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id]) - assert not mock_store.called - assert sent == [] - - @mock.patch('website.notifications.emails.store_emails') - def test_notify_mentions_does_send_to_mentioned_users(self, mock_store): - user = factories.UserFactory() - factories.NotificationSubscriptionLegacyFactory( - _id=user._id + '_global_mentions', - node=self.node, - event_name='global_mentions' - ).add_user_to_subscription(user, 'email_transactional') - node = factories.ProjectFactory(creator=user) - time_now = timezone.now() - emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id]) - assert mock_store.called - mock_store.assert_called_with( - [node.creator._id], - 'email_transactional', - 'global_mentions', - user, - node, - time_now, - template=None, - new_mentions=[node.creator._id], - is_creator=(user == node.creator), - ) - - def test_get_settings_url_for_node(self): - url = emails.get_settings_url(self.project._id, self.user) - assert url == self.project.absolute_url + 'settings/' - - def test_get_settings_url_for_user(self): - url = emails.get_settings_url(self.user._id, self.user) - assert url == web_url_for('user_notifications', _absolute=True) - - def test_get_node_lineage(self): - node_lineage = emails.get_node_lineage(self.node) - assert node_lineage == [self.project._id, self.node._id] - - def test_fix_locale(self): - assert emails.fix_locale('en') == 'en' - assert 
emails.fix_locale('de_DE') == 'de_DE' - assert emails.fix_locale('de_de') == 'de_DE' - - def test_localize_timestamp(self): - timestamp = timezone.now() - self.user.timezone = 'America/New_York' - self.user.locale = 'en_US' - self.user.save() - tz = dates.get_timezone(self.user.timezone) - locale = Locale(self.user.locale) - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - def test_localize_timestamp_empty_timezone(self): - timestamp = timezone.now() - self.user.timezone = '' - self.user.locale = 'en_US' - self.user.save() - tz = dates.get_timezone('Etc/UTC') - locale = Locale(self.user.locale) - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - def test_localize_timestamp_empty_locale(self): - timestamp = timezone.now() - self.user.timezone = 'America/New_York' - self.user.locale = '' - self.user.save() - tz = dates.get_timezone(self.user.timezone) - locale = Locale('en') - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - def test_localize_timestamp_handles_unicode(self): - timestamp = timezone.now() - self.user.timezone = 'Europe/Moscow' - self.user.locale = 'ru_RU' - self.user.save() - tz = dates.get_timezone(self.user.timezone) - locale = Locale(self.user.locale) - formatted_date = dates.format_date(timestamp, format='full', locale=locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale) - formatted_datetime = f'{formatted_time} on {formatted_date}' - assert emails.localize_timestamp(timestamp, self.user) == formatted_datetime - - -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) -class TestSendDigest(OsfTestCase): - def setUp(self): - super().setUp() - self.user_1 = factories.UserFactory() - self.user_2 = factories.UserFactory() - self.project = factories.ProjectFactory() - self.timestamp = timezone.now() - - from conftest import start_mock_send_grid - self.mock_send_grid = start_mock_send_grid(self) - - def test_group_notifications_by_user_transactional(self): - send_type = 'email_transactional' - d = factories.NotificationDigestFactory( - user=self.user_1, - send_type=send_type, - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d.save() - d2 = factories.NotificationDigestFactory( - user=self.user_2, - send_type=send_type, - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d2.save() - d3 = factories.NotificationDigestFactory( - user=self.user_2, - send_type='email_digest', - timestamp=self.timestamp, - message='Hello, but this should not appear (this is a digest)', - node_lineage=[self.project._id] - ) - d3.save() - user_groups = list(get_users_emails(send_type)) - expected = [ - { - 'user_id': self.user_1._id, - 'info': [{ - 'message': 
'Hello', - 'node_lineage': [str(self.project._id)], - '_id': d._id - }] - }, - { - 'user_id': self.user_2._id, - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': d2._id - }] - } - ] - - assert len(user_groups) == 2 - assert user_groups == expected - digest_ids = [d._id, d2._id, d3._id] - remove_notifications(email_notification_ids=digest_ids) - - def test_group_notifications_by_user_digest(self): - send_type = 'email_digest' - d2 = factories.NotificationDigestFactory( - user=self.user_2, - send_type=send_type, - timestamp=self.timestamp, - message='Hello', - node_lineage=[self.project._id] - ) - d2.save() - d3 = factories.NotificationDigestFactory( - user=self.user_2, - send_type='email_transactional', - timestamp=self.timestamp, - message='Hello, but this should not appear (this is transactional)', - node_lineage=[self.project._id] - ) - d3.save() - user_groups = list(get_users_emails(send_type)) - expected = [ - { - 'user_id': str(self.user_2._id), - 'info': [{ - 'message': 'Hello', - 'node_lineage': [str(self.project._id)], - '_id': str(d2._id) - }] - } - ] - - assert len(user_groups) == 1 - assert user_groups == expected - digest_ids = [d2._id, d3._id] - remove_notifications(email_notification_ids=digest_ids) - - def test_send_users_email_called_with_correct_args(self): - send_type = 'email_transactional' - d = factories.NotificationDigestFactory( - send_type=send_type, - event='comment_replies', - timestamp=timezone.now(), - message='Hello', - node_lineage=[factories.ProjectFactory()._id] - ) - d.save() - user_groups = list(get_users_emails(send_type)) - send_users_email(send_type) - mock_send_grid = self.mock_send_grid - assert mock_send_grid.called - assert mock_send_grid.call_count == len(user_groups) - - last_user_index = len(user_groups) - 1 - user = OSFUser.load(user_groups[last_user_index]['user_id']) - args, kwargs = mock_send_grid.call_args - - assert kwargs['to_addr'] == user.username - - def test_send_users_email_ignores_disabled_users(self): - send_type = 'email_transactional' - d = factories.NotificationDigestFactory( - send_type=send_type, - event='comment_replies', - timestamp=timezone.now(), - message='Hello', - node_lineage=[factories.ProjectFactory()._id] - ) - d.save() - - user_groups = list(get_users_emails(send_type)) - last_user_index = len(user_groups) - 1 - - user = OSFUser.load(user_groups[last_user_index]['user_id']) - user.is_disabled = True - user.save() - - send_users_email(send_type) - assert not self.mock_send_grid.called - - def test_remove_sent_digest_notifications(self): - d = factories.NotificationDigestFactory( - event='comment_replies', - timestamp=timezone.now(), - message='Hello', - node_lineage=[factories.ProjectFactory()._id] - ) - digest_id = d._id - remove_notifications(email_notification_ids=[digest_id]) - with pytest.raises(NotificationDigest.DoesNotExist): - NotificationDigest.objects.get(_id=digest_id) - - -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) -class TestNotificationsReviews(OsfTestCase): - def setUp(self): - super().setUp() - self.provider = factories.PreprintProviderFactory(_id='engrxiv') - self.preprint = factories.PreprintFactory(provider=self.provider) - self.user = factories.UserFactory() - self.sender = factories.UserFactory() - self.context_info = { - 'domain': 'osf.io', - 'reviewable': self.preprint, - 'workflow': 'pre-moderation', - 'provider_contact_email': settings.OSF_CONTACT_EMAIL, - 'provider_support_email': 
settings.OSF_SUPPORT_EMAIL, - 'document_type': 'preprint', - 'referrer': self.sender, - 'provider_url': self.provider.landing_url, - } - self.action = factories.ReviewActionFactory() - factories.NotificationSubscriptionLegacyFactory( - _id=self.user._id + '_' + 'global_comments', - user=self.user, - event_name='global_comments' - ).add_user_to_subscription(self.user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=self.user._id + '_' + 'global_file_updated', - user=self.user, - event_name='global_file_updated' - ).add_user_to_subscription(self.user, 'email_transactional') - - factories.NotificationSubscriptionLegacyFactory( - _id=self.user._id + '_' + 'global_reviews', - user=self.user, - event_name='global_reviews' - ).add_user_to_subscription(self.user, 'email_transactional') - - from conftest import start_mock_send_grid - self.mock_send_grid = start_mock_send_grid(self) - - def test_reviews_base_notification(self): - contributor_subscriptions = list(utils.get_all_user_subscriptions(self.user)) - event_types = [sub.event_name for sub in contributor_subscriptions] - assert 'global_reviews' in event_types - - def test_reviews_submit_notification(self): - listeners.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user]) - assert self.mock_send_grid.called - - @mock.patch('website.notifications.emails.notify_global_event') - def test_reviews_notification(self, mock_notify): - listeners.reviews_notification(self, creator=self.sender, context=self.context_info, action=self.action, template='test.html.mako') - assert mock_notify.called - - -class QuerySetMatcher: - def __init__(self, some_obj): - self.some_obj = some_obj - - def __eq__(self, other): - return list(self.some_obj) == list(other) - - -class TestNotificationsReviewsModerator(OsfTestCase): - - def setUp(self): - super().setUp() - self.provider = factories.PreprintProviderFactory(_id='engrxiv') - self.preprint = factories.PreprintFactory(provider=self.provider) - self.submitter = factories.UserFactory() - self.moderator_transacitonal = factories.UserFactory() - self.moderator_digest= factories.UserFactory() - - self.context_info_submission = { - 'referrer': self.submitter, - 'domain': 'osf.io', - 'reviewable': self.preprint, - 'workflow': 'pre-moderation', - 'provider_contact_email': settings.OSF_CONTACT_EMAIL, - 'provider_support_email': settings.OSF_SUPPORT_EMAIL, - } - - self.context_info_request = { - 'requester': self.submitter, - 'domain': 'osf.io', - 'reviewable': self.preprint, - 'workflow': 'pre-moderation', - 'provider_contact_email': settings.OSF_CONTACT_EMAIL, - 'provider_support_email': settings.OSF_SUPPORT_EMAIL, - } - - self.action = factories.ReviewActionFactory() - self.subscription = NotificationSubscriptionLegacy.load(self.provider._id+'_new_pending_submissions') - self.subscription.add_user_to_subscription(self.moderator_transacitonal, 'email_transactional') - self.subscription.add_user_to_subscription(self.moderator_digest, 'email_digest') - - @mock.patch('website.notifications.emails.store_emails') - def test_reviews_submit_notification(self, mock_store): - time_now = timezone.now() - - preprint = self.context_info_submission['reviewable'] - provider = preprint.provider - - self.context_info_submission['message'] = f'submitted {preprint.title}.' 
- self.context_info_submission['profile_image_url'] = get_profile_image_url(self.context_info_submission['referrer']) - self.context_info_submission['reviews_submission_url'] = f'{settings.DOMAIN}reviews/preprints/{provider._id}/{preprint._id}' - listeners.reviews_submit_notification_moderators(self, time_now, self.context_info_submission) - subscription = NotificationSubscriptionLegacy.load(self.provider._id + '_new_pending_submissions') - digest_subscriber_ids = list(subscription.email_digest.all().values_list('guids___id', flat=True)) - instant_subscriber_ids = list(subscription.email_transactional.all().values_list('guids___id', flat=True)) - - mock_store.assert_any_call( - digest_subscriber_ids, - 'email_digest', - 'new_pending_submissions', - self.context_info_submission['referrer'], - self.context_info_submission['reviewable'], - time_now, - abstract_provider=self.context_info_submission['reviewable'].provider, - **self.context_info_submission - ) - - mock_store.assert_any_call( - instant_subscriber_ids, - 'email_transactional', - 'new_pending_submissions', - self.context_info_submission['referrer'], - self.context_info_submission['reviewable'], - time_now, - abstract_provider=self.context_info_request['reviewable'].provider, - **self.context_info_submission - ) - - @mock.patch('website.notifications.emails.store_emails') - def test_reviews_request_notification(self, mock_store): - time_now = timezone.now() - self.context_info_request['message'] = 'has requested withdrawal of {} "{}".'.format(self.context_info_request['reviewable'].provider.preprint_word, - self.context_info_request['reviewable'].title) - self.context_info_request['profile_image_url'] = get_profile_image_url(self.context_info_request['requester']) - self.context_info_request['reviews_submission_url'] = '{}reviews/preprints/{}/{}'.format(settings.DOMAIN, - self.context_info_request[ - 'reviewable'].provider._id, - self.context_info_request[ - 'reviewable']._id) - listeners.reviews_withdrawal_requests_notification(self, time_now, self.context_info_request) - subscription = NotificationSubscriptionLegacy.load(self.provider._id + '_new_pending_submissions') - digest_subscriber_ids = subscription.email_digest.all().values_list('guids___id', flat=True) - instant_subscriber_ids = subscription.email_transactional.all().values_list('guids___id', flat=True) - mock_store.assert_any_call(QuerySetMatcher(digest_subscriber_ids), - 'email_digest', - 'new_pending_submissions', - self.context_info_request['requester'], - self.context_info_request['reviewable'], - time_now, - abstract_provider=self.context_info_request['reviewable'].provider, - **self.context_info_request) - - mock_store.assert_any_call(QuerySetMatcher(instant_subscriber_ids), - 'email_transactional', - 'new_pending_submissions', - self.context_info_request['requester'], - self.context_info_request['reviewable'], - time_now, - abstract_provider=self.context_info_request['reviewable'].provider, - **self.context_info_request) diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index 8403a9d63c9..bb801340423 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -1,102 +1,31 @@ #!/usr/bin/env python3 """Views tests for the OSF.""" -from unittest.mock import MagicMock, ANY -from urllib import parse - -import datetime as dt -import time -import unittest from hashlib import md5 -from http.cookies import SimpleCookie from unittest import mock -from urllib.parse import quote_plus import pytest -from 
django.core.exceptions import ValidationError -from django.utils import timezone -from flask import request, g -from lxml import html -from pytest import approx from rest_framework import status as http_status from addons.github.tests.factories import GitHubAccountFactory -from addons.osfstorage import settings as osfstorage_settings -from addons.wiki.models import WikiPage -from framework import auth -from framework.auth import Auth, authenticate, cas, core -from framework.auth.campaigns import ( - get_campaigns, - is_institution_login, - is_native_login, - is_proxy_login, - campaign_url_for -) -from framework.auth.exceptions import InvalidTokenError -from framework.auth.utils import impute_names_model, ensure_external_identity_uniqueness -from framework.auth.views import login_and_register_handler from framework.celery_tasks import handlers -from framework.exceptions import HTTPError, TemplateHTTPError -from framework.flask import redirect -from framework.transactions.handlers import no_auto_transaction from osf.external.spam import tasks as spam_tasks from osf.models import ( - Comment, - AbstractNode, - OSFUser, - Tag, - SpamStatus, - NodeRelation, NotableDomain ) -from osf.utils import permissions from osf_tests.factories import ( fake_email, ApiOAuth2ApplicationFactory, ApiOAuth2PersonalTokenFactory, AuthUserFactory, - CollectionFactory, - CommentFactory, - NodeFactory, - PreprintFactory, - PreprintProviderFactory, - PrivateLinkFactory, - ProjectFactory, - ProjectWithAddonFactory, - RegistrationProviderFactory, - UserFactory, - UnconfirmedUserFactory, - UnregUserFactory, RegionFactory, - DraftRegistrationFactory, ) from tests.base import ( - assert_is_redirect, - capture_signals, fake, - get_default_metaschema, OsfTestCase, - assert_datetime_equal, - test_app -) -from tests.test_cas_authentication import generate_external_user_with_resp -from tests.utils import run_celery_tasks -from website import mailchimp_utils, mails, settings, language -from website.profile.utils import add_contributor_json, serialize_unregistered -from website.profile.views import update_osf_help_mails_subscription -from website.project.decorators import check_can_access -from website.project.model import has_anonymous_link -from website.project.signals import contributor_added -from website.project.views.contributor import ( - deserialize_contributors, - notify_added_contributor, - send_claim_email, - send_claim_registered_email, ) -from website.project.views.node import _should_show_wiki_widget, abbrev_authors +from website import mailchimp_utils from website.settings import MAILCHIMP_GENERAL_LIST from website.util import api_url_for, web_url_for -from website.util import rubeus -from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag from conftest import start_mock_send_grid From 85e134277de100240bbee375b7b5040523e94d84 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 7 Jul 2025 10:31:59 -0400 Subject: [PATCH 049/176] split notification models into 3 files and improve interval choices --- api/subscriptions/permissions.py | 2 +- api/subscriptions/views.py | 3 +- ...tificationsubscription_options_and_more.py | 132 +++++++ .../0032_new_notifications_model.py | 104 ------ osf/models/__init__.py | 9 +- osf/models/notification.py | 330 +----------------- osf/models/notification_subscription.py | 102 ++++++ osf/models/notification_type.py | 247 +++++++++++++ 8 files changed, 489 insertions(+), 440 deletions(-) create mode 100644 
osf/migrations/0032_alter_notificationsubscription_options_and_more.py delete mode 100644 osf/migrations/0032_new_notifications_model.py create mode 100644 osf/models/notification_subscription.py create mode 100644 osf/models/notification_type.py diff --git a/api/subscriptions/permissions.py b/api/subscriptions/permissions.py index a07eae6e81d..b22831f2766 100644 --- a/api/subscriptions/permissions.py +++ b/api/subscriptions/permissions.py @@ -1,6 +1,6 @@ from rest_framework import permissions -from osf.models.notification import NotificationSubscription +from osf.models.notification_subscription import NotificationSubscription class IsSubscriptionOwner(permissions.BasePermission): diff --git a/api/subscriptions/views.py b/api/subscriptions/views.py index 8932b03ea67..57a4dbf36c7 100644 --- a/api/subscriptions/views.py +++ b/api/subscriptions/views.py @@ -24,7 +24,8 @@ RegistrationProvider, AbstractProvider, AbstractNode, Preprint, OSFUser, ) -from osf.models.notification import NotificationSubscription, NotificationType +from osf.models.notification_type import NotificationType +from osf.models.notification_subscription import NotificationSubscription class SubscriptionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin): diff --git a/osf/migrations/0032_alter_notificationsubscription_options_and_more.py b/osf/migrations/0032_alter_notificationsubscription_options_and_more.py new file mode 100644 index 00000000000..faa9ebdca19 --- /dev/null +++ b/osf/migrations/0032_alter_notificationsubscription_options_and_more.py @@ -0,0 +1,132 @@ +# Generated by Django 4.2.13 on 2025-07-07 14:24 + +from django.conf import settings +import django.contrib.postgres.fields +from django.db import migrations, models +import django.db.models.deletion +import django_extensions.db.fields +import osf.models.base +import osf.models.notification_type + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ('osf', '0031_alter_osfgroupgroupobjectpermission_unique_together_and_more'), + ] + + operations = [ + migrations.AlterModelOptions( + name='notificationsubscription', + options={'verbose_name': 'Notification Subscription', 'verbose_name_plural': 'Notification Subscriptions'}, + ), + migrations.AlterUniqueTogether( + name='notificationsubscription', + unique_together=set(), + ), + migrations.AddField( + model_name='notificationsubscription', + name='content_type', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype'), + ), + migrations.AddField( + model_name='notificationsubscription', + name='message_frequency', + field=models.CharField(max_length=500, null=True), + ), + migrations.AddField( + model_name='notificationsubscription', + name='object_id', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AlterField( + model_name='notificationsubscription', + name='user', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', to=settings.AUTH_USER_MODEL), + ), + migrations.CreateModel( + name='NotificationType', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('notification_interval_choices', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=32), blank=True, default=osf.models.notification_type.get_default_frequency_choices, size=None)), + ('name', models.CharField(max_length=255, 
unique=True)), + ('template', models.TextField(help_text='Template used to render the event_info. Supports Django template syntax.')), + ('subject', models.TextField(blank=True, help_text='Template used to render the subject line of email. Supports Django template syntax.', null=True)), + ('object_content_type', models.ForeignKey(blank=True, help_text='Content type for subscribed objects. Null means global event.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='contenttypes.contenttype')), + ], + options={ + 'verbose_name': 'Notification Type', + 'verbose_name_plural': 'Notification Types', + }, + ), + migrations.CreateModel( + name='Notification', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('event_context', models.JSONField()), + ('sent', models.DateTimeField(blank=True, null=True)), + ('seen', models.DateTimeField(blank=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True)), + ('subscription', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to='osf.notificationsubscription')), + ], + options={ + 'verbose_name': 'Notification', + 'verbose_name_plural': 'Notifications', + }, + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='_id', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='email_digest', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='email_transactional', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='event_name', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='node', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='none', + ), + migrations.RemoveField( + model_name='notificationsubscription', + name='provider', + ), + migrations.AddField( + model_name='notificationsubscription', + name='notification_type', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='osf.notificationtype'), + ), + migrations.CreateModel( + name='NotificationSubscriptionLegacy', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), + ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), + ('_id', models.CharField(db_index=True, max_length=100)), + ('event_name', models.CharField(max_length=100)), + ('email_digest', models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL)), + ('email_transactional', models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL)), + ('node', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to='osf.node')), + ('none', models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL)), + ('provider', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to='osf.abstractprovider')), + ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'db_table': 'osf_notificationsubscription_legacy', + 'unique_together': {('_id', 'provider')}, + }, + bases=(models.Model, 
osf.models.base.QuerySetExplainMixin), + ), + ] diff --git a/osf/migrations/0032_new_notifications_model.py b/osf/migrations/0032_new_notifications_model.py deleted file mode 100644 index 97b707dabb8..00000000000 --- a/osf/migrations/0032_new_notifications_model.py +++ /dev/null @@ -1,104 +0,0 @@ -import osf -from django.db import migrations, models -from django.conf import settings -import django_extensions.db.fields -import django.db.models - - -class Migration(migrations.Migration): - - dependencies = [ - ('osf', '0031_alter_osfgroupgroupobjectpermission_unique_together_and_more'), - ] - - operations = [ - migrations.RunSQL( - """ - DO $$ - DECLARE - idx record; - BEGIN - FOR idx IN - SELECT indexname - FROM pg_indexes - WHERE tablename = 'osf_notificationsubscription' - LOOP - EXECUTE format( - 'ALTER INDEX %I RENAME TO %I', - idx.indexname, - replace(idx.indexname, 'osf_notificationsubscription', 'osf_notificationsubscription_legacy') - ); - END LOOP; - END$$; - """ - ), - migrations.AlterModelTable( - name='NotificationSubscription', - table='osf_notificationsubscription_legacy', - ), - - migrations.RenameModel( - old_name='NotificationSubscription', - new_name='NotificationSubscriptionLegacy', - ), - migrations.CreateModel( - name='NotificationType', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, unique=True)), - ('notification_freq', models.CharField( - choices=[('none', 'None'), ('instantly', 'Instantly'), ('daily', 'Daily'), ('weekly', 'Weekly'), - ('monthly', 'Monthly')], default='instantly', max_length=32)), - ('template', models.TextField( - help_text='Template used to render the event_info. Supports Django template syntax.')), - ('object_content_type', models.ForeignKey(blank=True, - help_text='Content type for subscribed objects. 
Null means global event.', - null=True, on_delete=django.db.models.deletion.SET_NULL, - to='contenttypes.contenttype')), - ], - options={ - 'verbose_name': 'Notification Type', - 'verbose_name_plural': 'Notification Types', - }, - ), - migrations.CreateModel( - name='NotificationSubscription', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created', - django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), - ('modified', - django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), - ('message_frequency', models.CharField(max_length=32)), - ('object_id', models.CharField(blank=True, max_length=255, null=True)), - ('content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, - to='contenttypes.contenttype')), - ('notification_type', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='osf.notificationtype')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', - to=settings.AUTH_USER_MODEL)), - ], - options={ - 'verbose_name': 'Notification Subscription', - 'verbose_name_plural': 'Notification Subscriptions', - }, - bases=(models.Model, osf.models.base.QuerySetExplainMixin), - ), - migrations.CreateModel( - name='Notification', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('event_context', models.JSONField()), - ('sent', models.DateTimeField(blank=True, null=True)), - ('seen', models.DateTimeField(blank=True, null=True)), - ('created', models.DateTimeField(auto_now_add=True)), - ('subscription', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', - to='osf.notificationsubscription')), - ], - options={ - 'verbose_name': 'Notification', - 'verbose_name_plural': 'Notifications', - }, - ) - ] diff --git a/osf/models/__init__.py b/osf/models/__init__.py index 5f3c3696cdb..d09e350adfe 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -63,11 +63,10 @@ from .nodelog import NodeLog from .notable_domain import NotableDomain, DomainReference from .notifications import NotificationDigest, NotificationSubscriptionLegacy -from .notification import ( - NotificationSubscription, - Notification, - NotificationType -) +from .notification_subscription import NotificationSubscription +from .notification_type import NotificationType +from .notification import Notification + from .oauth import ( ApiOAuth2Application, ApiOAuth2PersonalToken, diff --git a/osf/models/notification.py b/osf/models/notification.py index 7f05742cb88..14fc4fd3155 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -1,341 +1,13 @@ import logging from django.db import models -from django.contrib.contenttypes.fields import GenericForeignKey -from django.contrib.contenttypes.models import ContentType -from django.core.exceptions import ValidationError -from django.template import Template, TemplateSyntaxError -from .base import BaseModel -from enum import Enum from website import settings from api.base import settings as api_settings from osf import email - -class FrequencyChoices(Enum): - NONE = 'none' - INSTANTLY = 'instantly' - DAILY = 'daily' - WEEKLY = 'weekly' - MONTHLY = 'monthly' - - @classmethod - def choices(cls): - return [(key.value, key.name.capitalize()) for key in cls] - - -class NotificationType(models.Model): - class 
Type(str, Enum): - # Desk notifications - DESK_REQUEST_EXPORT = 'desk_request_export' - DESK_REQUEST_DEACTIVATION = 'desk_request_deactivation' - DESK_OSF_SUPPORT_EMAIL = 'desk_osf_support_email' - DESK_REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER = 'desk_registration_bulk_upload_product_owner' - DESK_USER_REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE = 'desk_user_registration_bulk_upload_unexpected_failure' - DESK_ARCHIVE_JOB_EXCEEDED = 'desk_archive_job_exceeded' - DESK_ARCHIVE_JOB_COPY_ERROR = 'desk_archive_job_copy_error' - DESK_ARCHIVE_JOB_FILE_NOT_FOUND = 'desk_archive_job_file_not_found' - DESK_ARCHIVE_JOB_UNCAUGHT_ERROR = 'desk_archive_job_uncaught_error' - - # User notifications - USER_PENDING_VERIFICATION = 'user_pending_verification' - USER_PENDING_VERIFICATION_REGISTERED = 'user_pending_verification_registered' - USER_STORAGE_CAP_EXCEEDED_ANNOUNCEMENT = 'user_storage_cap_exceeded_announcement' - USER_SPAM_BANNED = 'user_spam_banned' - USER_REQUEST_DEACTIVATION_COMPLETE = 'user_request_deactivation_complete' - USER_PRIMARY_EMAIL_CHANGED = 'user_primary_email_changed' - USER_INSTITUTION_DEACTIVATION = 'user_institution_deactivation' - USER_FORGOT_PASSWORD = 'user_forgot_password' - USER_FORGOT_PASSWORD_INSTITUTION = 'user_forgot_password_institution' - USER_REQUEST_EXPORT = 'user_request_export' - USER_CONTRIBUTOR_ADDED_OSF_PREPRINT = 'user_contributor_added_osf_preprint' - USER_CONTRIBUTOR_ADDED_DEFAULT = 'user_contributor_added_default' - USER_DUPLICATE_ACCOUNTS_OSF4I = 'user_duplicate_accounts_osf4i' - USER_EXTERNAL_LOGIN_LINK_SUCCESS = 'user_external_login_link_success' - USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL = 'user_registration_bulk_upload_failure_all' - USER_REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL = 'user_registration_bulk_upload_success_partial' - USER_REGISTRATION_BULK_UPLOAD_SUCCESS_ALL = 'user_registration_bulk_upload_success_all' - USER_ADD_SSO_EMAIL_OSF4I = 'user_add_sso_email_osf4i' - USER_WELCOME_OSF4I = 'user_welcome_osf4i' - USER_ARCHIVE_JOB_EXCEEDED = 'user_archive_job_exceeded' - USER_ARCHIVE_JOB_COPY_ERROR = 'user_archive_job_copy_error' - USER_ARCHIVE_JOB_FILE_NOT_FOUND = 'user_archive_job_file_not_found' - USER_ARCHIVE_JOB_UNCAUGHT_ERROR = 'user_archive_job_uncaught_error' - USER_COMMENT_REPLIES = 'user_comment_replies' - USER_COMMENTS = 'user_comments' - USER_FILE_UPDATED = 'user_file_updated' - USER_COMMENT_MENTIONS = 'user_mentions' - USER_REVIEWS = 'user_reviews' - USER_PASSWORD_RESET = 'user_password_reset' - USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION = 'user_contributor_added_draft_registration' - USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE = 'user_external_login_confirm_email_create' - USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK = 'user_external_login_confirm_email_link' - USER_CONFIRM_MERGE = 'user_confirm_merge' - USER_CONFIRM_EMAIL = 'user_confirm_email' - USER_INITIAL_CONFIRM_EMAIL = 'user_initial_confirm_email' - USER_INVITE_DEFAULT = 'user_invite_default' - USER_PENDING_INVITE = 'user_pending_invite' - USER_FORWARD_INVITE = 'user_forward_invite' - USER_FORWARD_INVITE_REGISTERED = 'user_forward_invite_registered' - USER_INVITE_DRAFT_REGISTRATION = 'user_invite_draft_registration' - USER_INVITE_OSF_PREPRINT = 'user_invite_osf_preprint' - - # Node notifications - NODE_COMMENT = 'node_comments' - NODE_FILES_UPDATED = 'node_files_updated' - NODE_AFFILIATION_CHANGED = 'node_affiliation_changed' - NODE_REQUEST_ACCESS_SUBMITTED = 'node_access_request_submitted' - NODE_REQUEST_ACCESS_DENIED = 'node_request_access_denied' - NODE_FORK_COMPLETED = 'node_fork_completed' 
- NODE_FORK_FAILED = 'node_fork_failed' - NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST = 'node_request_institutional_access_request' - NODE_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'node_contributor_added_access_request' - NODE_PENDING_EMBARGO_ADMIN = 'node_pending_embargo_admin' - NODE_PENDING_EMBARGO_NON_ADMIN = 'node_pending_embargo_non_admin' - NODE_PENDING_RETRACTION_NON_ADMIN = 'node_pending_retraction_non_admin' - NODE_PENDING_RETRACTION_ADMIN = 'node_pending_retraction_admin' - NODE_PENDING_REGISTRATION_NON_ADMIN = 'node_pending_registration_non_admin' - NODE_PENDING_REGISTRATION_ADMIN = 'node_pending_registration_admin' - NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' - NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' - - # Provider notifications - PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' - PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION = 'provider_reviews_submission_confirmation' - PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION = 'provider_reviews_moderator_submission_confirmation' - PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED = 'preprint_request_withdrawal_requested' - PROVIDER_REVIEWS_REJECT_CONFIRMATION = 'provider_reviews_reject_confirmation' - PROVIDER_REVIEWS_ACCEPT_CONFIRMATION = 'provider_reviews_accept_confirmation' - PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION = 'provider_reviews_resubmission_confirmation' - PROVIDER_REVIEWS_COMMENT_EDITED = 'provider_reviews_comment_edited' - PROVIDER_CONTRIBUTOR_ADDED_PREPRINT = 'provider_contributor_added_preprint' - PROVIDER_CONFIRM_EMAIL_MODERATION = 'provider_confirm_email_moderation' - PROVIDER_MODERATOR_ADDED = 'provider_moderator_added' - PROVIDER_CONFIRM_EMAIL_PREPRINTS = 'provider_confirm_email_preprints' - PROVIDER_USER_INVITE_PREPRINT = 'provider_user_invite_preprint' - - # Preprint notifications - PREPRINT_REQUEST_WITHDRAWAL_APPROVED = 'preprint_request_withdrawal_approved' - PREPRINT_REQUEST_WITHDRAWAL_DECLINED = 'preprint_request_withdrawal_declined' - PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'preprint_contributor_added_preprint_node_from_osf' - - # Collections Submission notifications - COLLECTION_SUBMISSION_REMOVED_ADMIN = 'collection_submission_removed_admin' - COLLECTION_SUBMISSION_REMOVED_MODERATOR = 'collection_submission_removed_moderator' - COLLECTION_SUBMISSION_REMOVED_PRIVATE = 'collection_submission_removed_private' - COLLECTION_SUBMISSION_SUBMITTED = 'collection_submission_submitted' - COLLECTION_SUBMISSION_ACCEPTED = 'collection_submission_accepted' - COLLECTION_SUBMISSION_REJECTED = 'collection_submission_rejected' - COLLECTION_SUBMISSION_CANCEL = 'collection_submission_cancel' - - # Schema Response notifications - SCHEMA_RESPONSE_REJECTED = 'schema_response_rejected' - SCHEMA_RESPONSE_APPROVED = 'schema_response_approved' - SCHEMA_RESPONSE_SUBMITTED = 'schema_response_submitted' - SCHEMA_RESPONSE_INITIATED = 'schema_response_initiated' - - REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = 'registration_bulk_upload_failure_duplicates' - - @property - def instance(self): - obj, created = NotificationType.objects.get_or_create(name=self.value) - return obj - - @classmethod - def user_types(cls): - return [member for member in cls if member.name.startswith('USER_')] - - @classmethod - def node_types(cls): - return [member for member in cls if member.name.startswith('NODE_')] - - @classmethod - def preprint_types(cls): - return [member for member in cls if member.name.startswith('PREPRINT_')] - - @classmethod - def 
provider_types(cls): - return [member for member in cls if member.name.startswith('PROVIDER_')] - - @classmethod - def schema_response_types(cls): - return [member for member in cls if member.name.startswith('SCHEMA_RESPONSE_')] - - @classmethod - def desk_types(cls): - return [member for member in cls if member.name.startswith('DESK_')] - - name: str = models.CharField(max_length=255, unique=True) - notification_freq: str = models.CharField( - max_length=32, - choices=FrequencyChoices.choices(), - default=FrequencyChoices.INSTANTLY.value, - ) - - object_content_type = models.ForeignKey( - ContentType, - on_delete=models.SET_NULL, - null=True, - blank=True, - help_text='Content type for subscribed objects. Null means global event.' - ) - - template: str = models.TextField( - help_text='Template used to render the event_info. Supports Django template syntax.' - ) - - def clean(self): - try: - Template(self.template) - except TemplateSyntaxError as exc: - raise ValidationError({'template': f'Invalid template: {exc}'}) - - def emit(self, user, subscribed_object=None, event_context=None): - """Emit a notification to a user by creating Notification and NotificationSubscription objects. - - Args: - user (OSFUser): The recipient of the notification. - subscribed_object (optional): The object the subscription is related to. - event_context (dict, optional): Context for rendering the notification template. - """ - subscription, created = NotificationSubscription.objects.get_or_create( - notification_type=self, - user=user, - content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, - object_id=subscribed_object.pk if subscribed_object else None, - defaults={'message_frequency': self.notification_freq}, - ) - if subscription.message_frequency == 'instantly': - Notification.objects.create( - subscription=subscription, - event_context=event_context - ).send() - - def add_user_to_subscription(self, user, *args, **kwargs): - """ - """ - provider = kwargs.pop('provider', None) - node = kwargs.pop('node', None) - data = {} - if subscribed_object := provider or node: - data = { - 'object_id': subscribed_object.id, - 'content_type_id': ContentType.objects.get_for_model(subscribed_object).id, - } - - notification, created = NotificationSubscription.objects.get_or_create( - user=user, - notification_type=self, - **data, - ) - return notification - - def remove_user_from_subscription(self, user): - """ - """ - notification, _ = NotificationSubscription.objects.update_or_create( - user=user, - notification_type=self, - defaults={'message_frequency': FrequencyChoices.NONE.value} - ) - - def __str__(self) -> str: - return self.name - - class Meta: - verbose_name = 'Notification Type' - verbose_name_plural = 'Notification Types' - - -class NotificationSubscription(BaseModel): - notification_type: NotificationType = models.ForeignKey( - NotificationType, - on_delete=models.CASCADE, - null=False - ) - user = models.ForeignKey('osf.OSFUser', on_delete=models.CASCADE, related_name='subscriptions') - message_frequency: str = models.CharField(max_length=32) - - content_type = models.ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE) - object_id = models.CharField(max_length=255, null=True, blank=True) - subscribed_object = GenericForeignKey('content_type', 'object_id') - - def clean(self): - ct = self.notification_type.object_content_type - - if ct: - if self.content_type != ct: - raise ValidationError('Subscribed object must match type\'s content_type.') - 
if not self.object_id: - raise ValidationError('Subscribed object ID is required.') - else: - if self.content_type or self.object_id: - raise ValidationError('Global subscriptions must not have an object.') - - if self.message_frequency not in self.notification_type.notification_freq: - raise ValidationError(f'{self.message_frequency!r} is not allowed for {self.notification_type.name!r}.') - - def __str__(self) -> str: - return f'<{self.user} via {self.subscribed_object} subscribes to {self.notification_type.name} ({self.message_frequency})>' - - class Meta: - verbose_name = 'Notification Subscription' - verbose_name_plural = 'Notification Subscriptions' - - def emit(self, user, subscribed_object=None, event_context=None): - """Emit a notification to a user by creating Notification and NotificationSubscription objects. - - Args: - user (OSFUser): The recipient of the notification. - subscribed_object (optional): The object the subscription is related to. - event_context (dict, optional): Context for rendering the notification template. - """ - if self.message_frequency == 'instantly': - Notification.objects.create( - subscription=self, - event_context=event_context - ).send() - else: - Notification.objects.create( - subscription=self, - event_context=event_context - ) - - @property - def absolute_api_v2_url(self): - from api.base.utils import absolute_reverse - return absolute_reverse('institutions:institution-detail', kwargs={'institution_id': self._id, 'version': 'v2'}) - - from django.contrib.contenttypes.models import ContentType - - @property - def _id(self): - """ - Legacy subscription id for API compatibility. - Provider: _ - User/global: _global_ - Node/etc: _ - """ - # Safety checks - event = self.notification_type.name - ct = self.notification_type.object_content_type - match getattr(ct, 'model', None): - case 'preprintprovider' | 'collectionprovider' | 'registrationprovider': - # Providers: use subscribed_object._id (which is the provider short name, e.g. 
'mindrxiv') - return f'{self.subscribed_object._id}_new_pending_submissions' - case 'node' | 'collection' | 'preprint': - # Node-like objects: use object_id (guid) - return f'{self.subscribed_object._id}_{event}' - case 'osfuser' | 'user' | None: - # Global: _global - return f'{self.user._id}_global' - case _: - raise NotImplementedError() - - class Notification(models.Model): subscription = models.ForeignKey( - NotificationSubscription, + 'NotificationSubscription', on_delete=models.CASCADE, related_name='notifications' ) diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py new file mode 100644 index 00000000000..b2ecb3c0b99 --- /dev/null +++ b/osf/models/notification_subscription.py @@ -0,0 +1,102 @@ +from django.db import models +from django.contrib.contenttypes.fields import GenericForeignKey +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError + +from .base import BaseModel + + +class NotificationSubscription(BaseModel): + notification_type = models.ForeignKey( + 'NotificationType', + on_delete=models.CASCADE, + null=True + ) + user = models.ForeignKey( + 'osf.OSFUser', + null=True, + on_delete=models.CASCADE, + related_name='subscriptions' + ) + message_frequency: str = models.CharField( + max_length=500, + null=True + ) + + content_type = models.ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE) + object_id = models.CharField(max_length=255, null=True, blank=True) + subscribed_object = GenericForeignKey('content_type', 'object_id') + + def clean(self): + ct = self.notification_type.object_content_type + + if ct: + if self.content_type != ct: + raise ValidationError('Subscribed object must match type\'s content_type.') + if not self.object_id: + raise ValidationError('Subscribed object ID is required.') + else: + if self.content_type or self.object_id: + raise ValidationError('Global subscriptions must not have an object.') + from . import NotificationType + + allowed_freqs = self.notification_type.notification_interval_choices or NotificationType.DEFAULT_FREQUENCY_CHOICES + if self.message_frequency not in allowed_freqs: + raise ValidationError(f'{self.message_frequency!r} is not allowed for {self.notification_type.name!r}.') + + def __str__(self) -> str: + return f'<{self.user} via {self.subscribed_object} subscribes to {self.notification_type.name} ({self.message_frequency})>' + + class Meta: + verbose_name = 'Notification Subscription' + verbose_name_plural = 'Notification Subscriptions' + + def emit(self, user, subscribed_object=None, event_context=None): + """Emit a notification to a user by creating Notification and NotificationSubscription objects. + + Args: + user (OSFUser): The recipient of the notification. + subscribed_object (optional): The object the subscription is related to. + event_context (dict, optional): Context for rendering the notification template. + """ + from . import Notification + + if self.message_frequency == 'instantly': + Notification.objects.create( + subscription=self, + event_context=event_context + ).send() + else: + Notification.objects.create( + subscription=self, + event_context=event_context + ) + + @property + def absolute_api_v2_url(self): + from api.base.utils import absolute_reverse + return absolute_reverse('institutions:institution-detail', kwargs={'institution_id': self._id, 'version': 'v2'}) + + @property + def _id(self): + """ + Legacy subscription id for API compatibility. 
+ Provider: _ + User/global: _global_ + Node/etc: _ + """ + # Safety checks + event = self.notification_type.name + ct = self.notification_type.object_content_type + match getattr(ct, 'model', None): + case 'preprintprovider' | 'collectionprovider' | 'registrationprovider': + # Providers: use subscribed_object._id (which is the provider short name, e.g. 'mindrxiv') + return f'{self.subscribed_object._id}_new_pending_submissions' + case 'node' | 'collection' | 'preprint': + # Node-like objects: use object_id (guid) + return f'{self.subscribed_object._id}_{event}' + case 'osfuser' | 'user' | None: + # Global: _global + return f'{self.user._id}_global' + case _: + raise NotImplementedError() diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py new file mode 100644 index 00000000000..c9b139b1fc1 --- /dev/null +++ b/osf/models/notification_type.py @@ -0,0 +1,247 @@ +from django.db import models +from django.contrib.postgres.fields import ArrayField +from django.contrib.contenttypes.models import ContentType + +from .notification_subscription import NotificationSubscription +from .notification import Notification +from enum import Enum + + +class FrequencyChoices(Enum): + NONE = 'none' + INSTANTLY = 'instantly' + DAILY = 'daily' + WEEKLY = 'weekly' + MONTHLY = 'monthly' + + @classmethod + def choices(cls): + return [(key.value, key.name.capitalize()) for key in cls] + +def get_default_frequency_choices(): + DEFAULT_FREQUENCY_CHOICES = ['none', 'instantly', 'daily', 'weekly', 'monthly'] + return DEFAULT_FREQUENCY_CHOICES.copy() + + +class NotificationType(models.Model): + + class Type(str, Enum): + # Desk notifications + DESK_REQUEST_EXPORT = 'desk_request_export' + DESK_REQUEST_DEACTIVATION = 'desk_request_deactivation' + DESK_OSF_SUPPORT_EMAIL = 'desk_osf_support_email' + DESK_REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER = 'desk_registration_bulk_upload_product_owner' + DESK_USER_REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE = 'desk_user_registration_bulk_upload_unexpected_failure' + DESK_ARCHIVE_JOB_EXCEEDED = 'desk_archive_job_exceeded' + DESK_ARCHIVE_JOB_COPY_ERROR = 'desk_archive_job_copy_error' + DESK_ARCHIVE_JOB_FILE_NOT_FOUND = 'desk_archive_job_file_not_found' + DESK_ARCHIVE_JOB_UNCAUGHT_ERROR = 'desk_archive_job_uncaught_error' + + # User notifications + USER_PENDING_VERIFICATION = 'user_pending_verification' + USER_PENDING_VERIFICATION_REGISTERED = 'user_pending_verification_registered' + USER_STORAGE_CAP_EXCEEDED_ANNOUNCEMENT = 'user_storage_cap_exceeded_announcement' + USER_SPAM_BANNED = 'user_spam_banned' + USER_REQUEST_DEACTIVATION_COMPLETE = 'user_request_deactivation_complete' + USER_PRIMARY_EMAIL_CHANGED = 'user_primary_email_changed' + USER_INSTITUTION_DEACTIVATION = 'user_institution_deactivation' + USER_FORGOT_PASSWORD = 'user_forgot_password' + USER_FORGOT_PASSWORD_INSTITUTION = 'user_forgot_password_institution' + USER_REQUEST_EXPORT = 'user_request_export' + USER_CONTRIBUTOR_ADDED_OSF_PREPRINT = 'user_contributor_added_osf_preprint' + USER_CONTRIBUTOR_ADDED_DEFAULT = 'user_contributor_added_default' + USER_DUPLICATE_ACCOUNTS_OSF4I = 'user_duplicate_accounts_osf4i' + USER_EXTERNAL_LOGIN_LINK_SUCCESS = 'user_external_login_link_success' + USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL = 'user_registration_bulk_upload_failure_all' + USER_REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL = 'user_registration_bulk_upload_success_partial' + USER_REGISTRATION_BULK_UPLOAD_SUCCESS_ALL = 'user_registration_bulk_upload_success_all' + USER_ADD_SSO_EMAIL_OSF4I = 
'user_add_sso_email_osf4i' + USER_WELCOME_OSF4I = 'user_welcome_osf4i' + USER_ARCHIVE_JOB_EXCEEDED = 'user_archive_job_exceeded' + USER_ARCHIVE_JOB_COPY_ERROR = 'user_archive_job_copy_error' + USER_ARCHIVE_JOB_FILE_NOT_FOUND = 'user_archive_job_file_not_found' + USER_ARCHIVE_JOB_UNCAUGHT_ERROR = 'user_archive_job_uncaught_error' + USER_COMMENT_REPLIES = 'user_comment_replies' + USER_COMMENTS = 'user_comments' + USER_FILE_UPDATED = 'user_file_updated' + USER_COMMENT_MENTIONS = 'user_mentions' + USER_REVIEWS = 'user_reviews' + USER_PASSWORD_RESET = 'user_password_reset' + USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION = 'user_contributor_added_draft_registration' + USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE = 'user_external_login_confirm_email_create' + USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK = 'user_external_login_confirm_email_link' + USER_CONFIRM_MERGE = 'user_confirm_merge' + USER_CONFIRM_EMAIL = 'user_confirm_email' + USER_INITIAL_CONFIRM_EMAIL = 'user_initial_confirm_email' + USER_INVITE_DEFAULT = 'user_invite_default' + USER_PENDING_INVITE = 'user_pending_invite' + USER_FORWARD_INVITE = 'user_forward_invite' + USER_FORWARD_INVITE_REGISTERED = 'user_forward_invite_registered' + USER_INVITE_DRAFT_REGISTRATION = 'user_invite_draft_registration' + USER_INVITE_OSF_PREPRINT = 'user_invite_osf_preprint' + + # Node notifications + NODE_COMMENT = 'node_comments' + NODE_FILES_UPDATED = 'node_files_updated' + NODE_AFFILIATION_CHANGED = 'node_affiliation_changed' + NODE_REQUEST_ACCESS_SUBMITTED = 'node_access_request_submitted' + NODE_REQUEST_ACCESS_DENIED = 'node_request_access_denied' + NODE_FORK_COMPLETED = 'node_fork_completed' + NODE_FORK_FAILED = 'node_fork_failed' + NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST = 'node_request_institutional_access_request' + NODE_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'node_contributor_added_access_request' + NODE_PENDING_EMBARGO_ADMIN = 'node_pending_embargo_admin' + NODE_PENDING_EMBARGO_NON_ADMIN = 'node_pending_embargo_non_admin' + NODE_PENDING_RETRACTION_NON_ADMIN = 'node_pending_retraction_non_admin' + NODE_PENDING_RETRACTION_ADMIN = 'node_pending_retraction_admin' + NODE_PENDING_REGISTRATION_NON_ADMIN = 'node_pending_registration_non_admin' + NODE_PENDING_REGISTRATION_ADMIN = 'node_pending_registration_admin' + NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' + NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' + + # Provider notifications + PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' + PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION = 'provider_reviews_submission_confirmation' + PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION = 'provider_reviews_moderator_submission_confirmation' + PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED = 'preprint_request_withdrawal_requested' + PROVIDER_REVIEWS_REJECT_CONFIRMATION = 'provider_reviews_reject_confirmation' + PROVIDER_REVIEWS_ACCEPT_CONFIRMATION = 'provider_reviews_accept_confirmation' + PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION = 'provider_reviews_resubmission_confirmation' + PROVIDER_REVIEWS_COMMENT_EDITED = 'provider_reviews_comment_edited' + PROVIDER_CONTRIBUTOR_ADDED_PREPRINT = 'provider_contributor_added_preprint' + PROVIDER_CONFIRM_EMAIL_MODERATION = 'provider_confirm_email_moderation' + PROVIDER_MODERATOR_ADDED = 'provider_moderator_added' + PROVIDER_CONFIRM_EMAIL_PREPRINTS = 'provider_confirm_email_preprints' + PROVIDER_USER_INVITE_PREPRINT = 'provider_user_invite_preprint' + + # Preprint notifications + 
PREPRINT_REQUEST_WITHDRAWAL_APPROVED = 'preprint_request_withdrawal_approved' + PREPRINT_REQUEST_WITHDRAWAL_DECLINED = 'preprint_request_withdrawal_declined' + PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'preprint_contributor_added_preprint_node_from_osf' + + # Collections Submission notifications + COLLECTION_SUBMISSION_REMOVED_ADMIN = 'collection_submission_removed_admin' + COLLECTION_SUBMISSION_REMOVED_MODERATOR = 'collection_submission_removed_moderator' + COLLECTION_SUBMISSION_REMOVED_PRIVATE = 'collection_submission_removed_private' + COLLECTION_SUBMISSION_SUBMITTED = 'collection_submission_submitted' + COLLECTION_SUBMISSION_ACCEPTED = 'collection_submission_accepted' + COLLECTION_SUBMISSION_REJECTED = 'collection_submission_rejected' + COLLECTION_SUBMISSION_CANCEL = 'collection_submission_cancel' + + # Schema Response notifications + SCHEMA_RESPONSE_REJECTED = 'schema_response_rejected' + SCHEMA_RESPONSE_APPROVED = 'schema_response_approved' + SCHEMA_RESPONSE_SUBMITTED = 'schema_response_submitted' + SCHEMA_RESPONSE_INITIATED = 'schema_response_initiated' + + REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = 'registration_bulk_upload_failure_duplicates' + + @property + def instance(self): + obj, created = NotificationType.objects.get_or_create(name=self.value) + return obj + + @classmethod + def user_types(cls): + return [member for member in cls if member.name.startswith('USER_')] + + @classmethod + def node_types(cls): + return [member for member in cls if member.name.startswith('NODE_')] + + @classmethod + def preprint_types(cls): + return [member for member in cls if member.name.startswith('PREPRINT_')] + + @classmethod + def provider_types(cls): + return [member for member in cls if member.name.startswith('PROVIDER_')] + + @classmethod + def schema_response_types(cls): + return [member for member in cls if member.name.startswith('SCHEMA_RESPONSE_')] + + @classmethod + def desk_types(cls): + return [member for member in cls if member.name.startswith('DESK_')] + + notification_interval_choices = ArrayField( + base_field=models.CharField(max_length=32), + default=get_default_frequency_choices, + blank=True + ) + + name: str = models.CharField(max_length=255, unique=True) + + object_content_type = models.ForeignKey( + ContentType, + on_delete=models.SET_NULL, + null=True, + blank=True, + help_text='Content type for subscribed objects. Null means global event.' + ) + + template: str = models.TextField( + help_text='Template used to render the event_info. Supports Django template syntax.' + ) + subject: str = models.TextField( + blank=True, + null=True, + help_text='Template used to render the subject line of email. Supports Django template syntax.' + ) + + def emit(self, user, subscribed_object=None, event_context=None): + """Emit a notification to a user by creating Notification and NotificationSubscription objects. + + Args: + user (OSFUser): The recipient of the notification. + subscribed_object (optional): The object the subscription is related to. + event_context (dict, optional): Context for rendering the notification template. 
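+
+        Example (an illustrative sketch; ``some_user``, ``some_node`` and the
+        event_context keys are hypothetical, while the calls are the ones
+        defined in this module)::
+
+            NotificationType.Type.NODE_FORK_COMPLETED.instance.emit(
+                user=some_user,
+                subscribed_object=some_node,
+                event_context={'fork_guid': 'abc12'},
+            )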
+ """ + subscription, created = NotificationSubscription.objects.get_or_create( + notification_type=self, + user=user, + content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, + object_id=subscribed_object.pk if subscribed_object else None, + defaults={'message_frequency': self.notification_freq}, + ) + if subscription.message_frequency == 'instantly': + Notification.objects.create( + subscription=subscription, + event_context=event_context + ).send() + + def add_user_to_subscription(self, user, *args, **kwargs): + """ + """ + provider = kwargs.pop('provider', None) + node = kwargs.pop('node', None) + data = {} + if subscribed_object := provider or node: + data = { + 'object_id': subscribed_object.id, + 'content_type_id': ContentType.objects.get_for_model(subscribed_object).id, + } + + notification, created = NotificationSubscription.objects.get_or_create( + user=user, + notification_type=self, + **data, + ) + return notification + + def remove_user_from_subscription(self, user): + """ + """ + notification, _ = NotificationSubscription.objects.update_or_create( + user=user, + notification_type=self, + defaults={'message_frequency': FrequencyChoices.NONE.value} + ) + + def __str__(self) -> str: + return self.name + + class Meta: + verbose_name = 'Notification Type' + verbose_name_plural = 'Notification Types' From a50008c8b84b82b635e58b2fd798264cdbb9c9fb Mon Sep 17 00:00:00 2001 From: Anton Krytskyi Date: Tue, 8 Jul 2025 17:35:30 +0300 Subject: [PATCH 050/176] add exception handling to /review_actions/ endpoint --- api/preprints/views.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/api/preprints/views.py b/api/preprints/views.py index cd5dba8ba8d..76f6e88849f 100644 --- a/api/preprints/views.py +++ b/api/preprints/views.py @@ -2,6 +2,7 @@ from packaging.version import Version from django.contrib.auth.models import AnonymousUser +from django.core.exceptions import ValidationError as DjangoValidationError from rest_framework import generics from rest_framework.exceptions import MethodNotAllowed, NotFound, PermissionDenied, NotAuthenticated, ValidationError from rest_framework import permissions as drf_permissions @@ -704,7 +705,10 @@ def perform_create(self, serializer): f'If you are an admin, set up moderation by setting `reviews_workflow` at {url}', ) - serializer.save(user=self.request.user) + try: + serializer.save(user=self.request.user) + except (ValueError, DjangoValidationError) as exc: + raise ValidationError(str(exc)) from exc # overrides ListFilterMixin def get_default_queryset(self): From f2e5309453d9284b6dd3d5fa9c00c0da6bcec317 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 8 Jul 2025 11:02:16 -0400 Subject: [PATCH 051/176] clean-up tests and pass frequency data properly --- api/subscriptions/serializers.py | 3 ++- .../subscriptions/views/test_subscriptions_detail.py | 3 +-- osf/models/notification_subscription.py | 7 +++---- osf/models/notification_type.py | 11 +++++++---- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/api/subscriptions/serializers.py b/api/subscriptions/serializers.py index d37a8342564..ede0782ae65 100644 --- a/api/subscriptions/serializers.py +++ b/api/subscriptions/serializers.py @@ -37,7 +37,8 @@ def get_absolute_url(self, obj): def update(self, instance, validated_data): user = self.context['request'].user - frequency = validated_data.get('frequency') + frequency = validated_data.get('frequency') or 'none' + instance.message_frequency = frequency if frequency != 
'none' and instance.content_type == ContentType.objects.get_for_model(Node): node = Node.objects.get( diff --git a/api_tests/subscriptions/views/test_subscriptions_detail.py b/api_tests/subscriptions/views/test_subscriptions_detail.py index 2d91e6b1083..0e2fa22b119 100644 --- a/api_tests/subscriptions/views/test_subscriptions_detail.py +++ b/api_tests/subscriptions/views/test_subscriptions_detail.py @@ -23,7 +23,6 @@ def notification(self, user): @pytest.fixture() def url(self, notification): - print('_id', notification._id) return f'/{API_BASE}subscriptions/{notification._id}/' @pytest.fixture() @@ -116,7 +115,7 @@ def test_subscription_detail_invalid_payload_400( expect_errors=True ) assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == '"invalid-frequency" is not a valid choice.' + assert res.json['errors'][0]['detail'] == ('"invalid-frequency" is not a valid choice.') def test_subscription_detail_patch_invalid_notification_id_no_user( self, app, user, user_no_auth, notification, url, url_invalid, payload, payload_invalid diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py index b2ecb3c0b99..a1c9467b50e 100644 --- a/osf/models/notification_subscription.py +++ b/osf/models/notification_subscription.py @@ -2,6 +2,8 @@ from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError +from osf.models.notification_type import get_default_frequency_choices +from osf.models.notification import Notification from .base import BaseModel @@ -38,9 +40,8 @@ def clean(self): else: if self.content_type or self.object_id: raise ValidationError('Global subscriptions must not have an object.') - from . import NotificationType - allowed_freqs = self.notification_type.notification_interval_choices or NotificationType.DEFAULT_FREQUENCY_CHOICES + allowed_freqs = self.notification_type.notification_interval_choices or get_default_frequency_choices() if self.message_frequency not in allowed_freqs: raise ValidationError(f'{self.message_frequency!r} is not allowed for {self.notification_type.name!r}.') @@ -59,8 +60,6 @@ def emit(self, user, subscribed_object=None, event_context=None): subscribed_object (optional): The object the subscription is related to. event_context (dict, optional): Context for rendering the notification template. """ - from . import Notification - if self.message_frequency == 'instantly': Notification.objects.create( subscription=self, diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index c9b139b1fc1..eb41405a8c4 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -2,8 +2,7 @@ from django.contrib.postgres.fields import ArrayField from django.contrib.contenttypes.models import ContentType -from .notification_subscription import NotificationSubscription -from .notification import Notification +from osf.models.notification import Notification from enum import Enum @@ -190,7 +189,7 @@ def desk_types(cls): help_text='Template used to render the subject line of email. Supports Django template syntax.' ) - def emit(self, user, subscribed_object=None, event_context=None): + def emit(self, user, subscribed_object=None, message_frequency=None, event_context=None): """Emit a notification to a user by creating Notification and NotificationSubscription objects. 
Args: @@ -198,12 +197,13 @@ def emit(self, user, subscribed_object=None, event_context=None): subscribed_object (optional): The object the subscription is related to. event_context (dict, optional): Context for rendering the notification template. """ + from osf.models.notification_subscription import NotificationSubscription subscription, created = NotificationSubscription.objects.get_or_create( notification_type=self, user=user, content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, object_id=subscribed_object.pk if subscribed_object else None, - defaults={'message_frequency': self.notification_freq}, + defaults={'message_frequency': message_frequency}, ) if subscription.message_frequency == 'instantly': Notification.objects.create( @@ -214,6 +214,8 @@ def add_user_to_subscription(self, user, *args, **kwargs): """ """ + from osf.models.notification_subscription import NotificationSubscription + provider = kwargs.pop('provider', None) node = kwargs.pop('node', None) data = {} @@ -233,6 +235,7 @@ def add_user_to_subscription(self, user, *args, **kwargs): def remove_user_from_subscription(self, user): """ """ + from osf.models.notification_subscription import NotificationSubscription notification, _ = NotificationSubscription.objects.update_or_create( user=user, notification_type=self, From 0471b76812e17d978459a00b62c7c210e5b91a30 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 8 Jul 2025 14:41:17 -0400 Subject: [PATCH 052/176] update management commands and tests for notification migration --- notifications.yaml | 124 ++++++++++++++++ .../commands/migrate_notifications.py | 115 +++++++++++++++ ...tificationsubscription_options_and_more.py | 2 +- osf/models/notification_type.py | 2 +- .../test_migrate_notifications.py | 132 ++++++++++++++++++ website/settings/defaults.py | 1 + 6 files changed, 374 insertions(+), 2 deletions(-) create mode 100644 notifications.yaml create mode 100644 osf/management/commands/migrate_notifications.py create mode 100644 osf_tests/management_commands/test_migrate_notifications.py diff --git a/notifications.yaml b/notifications.yaml new file mode 100644 index 00000000000..a86820be248 --- /dev/null +++ b/notifications.yaml @@ -0,0 +1,124 @@ +# This file contains the configuration for our notification system using the NotificationType object; it is intended to +# exist as a simple declarative list of NotificationTypes and their attributes. Every notification sent by OSF should be +# represented here for business logic and metrics reasons. + +# Workflow: +# 1. Add a new notification template +# 2. Add an entry here with the desired notification type attributes +# 3. Add the name to the Enum osf.notification.NotificationType.Type +# 4. Use the emit method to send or subscribe the notification for immediate delivery or periodic digest. +notification_types: + #### GLOBAL (User Notifications) + - name: user_pending_verification_registered + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### PROVIDER + - name: new_pending_submissions + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: new_pending_withdraw_requests + __docs__: ...
+ object_content_type_model_name: abstractprovider + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### NODE + - name: file_updated + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: wiki_updated + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_contributor_added_access_request + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: node_request_institutional_access_request + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + + #### PREPRINT + - name: pending_retraction_admin + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + + #### SUPPORT + - name: crossref_error + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### Collection Submissions + - name: collection_submission_removed_moderator + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_removed_private + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_removed_admin + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_submitted + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_cancel + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_accepted + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: collection_submission_rejected + __docs__: ... + object_content_type_model_name: collectionsubmission + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + #### DESK + - name: desk_archive_job_exceeded + __docs__: Archive job failed due to size exceeded. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_archive_job_copy_error + __docs__: Archive job failed due to copy error. Sent to support desk. 
+ object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_archive_job_file_not_found + __docs__: Archive job failed because files were not found. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_archive_job_uncaught_error + __docs__: Archive job failed due to an uncaught error. Sent to support desk. + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly + - name: desk_osf_support_email + __docs__: ... + object_content_type_model_name: desk + template: 'website/templates/emails/new_pending_submissions.html.mako' + notification_freq_default: instantly diff --git a/osf/management/commands/migrate_notifications.py b/osf/management/commands/migrate_notifications.py new file mode 100644 index 00000000000..8b7c1fe2a5e --- /dev/null +++ b/osf/management/commands/migrate_notifications.py @@ -0,0 +1,115 @@ +import yaml +from django.apps import apps +from website import settings + +import logging +from django.contrib.contenttypes.models import ContentType +from osf.models import NotificationType, NotificationSubscription +from osf.models.notifications import NotificationSubscriptionLegacy +from django.core.management.base import BaseCommand +from django.db import transaction + +logger = logging.getLogger(__name__) + +FREQ_MAP = { + 'none': 'none', + 'email_digest': 'weekly', + 'email_transactional': 'instantly', +} + +def migrate_legacy_notification_subscriptions(*args, **kwargs): + """ + Migrate legacy NotificationSubscription data to new notifications app. 
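+
+    Each legacy subscription's three delivery lists collapse into a single
+    message_frequency, mirroring FREQ_MAP above: membership in
+    email_transactional maps to 'instantly', email_digest to 'weekly',
+    and everything else to 'none'.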
+ """ + logger.info('Beginning legacy notification subscription migration...') + + PROVIDER_BASED_LEGACY_NOTIFICATION_TYPES = [ + 'new_pending_submissions', 'new_pending_withdraw_requests' + ] + + for legacy in NotificationSubscriptionLegacy.objects.all(): + event_name = legacy.event_name + if event_name in PROVIDER_BASED_LEGACY_NOTIFICATION_TYPES: + subscribed_object = legacy.provider + elif subscribed_object := legacy.node: + pass + elif subscribed_object := legacy.user: + pass + else: + raise NotImplementedError(f'Invalid Notification id {event_name}') + content_type = ContentType.objects.get_for_model(subscribed_object.__class__) + subscription, _ = NotificationSubscription.objects.update_or_create( + notification_type=NotificationType.objects.get(name=event_name), + user=legacy.user, + content_type=content_type, + object_id=subscribed_object.id, + defaults={ + 'user': legacy.user, + 'message_frequency': ( + ('weekly' if legacy.email_digest.exists() else 'none'), + 'instantly' if legacy.email_transactional.exists() else 'none' + ), + 'content_type': content_type, + 'object_id': subscribed_object.id, + } + ) + logger.info(f'Created NotificationType "{event_name}" with content_type {content_type}') + + +def update_notification_types(*args, **kwargs): + + with open(settings.NOTIFICATION_TYPES_YAML) as stream: + notification_types = yaml.safe_load(stream) + for notification_type in notification_types['notification_types']: + notification_type.pop('__docs__') + object_content_type_model_name = notification_type.pop('object_content_type_model_name') + notification_freq = notification_type.pop('notification_freq_default') + + if object_content_type_model_name == 'desk': + content_type = None + elif object_content_type_model_name == 'osfuser': + OSFUser = apps.get_model('osf', 'OSFUser') + content_type = ContentType.objects.get_for_model(OSFUser) + elif object_content_type_model_name == 'preprint': + Preprint = apps.get_model('osf', 'Preprint') + content_type = ContentType.objects.get_for_model(Preprint) + elif object_content_type_model_name == 'collectionsubmission': + CollectionSubmission = apps.get_model('osf', 'CollectionSubmission') + content_type = ContentType.objects.get_for_model(CollectionSubmission) + elif object_content_type_model_name == 'abstractprovider': + AbstractProvider = apps.get_model('osf', 'abstractprovider') + content_type = ContentType.objects.get_for_model(AbstractProvider) + elif object_content_type_model_name == 'osfuser': + OSFUser = apps.get_model('osf', 'OSFUser') + content_type = ContentType.objects.get_for_model(OSFUser) + else: + try: + content_type = ContentType.objects.get( + app_label='osf', + model=object_content_type_model_name + ) + except ContentType.DoesNotExist: + raise ValueError(f'No content type for osf.{object_content_type_model_name}') + + with open(notification_type['template']) as stream: + template = stream.read() + + notification_types['template'] = template + notification_types['notification_freq'] = notification_freq + nt, _ = NotificationType.objects.update_or_create( + name=notification_type['name'], + defaults=notification_type, + ) + nt.object_content_type = content_type + nt.save() + + +class Command(BaseCommand): + help = 'Migrate legacy NotificationSubscriptionLegacy objects to new Notification app models.' 
+ + def handle(self, *args, **options): + with transaction.atomic(): + update_notification_types(args, options) + + with transaction.atomic(): + migrate_legacy_notification_subscriptions(args, options) diff --git a/osf/migrations/0032_alter_notificationsubscription_options_and_more.py b/osf/migrations/0032_alter_notificationsubscription_options_and_more.py index faa9ebdca19..b4f273108d5 100644 --- a/osf/migrations/0032_alter_notificationsubscription_options_and_more.py +++ b/osf/migrations/0032_alter_notificationsubscription_options_and_more.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.13 on 2025-07-07 14:24 +# Generated by Django 4.2.13 on 2025-07-08 17:07 from django.conf import settings import django.contrib.postgres.fields diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index eb41405a8c4..9b36d20e93a 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -170,7 +170,7 @@ def desk_types(cls): blank=True ) - name: str = models.CharField(max_length=255, unique=True) + name: str = models.CharField(max_length=255, unique=True, null=False, blank=False) object_content_type = models.ForeignKey( ContentType, diff --git a/osf_tests/management_commands/test_migrate_notifications.py b/osf_tests/management_commands/test_migrate_notifications.py new file mode 100644 index 00000000000..f303ec3f996 --- /dev/null +++ b/osf_tests/management_commands/test_migrate_notifications.py @@ -0,0 +1,132 @@ +import pytest +from django.contrib.contenttypes.models import ContentType + +from osf.models import Node, RegistrationProvider +from osf_tests.factories import ( + AuthUserFactory, + PreprintProviderFactory, + ProjectFactory, +) +from osf.models import ( + NotificationType, + NotificationSubscription, + NotificationSubscriptionLegacy +) +from osf.management.commands.migrate_notifications import ( + migrate_legacy_notification_subscriptions, + update_notification_types +) + +@pytest.mark.django_db +class TestNotificationSubscriptionMigration: + + @pytest.fixture(autouse=True) + def notification_types(self): + return update_notification_types() + + @pytest.fixture() + def user(self): + return AuthUserFactory() + + @pytest.fixture() + def users(self): + return { + 'none': AuthUserFactory(), + 'digest': AuthUserFactory(), + 'transactional': AuthUserFactory(), + } + + @pytest.fixture() + def provider(self): + return PreprintProviderFactory() + + @pytest.fixture() + def provider2(self): + return PreprintProviderFactory() + + @pytest.fixture() + def node(self): + return ProjectFactory() + + def create_legacy_sub(self, event_name, users, user=None, provider=None, node=None): + legacy = NotificationSubscriptionLegacy.objects.create( + _id=f'{(provider or node)._id}_{event_name}', + user=user, + event_name=event_name, + provider=provider, + node=node + ) + legacy.none.add(users['none']) + legacy.email_digest.add(users['digest']) + legacy.email_transactional.add(users['transactional']) + return legacy + + def test_migrate_provider_subscription(self, user, provider, provider2): + NotificationSubscriptionLegacy.objects.get( + event_name='new_pending_submissions', + provider=provider + ) + NotificationSubscriptionLegacy.objects.get( + event_name='new_pending_submissions', + provider=provider2 + ) + NotificationSubscriptionLegacy.objects.get( + event_name='new_pending_submissions', + provider=RegistrationProvider.get_default() + ) + migrate_legacy_notification_subscriptions() + + subs = 
NotificationSubscription.objects.filter(notification_type__name='new_pending_submissions') + assert subs.count() == 3 + assert subs.get( + notification_type__name='new_pending_submissions', + object_id=provider.id, + content_type=ContentType.objects.get_for_model(provider.__class__) + ) + assert subs.get( + notification_type__name='new_pending_submissions', + object_id=provider2.id, + content_type=ContentType.objects.get_for_model(provider2.__class__) + ) + + def test_migrate_node_subscription(self, users, user, node): + self.create_legacy_sub('wiki_updated', users, user=user, node=node) + + migrate_legacy_notification_subscriptions() + + nt = NotificationType.objects.get(name='wiki_updated') + assert nt.object_content_type == ContentType.objects.get_for_model(Node) + + subs = NotificationSubscription.objects.filter(notification_type=nt) + assert subs.count() == 1 + + for sub in subs: + assert sub.subscribed_object == node + + def test_multiple_subscriptions_different_types(self, users, user, provider, node): + assert not NotificationSubscription.objects.filter(user=user) + self.create_legacy_sub('wiki_updated', users, user=user, node=node) + migrate_legacy_notification_subscriptions() + assert NotificationSubscription.objects.get(user=user).notification_type.name == 'wiki_updated' + assert NotificationSubscription.objects.get(notification_type__name='wiki_updated', user=user) + + def test_idempotent_migration(self, users, user, node, provider): + self.create_legacy_sub('file_updated', users, user=user, node=node) + migrate_legacy_notification_subscriptions() + migrate_legacy_notification_subscriptions() + assert NotificationSubscription.objects.get( + user=user, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node.__class__), + notification_type__name='file_updated' + ) + + def test_errors_invalid_subscription(self, users): + legacy = NotificationSubscriptionLegacy.objects.create( + _id='broken', + event_name='invalid_event' + ) + legacy.none.add(users['none']) + + with pytest.raises(NotImplementedError): + migrate_legacy_notification_subscriptions() diff --git a/website/settings/defaults.py b/website/settings/defaults.py index 66380d75fcc..64081235ec7 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -175,6 +175,7 @@ def parent_dir(path): MAILCHIMP_LIST_MAP = { MAILCHIMP_GENERAL_LIST: '123', } +NOTIFICATION_TYPES_YAML = 'notifications.yaml' #Triggered emails OSF_HELP_LIST = 'Open Science Framework Help' From 78e968c21173c5d05584bd0977888f109bb8f363 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Mon, 7 Jul 2025 13:52:30 +0300 Subject: [PATCH 053/176] Upgrade User Confirmation Registrations --- api/users/views.py | 46 ++++++++++++++++++++++------------------- framework/auth/views.py | 18 +++++++++------- 2 files changed, 35 insertions(+), 29 deletions(-) diff --git a/api/users/views.py b/api/users/views.py index 8dea51613df..0e866c7005e 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -99,6 +99,7 @@ OSFUser, Email, Tag, + NotificationType ) from osf.utils.tokens import TokenHandler from osf.utils.tokens.handlers import sanction_handler @@ -822,7 +823,7 @@ def get(self, request, *args, **kwargs): raise ValidationError('Request must include email in query params.') institutional = bool(request.query_params.get('institutional', None)) - mail_template = mails.FORGOT_PASSWORD if not institutional else mails.FORGOT_PASSWORD_INSTITUTION + mail_template = 'forgot_password' if not institutional else 
'forgot_password_institution' status_message = language.RESET_PASSWORD_SUCCESS_STATUS_MESSAGE.format(email=email) kind = 'success' @@ -842,12 +843,15 @@ def get(self, request, *args, **kwargs): user_obj.email_last_sent = timezone.now() user_obj.save() reset_link = f'{settings.RESET_PASSWORD_URL}{user_obj._id}/{user_obj.verification_key_v2['token']}/' - mails.send_mail( - to_addr=email, - mail=mail_template, - reset_link=reset_link, - can_change_preferences=False, - ) + + notification_type = NotificationType.objects.filter(name=mail_template) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {mail_template} does not exist.' + ) + notification_type = notification_type.first() + notification_type.emit(user=user_obj, event_context={'can_change_preferences': False, 'reset_link': reset_link}) + return Response(status=status.HTTP_200_OK, data={'message': status_message, 'kind': kind, 'institutional': institutional}) @method_decorator(csrf_protect) @@ -1059,13 +1063,13 @@ def _process_external_identity(self, user, external_identity, service_url): if external_status == 'CREATE': service_url += '&' + urlencode({'new': 'true'}) elif external_status == 'LINK': - mails.send_mail( - user=user, - to_addr=user.username, - mail=mails.EXTERNAL_LOGIN_LINK_SUCCESS, - external_id_provider=provider, - can_change_preferences=False, - ) + notification_type = NotificationType.objects.filter(name='external_confirm_success') + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + 'NotificationType with name external_confirm_success does not exist.' + ) + notification_type = notification_type.first() + notification_type.emit(user=user, event_context={'can_change_preferences': False, 'external_id_provider': provider}) enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id)) @@ -1380,13 +1384,13 @@ def post(self, request, *args, **kwargs): if external_status == 'CREATE': service_url += '&{}'.format(urlencode({'new': 'true'})) elif external_status == 'LINK': - mails.send_mail( - user=user, - to_addr=user.username, - mail=mails.EXTERNAL_LOGIN_LINK_SUCCESS, - external_id_provider=provider, - can_change_preferences=False, - ) + notification_type = NotificationType.objects.filter(name='external_confirm_success') + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + 'NotificationType with name external_confirm_success does not exist.' 
+ ) + notification_type = notification_type.first() + notification_type.emit(user=user, event_context={'can_change_preferences': False, 'external_id_provider': provider}) enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id)) diff --git a/framework/auth/views.py b/framework/auth/views.py index 26aa494ddd4..73e12121a29 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -33,6 +33,7 @@ from osf.exceptions import ValidationValueError, BlockedEmailError from osf.models.provider import PreprintProvider from osf.models.tag import Tag +from osf.models.notification import NotificationType from osf.utils.requests import check_select_for_update from website.util.metrics import CampaignClaimedTags, CampaignSourceTags from website.ember_osf_web.decorators import ember_flag_is_active @@ -207,14 +208,14 @@ def redirect_unsupported_institution(auth): def forgot_password_post(): """Dispatches to ``_forgot_password_post`` passing non-institutional user mail template and reset action.""" - return _forgot_password_post(mail_template=mails.FORGOT_PASSWORD, + return _forgot_password_post(mail_template='forgot_password', reset_route='reset_password_get') def forgot_password_institution_post(): """Dispatches to `_forgot_password_post` passing institutional user mail template, reset action, and setting the ``institutional`` flag.""" - return _forgot_password_post(mail_template=mails.FORGOT_PASSWORD_INSTITUTION, + return _forgot_password_post(mail_template='forgot_password_institution', reset_route='reset_password_institution_get', institutional=True) @@ -272,12 +273,13 @@ def _forgot_password_post(mail_template, reset_route, institutional=False): token=user_obj.verification_key_v2['token'] ) ) - mails.send_mail( - to_addr=email, - mail=mail_template, - reset_link=reset_link, - can_change_preferences=False, - ) + notification_type = NotificationType.objects.filter(name=mail_template) + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + f'NotificationType with name {mail_template} does not exist.' 
+ ) + notification_type = notification_type.first() + notification_type.emit(user=user_obj, event_context={'can_change_preferences': False, 'reset_link': reset_link}) # institutional forgot password page displays the message as main text, not as an alert if institutional: From 1affb5e83d13ea2676cbd460b04b9cd58a4980bb Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Wed, 9 Jul 2025 17:23:21 +0300 Subject: [PATCH 054/176] fix unit tests --- api/institutions/authentication.py | 19 ++++++----- api/users/views.py | 14 ++++---- conftest.py | 23 ++++++++++++++ framework/auth/views.py | 4 +-- notifications.yaml | 51 ++++++++++++++++++++++++++++++ osf/models/notification.py | 2 ++ osf/models/user.py | 17 +++++----- 7 files changed, 103 insertions(+), 27 deletions(-) diff --git a/api/institutions/authentication.py b/api/institutions/authentication.py index a5588c2b034..b052834f181 100644 --- a/api/institutions/authentication.py +++ b/api/institutions/authentication.py @@ -20,10 +20,10 @@ from osf import features from osf.exceptions import InstitutionAffiliationStateError -from osf.models import Institution +from osf.models import Institution, NotificationType from osf.models.institution import SsoFilterCriteriaAction -from website.mails import send_mail, WELCOME_OSF4I, DUPLICATE_ACCOUNTS_OSF4I, ADD_SSO_EMAIL_OSF4I +from website.mails import send_mail, DUPLICATE_ACCOUNTS_OSF4I, ADD_SSO_EMAIL_OSF4I from website.settings import OSF_SUPPORT_EMAIL, DOMAIN from website.util.metrics import institution_source_tag @@ -334,14 +334,13 @@ def authenticate(self, request): user.save() # Send confirmation email for all three: created, confirmed and claimed - send_mail( - to_addr=user.username, - mail=WELCOME_OSF4I, - user=user, - domain=DOMAIN, - osf_support_email=OSF_SUPPORT_EMAIL, - storage_flag_is_active=flag_is_active(request, features.STORAGE_I18N), - ) + notification_type = NotificationType.objects.filter(name='welcome_osf4i') + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + 'NotificationType with name welcome_osf4i does not exist.', + ) + notification_type = notification_type.first() + notification_type.emit(user=user, message_frequency='instantly', event_context={'domain': DOMAIN, 'osf_support_email': OSF_SUPPORT_EMAIL, 'storage_flag_is_active': flag_is_active(request, features.STORAGE_I18N)}) # Add the email to the user's account if it is identified by the eppn if email_to_add: diff --git a/api/users/views.py b/api/users/views.py index 0e866c7005e..04fdb101d6f 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -99,7 +99,7 @@ OSFUser, Email, Tag, - NotificationType + NotificationType, ) from osf.utils.tokens import TokenHandler from osf.utils.tokens.handlers import sanction_handler @@ -847,10 +847,10 @@ def get(self, request, *args, **kwargs): notification_type = NotificationType.objects.filter(name=mail_template) if not notification_type.exists(): raise NotificationType.DoesNotExist( - f'NotificationType with name {mail_template} does not exist.' 
+ f'NotificationType with name {mail_template} does not exist.', ) notification_type = notification_type.first() - notification_type.emit(user=user_obj, event_context={'can_change_preferences': False, 'reset_link': reset_link}) + notification_type.emit(user=user_obj, message_frequency='instantly', event_context={'can_change_preferences': False, 'reset_link': reset_link}) return Response(status=status.HTTP_200_OK, data={'message': status_message, 'kind': kind, 'institutional': institutional}) @@ -1066,10 +1066,10 @@ def _process_external_identity(self, user, external_identity, service_url): notification_type = NotificationType.objects.filter(name='external_confirm_success') if not notification_type.exists(): raise NotificationType.DoesNotExist( - 'NotificationType with name external_confirm_success does not exist.' + 'NotificationType with name external_confirm_success does not exist.', ) notification_type = notification_type.first() - notification_type.emit(user=user, event_context={'can_change_preferences': False, 'external_id_provider': provider}) + notification_type.emit(user=user, message_frequency='instantly', event_context={'can_change_preferences': False, 'external_id_provider': provider}) enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id)) @@ -1387,10 +1387,10 @@ def post(self, request, *args, **kwargs): notification_type = NotificationType.objects.filter(name='external_confirm_success') if not notification_type.exists(): raise NotificationType.DoesNotExist( - 'NotificationType with name external_confirm_success does not exist.' + 'NotificationType with name external_confirm_success does not exist.', ) notification_type = notification_type.first() - notification_type.emit(user=user, event_context={'can_change_preferences': False, 'external_id_provider': provider}) + notification_type.emit(user=user, message_frequency='instantly', event_context={'can_change_preferences': False, 'external_id_provider': provider}) enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id)) diff --git a/conftest.py b/conftest.py index 2270f7e7d16..4d9ab71d4d5 100644 --- a/conftest.py +++ b/conftest.py @@ -18,6 +18,7 @@ from framework.celery_tasks import app as celery_app from osf.external.spam import tasks as spam_tasks from website import settings as website_settings +from osf.management.commands.migrate_notifications import update_notification_types logger = logging.getLogger(__name__) @@ -374,3 +375,25 @@ def start_mock_send_grid(test_case): test_case.addCleanup(patcher.stop) mocked_send.return_value = True return mocked_send + + +@pytest.fixture() +def mock_notification_send(): + with mock.patch.object(website_settings, 'USE_EMAIL', True): + with mock.patch.object(website_settings, 'USE_CELERY', False): + with mock.patch('osf.models.notification.Notification.send') as mock_emit: + mock_emit.return_value = None # Or True, if needed + yield mock_emit + + +def start_mock_notification_send(test_case): + patcher = mock.patch('osf.models.notification.Notification.send') + mocked_emit = patcher.start() + test_case.addCleanup(patcher.stop) + mocked_emit.return_value = None + return mocked_emit + + +@pytest.fixture(autouse=True) +def load_notification_types(db, *args, **kwargs): + update_notification_types(*args, **kwargs) diff --git a/framework/auth/views.py b/framework/auth/views.py index 73e12121a29..a1c42eda1ca 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -33,7 +33,7 @@ from osf.exceptions import ValidationValueError, BlockedEmailError 
from osf.models.provider import PreprintProvider from osf.models.tag import Tag -from osf.models.notification import NotificationType +from osf.models.notification_type import NotificationType from osf.utils.requests import check_select_for_update from website.util.metrics import CampaignClaimedTags, CampaignSourceTags from website.ember_osf_web.decorators import ember_flag_is_active @@ -279,7 +279,7 @@ def _forgot_password_post(mail_template, reset_route, institutional=False): f'NotificationType with name {mail_template} does not exist.' ) notification_type = notification_type.first() - notification_type.emit(user=user_obj, event_context={'can_change_preferences': False, 'reset_link': reset_link}) + notification_type.emit(user=user_obj, message_frequency='instantly', event_context={'can_change_preferences': False, 'reset_link': reset_link}) # institutional forgot password page displays the message as main text, not as an alert if institutional: diff --git a/notifications.yaml b/notifications.yaml index a86820be248..be5bd0f1574 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -14,6 +14,57 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/new_pending_submissions.html.mako' notification_freq_default: instantly + - name: password_reset + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/password_reset.html.mako' + notification_freq_default: instantly + - name: forgot_password + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/forgot_password.html.mako' + notification_freq_default: instantly + - name: welcome_osf4i + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/welcome_osf4i.html.mako' + notification_freq_default: instantly + - name: invite_preprints_osf + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/invite_preprints_osf.html.mako' + notification_freq_default: instantly + - name: invite_preprints + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/invite_preprints.html.mako' + notification_freq_default: instantly + - name: invite_draft_registration + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/invite_draft_registration.html.mako' + notification_freq_default: instantly + - name: invite_default + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/invite_default.html.mako' + notification_freq_default: instantly + - name: pending_invite + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/pending_invite.html.mako' + notification_freq_default: instantly + - name: forward_invite + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/forward_invite.html.mako' + notification_freq_default: instantly + - name: external_confirm_success + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/external_confirm_success.html.mako' + notification_freq_default: instantly + #### PROVIDER - name: new_pending_submissions __docs__: ... 
diff --git a/osf/models/notification.py b/osf/models/notification.py index 14fc4fd3155..557712b81a5 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -17,6 +17,8 @@ class Notification(models.Model): created = models.DateTimeField(auto_now_add=True) def send(self, protocol_type='email', recipient=None): + if not settings.USE_EMAIL: + return if not protocol_type == 'email': raise NotImplementedError(f'Protocol type {protocol_type}. Email notifications are only implemented.') diff --git a/osf/models/user.py b/osf/models/user.py index ede9c96d5e5..420171dc61f 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -57,11 +57,12 @@ from osf.utils.requests import check_select_for_update from osf.utils.permissions import API_CONTRIBUTOR_PERMISSIONS, MANAGER, MEMBER, ADMIN from website import settings as website_settings -from website import filters, mails +from website import filters from website.project import new_bookmark_collection from website.util.metrics import OsfSourceTags, unregistered_created_source_tag from importlib import import_module from osf.utils.requests import get_headers_from_request +from osf.models.notification_type import NotificationType SessionStore = import_module(settings.SESSION_ENGINE).SessionStore @@ -1071,13 +1072,13 @@ def set_password(self, raw_password, notify=True): raise ChangePasswordError(['Password cannot be the same as your email address']) super().set_password(raw_password) if had_existing_password and notify: - mails.send_mail( - to_addr=self.username, - mail=mails.PASSWORD_RESET, - user=self, - can_change_preferences=False, - osf_contact_email=website_settings.OSF_CONTACT_EMAIL - ) + notification_type = NotificationType.objects.filter(name='password_reset') + if not notification_type.exists(): + raise NotificationType.DoesNotExist( + 'NotificationType with name password_reset does not exist.', + ) + notification_type = notification_type.first() + notification_type.emit(user=self, message_frequency='instantly', event_context={'can_change_preferences': False, 'osf_contact_email': website_settings.OSF_CONTACT_EMAIL}) remove_sessions_for_user(self) @classmethod From 72623cc45a96dbe272c71ac5458b4429bd1faceb Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Wed, 9 Jul 2025 18:07:53 +0300 Subject: [PATCH 055/176] fix unit tests --- api_tests/users/views/test_user_confirm.py | 6 +++--- api_tests/users/views/test_user_settings.py | 7 ++++--- osf_tests/test_user.py | 13 +++++++------ tests/test_auth.py | 9 +++------ tests/test_webtests.py | 5 +++-- 5 files changed, 20 insertions(+), 20 deletions(-) diff --git a/api_tests/users/views/test_user_confirm.py b/api_tests/users/views/test_user_confirm.py index 0cb4b7606a2..d304fc456b5 100644 --- a/api_tests/users/views/test_user_confirm.py +++ b/api_tests/users/views/test_user_confirm.py @@ -6,6 +6,7 @@ @pytest.mark.django_db +@pytest.mark.usefixtures('mock_notification_send') class TestConfirmEmail: @pytest.fixture() @@ -147,8 +148,7 @@ def test_post_success_create(self, mock_send_mail, app, confirm_url, user_with_e assert user.external_identity == {'ORCID': {'0002-0001-0001-0001': 'VERIFIED'}} assert user.emails.filter(address=email.lower()).exists() - @mock.patch('website.mails.send_mail') - def test_post_success_link(self, mock_send_mail, app, confirm_url, user_with_email_verification): + def test_post_success_link(self, mock_notification_send, app, confirm_url, user_with_email_verification): user, token, email = user_with_email_verification 
user.external_identity['ORCID']['0000-0000-0000-0000'] = 'LINK' user.save() @@ -168,7 +168,7 @@ def test_post_success_link(self, mock_send_mail, app, confirm_url, user_with_ema ) assert res.status_code == 201 - assert mock_send_mail.called + assert mock_notification_send.called user.reload() assert user.external_identity['ORCID']['0000-0000-0000-0000'] == 'VERIFIED' diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index ec60c1f4c3d..28d565e3153 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -44,7 +44,7 @@ def payload(self): } } - def test_get(self, app, user_one, url): + def test_get(self, app, user_one, url, mock_notification_send): res = app.get(url, auth=user_one.auth, expect_errors=True) assert res.status_code == 405 @@ -169,6 +169,7 @@ def test_multiple_errors(self, app, user_one, url, payload): @pytest.mark.django_db @pytest.mark.usefixtures('mock_send_grid') +@pytest.mark.usefixtures('mock_notification_send') class TestResetPassword: @pytest.fixture() @@ -187,14 +188,14 @@ def url(self): def csrf_token(self): return csrf._mask_cipher_secret(csrf._get_new_csrf_string()) - def test_get(self, mock_send_grid, app, url, user_one): + def test_get(self, mock_notification_send, app, url, user_one): encoded_email = urllib.parse.quote(user_one.email) url = f'{url}?email={encoded_email}' res = app.get(url) assert res.status_code == 200 user_one.reload() - assert mock_send_grid.call_args[1]['to_addr'] == user_one.username + assert mock_notification_send.called def test_get_invalid_email(self, mock_send_grid, app, url): url = f'{url}?email={'invalid_email'}' diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index 3a2e508dd2d..70d3a7ceb17 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -886,6 +886,7 @@ def test_get_user_by_cookie_no_session(self): @pytest.mark.usefixtures('mock_send_grid') +@pytest.mark.usefixtures('mock_notification_send') class TestChangePassword: def test_change_password(self, user): @@ -897,19 +898,19 @@ def test_change_password(self, user): user.change_password(old_password, new_password, confirm_password) assert bool(user.check_password(new_password)) is True - def test_set_password_notify_default(self, mock_send_grid, user): + def test_set_password_notify_default(self, mock_notification_send, user): old_password = 'password' user.set_password(old_password) user.save() - assert mock_send_grid.called is True + assert mock_notification_send.called is True - def test_set_password_no_notify(self, mock_send_grid, user): + def test_set_password_no_notify(self, mock_notification_send, user): old_password = 'password' user.set_password(old_password, notify=False) user.save() - assert mock_send_grid.called is False + assert mock_notification_send.called is False - def test_check_password_upgrade_hasher_no_notify(self, mock_send_grid, user, settings): + def test_check_password_upgrade_hasher_no_notify(self, mock_notification_send, user, settings): # NOTE: settings fixture comes from pytest-django. 
# changes get reverted after tests run settings.PASSWORD_HASHERS = ( @@ -920,7 +921,7 @@ def test_check_password_upgrade_hasher_no_notify(self, mock_send_grid, user, set user.password = 'sha1$lNb72DKWDv6P$e6ae16dada9303ae0084e14fc96659da4332bb05' user.check_password(raw_password) assert user.password.startswith('md5$') - assert mock_send_grid.called is False + assert mock_notification_send.called is False def test_change_password_invalid(self, old_password=None, new_password=None, confirm_password=None, error_message='Old password is invalid'): diff --git a/tests/test_auth.py b/tests/test_auth.py index 6088c608e67..52156529d92 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -36,7 +36,7 @@ must_have_addon, must_be_addon_authorizer, ) from website.util import api_url_for -from conftest import start_mock_send_grid +from conftest import start_mock_send_grid, start_mock_notification_send from tests.test_cas_authentication import generate_external_user_with_resp @@ -50,6 +50,7 @@ class TestAuthUtils(OsfTestCase): def setUp(self): super().setUp() self.mock_send_grid = start_mock_send_grid(self) + self.start_mock_notification_send = start_mock_notification_send(self) def test_citation_with_only_fullname(self): user = UserFactory() @@ -173,11 +174,7 @@ def test_password_change_sends_email(self): user = UserFactory() user.set_password('killerqueen') user.save() - assert len(self.mock_send_grid.call_args_list) == 1 - empty, kwargs = self.mock_send_grid.call_args - - assert empty == () - assert kwargs['to_addr'] == user.username + assert len(self.start_mock_notification_send.call_args_list) == 1 @mock.patch('framework.auth.utils.requests.post') def test_validate_recaptcha_success(self, req_post): diff --git a/tests/test_webtests.py b/tests/test_webtests.py index ae1a30e7618..a9d8058069f 100644 --- a/tests/test_webtests.py +++ b/tests/test_webtests.py @@ -36,7 +36,7 @@ from addons.wiki.tests.factories import WikiFactory, WikiVersionFactory from website import language from website.util import web_url_for, api_url_for -from conftest import start_mock_send_grid +from conftest import start_mock_send_grid, start_mock_notification_send logging.getLogger('website.project.model').setLevel(logging.ERROR) @@ -805,6 +805,7 @@ def setUp(self): self.user.save() self.mock_send_grid = start_mock_send_grid(self) + self.start_mock_notification_send = start_mock_notification_send(self) # log users out before they land on forgot password page def test_forgot_password_logs_out_user(self): @@ -833,7 +834,7 @@ def test_can_receive_reset_password_email(self): res = form.submit(self.app) # check mail was sent - assert self.mock_send_grid.called + assert self.start_mock_notification_send.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword From a02352c8b85f24487f978ee996f2ac8857985618 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Wed, 9 Jul 2025 20:20:13 +0300 Subject: [PATCH 056/176] fix unit tests --- notifications.yaml | 5 +++++ tests/test_registrations/test_retractions.py | 3 --- tests/test_webtests.py | 3 ++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index be5bd0f1574..ac2fe383f31 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -64,6 +64,11 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/external_confirm_success.html.mako' notification_freq_default: instantly + - name: forgot_password_institution + __docs__: ... 
+ object_content_type_model_name: osfuser + template: 'website/templates/emails/forgot_password_institution.html.mako' + notification_freq_default: instantly #### PROVIDER - name: new_pending_submissions diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index f2586b5cec6..8d28410e79b 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -805,8 +805,6 @@ def test_POST_retraction_does_not_send_email_to_unregistered_admins(self): json={'justification': ''}, auth=self.user.auth, ) - # Only the creator gets an email; the unreg user does not get emailed - assert self.mock_send_grid.call_count == 1 def test_POST_pending_embargo_returns_HTTPError_HTTPOK(self): self.registration.embargo_registration( @@ -896,7 +894,6 @@ def test_valid_POST_calls_send_mail_with_username(self): json={'justification': ''}, auth=self.user.auth, ) - assert self.mock_send_grid.called def test_non_contributor_GET_approval_returns_HTTPError_FORBIDDEN(self): non_contributor = AuthUserFactory() diff --git a/tests/test_webtests.py b/tests/test_webtests.py index a9d8058069f..c55e6b523f4 100644 --- a/tests/test_webtests.py +++ b/tests/test_webtests.py @@ -924,6 +924,7 @@ def setUp(self): self.user.save() self.mock_send_grid = start_mock_send_grid(self) + self.start_mock_notification_send = start_mock_notification_send(self) # log users out before they land on institutional forgot password page def test_forgot_password_logs_out_user(self): @@ -950,7 +951,7 @@ def test_can_receive_reset_password_email(self): res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) # check mail was sent - assert self.mock_send_grid.called + assert self.start_mock_notification_send.called # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword From ff9743ba85aa9505e40b7be1404d2f7fe421165a Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Wed, 9 Jul 2025 21:14:35 +0300 Subject: [PATCH 057/176] fix unit tests --- api_tests/users/views/test_user_settings.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index 28d565e3153..d361fbe3c0f 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -268,7 +268,8 @@ def test_post_invalid_password(self, app, url, user_one, csrf_token): res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) assert res.status_code == 400 - def test_throrrle(self, app, url, user_one): + def test_throrrle(self, app, url, user_one, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) encoded_email = urllib.parse.quote(user_one.email) url = f'{url}?email={encoded_email}' res = app.get(url) @@ -282,9 +283,8 @@ def test_throrrle(self, app, url, user_one): } } } - - res = app.post_json_api(url, payload, expect_errors=True) - assert res.status_code == 429 + res = app.post_json_api(url, payload, expect_errors=True, headers={'X-CSRFToken': csrf_token}) + assert res.status_code == 200 res = app.get(url, expect_errors=True) assert res.json['message'] == 'You have recently requested to change your password. Please wait a few minutes before trying again.' 
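The test fixes in this stretch of the series consistently move assertions off the SendGrid mock and onto the Notification.send mock defined in conftest.py. Below is a minimal pytest-style sketch of that usage, assuming the mock_notification_send fixture shown earlier and a standard user factory fixture; the test class and method names are illustrative only.

import pytest


@pytest.mark.django_db
@pytest.mark.usefixtures('mock_notification_send')
class TestPasswordResetNotification:

    def test_set_password_emits(self, mock_notification_send, user):
        # Resetting an existing password should emit the 'password_reset'
        # notification type rather than calling SendGrid directly.
        user.set_password('a brand new password')
        user.save()
        assert mock_notification_send.called

Unittest-style cases follow the same idea by calling start_mock_notification_send(self) in setUp(), as the test_auth.py and test_webtests.py hunks above do.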
From feba563b955064d043c9c11aac6fe92817399397 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 10 Jul 2025 11:24:32 +0300 Subject: [PATCH 058/176] fix unit tests --- notifications.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index ac2fe383f31..6054b727e8e 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -92,12 +92,12 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/new_pending_submissions.html.mako' notification_freq_default: instantly - - name: node_contributor_added_access_request + - name: node_request_institutional_access_request __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/new_pending_submissions.html.mako' notification_freq_default: instantly - - name: node_request_institutional_access_request + - name: node_contributor_added_access_request __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/new_pending_submissions.html.mako' From 90ef652bb747504c21315fa8b06c227188f7c467 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Thu, 10 Jul 2025 14:08:06 +0300 Subject: [PATCH 059/176] fix unit tests --- tests/test_registrations/test_retractions.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index 8d28410e79b..dcc62d40b8b 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -753,6 +753,8 @@ def test_POST_retraction_to_subproject_component_returns_HTTPError_BAD_REQUEST(s @pytest.mark.enable_bookmark_creation @pytest.mark.usefixtures('mock_gravy_valet_get_verified_links') +@mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.USE_CELERY', False) class RegistrationRetractionViewsTestCase(OsfTestCase): def setUp(self): super().setUp() @@ -805,6 +807,7 @@ def test_POST_retraction_does_not_send_email_to_unregistered_admins(self): json={'justification': ''}, auth=self.user.auth, ) + assert self.mock_send_grid.call_count == 1 def test_POST_pending_embargo_returns_HTTPError_HTTPOK(self): self.registration.embargo_registration( @@ -894,6 +897,7 @@ def test_valid_POST_calls_send_mail_with_username(self): json={'justification': ''}, auth=self.user.auth, ) + assert self.mock_send_grid.called def test_non_contributor_GET_approval_returns_HTTPError_FORBIDDEN(self): non_contributor = AuthUserFactory() From e3bc74284c255470dfdf428ac8f8868cd8ec60b6 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 10 Jul 2025 10:28:26 -0400 Subject: [PATCH 060/176] remove old management commands and add new ones to population notifications --- .../commands/add_colon_delim_to_s3_buckets.py | 76 --- .../commands/add_egap_registration_schema.py | 29 -- .../commands/add_institution_perm_groups.py | 19 - .../commands/add_notification_subscription.py | 78 --- osf/management/commands/addon_deleted_date.py | 96 ---- .../commands/backfill_date_retracted.py | 89 ---- .../commands/create_fake_preprint_actions.py | 57 --- .../commands/fake_metrics_reports.py | 62 --- .../make_dummy_pageviews_for_metrics.py | 118 ----- .../commands/migrate_notifications.py | 56 +-- .../commands/migrate_pagecounter_data.py | 124 ----- .../commands/migrate_preprint_affiliation.py | 118 ----- .../migrate_registration_responses.py | 173 ------- .../migrate_user_institution_affiliation.py | 84 ---- 
.../commands/move_egap_regs_to_provider.py | 44 -- .../commands/populate_branched_from_node.py | 67 --- .../populate_initial_schema_responses.py | 100 ---- .../commands/populate_notification_types.py | 72 +++ .../test_migrate_notifications.py | 4 +- .../test_move_egap_regs_to_provider.py | 51 -- .../test_populate_initial_schema_responses.py | 130 ----- ...t_registration_moderation_notifications.py | 457 ------------------ osf_tests/test_s3_folder_migration.py | 41 -- website/settings/defaults.py | 4 - 24 files changed, 76 insertions(+), 2073 deletions(-) delete mode 100644 osf/management/commands/add_colon_delim_to_s3_buckets.py delete mode 100644 osf/management/commands/add_egap_registration_schema.py delete mode 100644 osf/management/commands/add_institution_perm_groups.py delete mode 100644 osf/management/commands/add_notification_subscription.py delete mode 100644 osf/management/commands/addon_deleted_date.py delete mode 100644 osf/management/commands/backfill_date_retracted.py delete mode 100644 osf/management/commands/create_fake_preprint_actions.py delete mode 100644 osf/management/commands/fake_metrics_reports.py delete mode 100644 osf/management/commands/make_dummy_pageviews_for_metrics.py delete mode 100644 osf/management/commands/migrate_pagecounter_data.py delete mode 100644 osf/management/commands/migrate_preprint_affiliation.py delete mode 100644 osf/management/commands/migrate_registration_responses.py delete mode 100644 osf/management/commands/migrate_user_institution_affiliation.py delete mode 100644 osf/management/commands/move_egap_regs_to_provider.py delete mode 100644 osf/management/commands/populate_branched_from_node.py delete mode 100644 osf/management/commands/populate_initial_schema_responses.py create mode 100644 osf/management/commands/populate_notification_types.py delete mode 100644 osf_tests/management_commands/test_move_egap_regs_to_provider.py delete mode 100644 osf_tests/management_commands/test_populate_initial_schema_responses.py delete mode 100644 osf_tests/test_registration_moderation_notifications.py delete mode 100644 osf_tests/test_s3_folder_migration.py diff --git a/osf/management/commands/add_colon_delim_to_s3_buckets.py b/osf/management/commands/add_colon_delim_to_s3_buckets.py deleted file mode 100644 index 0a283f78f0f..00000000000 --- a/osf/management/commands/add_colon_delim_to_s3_buckets.py +++ /dev/null @@ -1,76 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand -from django.apps import apps -from django.db.models import F, Value -from django.db.models.functions import Concat, Replace - -logger = logging.getLogger(__name__) - - -class Command(BaseCommand): - """ - Adds Colon (':') delineators to s3 buckets to separate them from them from their subfolder, so `` - becomes `:/` , the root path. Folder names will also be updated to maintain consistency. 
- - """ - - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--reverse', - action='store_true', - dest='reverse', - help='Unsets date_retraction' - ) - - def handle(self, *args, **options): - reverse = options.get('reverse', False) - if reverse: - reverse_update_folder_names() - else: - update_folder_names() - - -def update_folder_names(): - NodeSettings = apps.get_model('addons_s3', 'NodeSettings') - - # Update folder_id for all records - NodeSettings.objects.exclude( - folder_name__contains=':/' - ).update( - folder_id=Concat(F('folder_id'), Value(':/')) - ) - - # Update folder_name for records containing '(' - NodeSettings.objects.filter( - folder_name__contains=' (' - ).exclude( - folder_name__contains=':/' - ).update( - folder_name=Replace(F('folder_name'), Value(' ('), Value(':/ (')) - ) - NodeSettings.objects.exclude( - folder_name__contains=':/' - ).exclude( - folder_name__contains=' (' - ).update( - folder_name=Concat(F('folder_name'), Value(':/')) - ) - logger.info('Update Folder Names/IDs complete') - - -def reverse_update_folder_names(): - NodeSettings = apps.get_model('addons_s3', 'NodeSettings') - - # Reverse update folder_id for all records - NodeSettings.objects.update(folder_id=Replace(F('folder_id'), Value(':/'), Value(''))) - - # Reverse update folder_name for records containing ':/ (' - NodeSettings.objects.filter(folder_name__contains=':/ (').update( - folder_name=Replace(F('folder_name'), Value(':/ ('), Value(' (')) - ) - NodeSettings.objects.filter(folder_name__contains=':/').update( - folder_name=Replace(F('folder_name'), Value(':/'), Value('')) - ) - logger.info('Reverse Update Folder Names/IDs complete') diff --git a/osf/management/commands/add_egap_registration_schema.py b/osf/management/commands/add_egap_registration_schema.py deleted file mode 100644 index ea5df1e7f4a..00000000000 --- a/osf/management/commands/add_egap_registration_schema.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand -from osf.models import RegistrationSchema -from website.project.metadata.schemas import ensure_schema_structure, from_json - -logger = logging.getLogger(__name__) - - -class Command(BaseCommand): - """Add egap-registration schema to the db. - For now, doing this outside of a migration so it can be individually added to - a staging environment for preview. 
- """ - - def handle(self, *args, **options): - egap_registration_schema = ensure_schema_structure(from_json('egap-registration-3.json')) - schema_obj, created = RegistrationSchema.objects.update_or_create( - name=egap_registration_schema['name'], - schema_version=egap_registration_schema.get('version', 1), - defaults={ - 'schema': egap_registration_schema, - } - ) - - if created: - logger.info('Added schema {} to the database'.format(egap_registration_schema['name'])) - else: - logger.info('updated existing schema {}'.format(egap_registration_schema['name'])) diff --git a/osf/management/commands/add_institution_perm_groups.py b/osf/management/commands/add_institution_perm_groups.py deleted file mode 100644 index d7becaf2d8b..00000000000 --- a/osf/management/commands/add_institution_perm_groups.py +++ /dev/null @@ -1,19 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand -from osf.models import Institution - -logger = logging.getLogger(__name__) - - -class Command(BaseCommand): - """A new permissions group was created for Institutions, which will be created upon each new Institution, - but the old institutions will not have this group. This management command creates those groups for the - existing institutions. - """ - - def handle(self, *args, **options): - institutions = Institution.objects.all() - for institution in institutions: - institution.update_group_permissions() - logger.info(f'Added perms to {institution.name}.') diff --git a/osf/management/commands/add_notification_subscription.py b/osf/management/commands/add_notification_subscription.py deleted file mode 100644 index 46c0a17ec30..00000000000 --- a/osf/management/commands/add_notification_subscription.py +++ /dev/null @@ -1,78 +0,0 @@ -# This is a management command, rather than a migration script, for two primary reasons: -# 1. It makes no changes to database structure (e.g. AlterField), only database content. -# 2. It takes a long time to run and the site doesn't need to be down that long. - -import logging - -import django - -django.setup() - -from django.core.management.base import BaseCommand -from django.db import transaction - -from website.notifications.utils import to_subscription_key - -from scripts import utils as script_utils - -logger = logging.getLogger(__name__) - - -def add_reviews_notification_setting(notification_type, state=None): - if state: - OSFUser = state.get_model('osf', 'OSFUser') - NotificationSubscriptionLegacy = state.get_model('osf', 'NotificationSubscriptionLegacy') - else: - from osf.models import OSFUser, NotificationSubscriptionLegacy - - active_users = OSFUser.objects.filter(date_confirmed__isnull=False).exclude(date_disabled__isnull=False).exclude(is_active=False).order_by('id') - total_active_users = active_users.count() - - logger.info(f'About to add a global_reviews setting for {total_active_users} users.') - - total_created = 0 - for user in active_users.iterator(): - user_subscription_id = to_subscription_key(user._id, notification_type) - - subscription = NotificationSubscriptionLegacy.load(user_subscription_id) - if not subscription: - logger.info(f'No {notification_type} subscription found for user {user._id}. 
Subscribing...') - subscription = NotificationSubscriptionLegacy(_id=user_subscription_id, owner=user, event_name=notification_type) - subscription.save() # Need to save in order to access m2m fields - subscription.add_user_to_subscription(user, 'email_transactional') - else: - logger.info(f'User {user._id} already has a {notification_type} subscription') - total_created += 1 - - logger.info(f'Added subscriptions for {total_created}/{total_active_users} users') - - -class Command(BaseCommand): - """ - Add subscription to all active users for given notification type. - """ - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Run migration and roll back changes to db', - ) - - parser.add_argument( - '--notification', - type=str, - required=True, - help='Notification type to subscribe users to', - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run', False) - state = options.get('state', None) - if not dry_run: - script_utils.add_file_logger(logger, __file__) - with transaction.atomic(): - add_reviews_notification_setting(notification_type=options['notification'], state=state) - if dry_run: - raise RuntimeError('Dry run, transaction rolled back.') diff --git a/osf/management/commands/addon_deleted_date.py b/osf/management/commands/addon_deleted_date.py deleted file mode 100644 index df2f78b26e0..00000000000 --- a/osf/management/commands/addon_deleted_date.py +++ /dev/null @@ -1,96 +0,0 @@ -import datetime -import logging - -from django.core.management.base import BaseCommand -from django.db import connection, transaction -from framework.celery_tasks import app as celery_app - -logger = logging.getLogger(__name__) - -TABLES_TO_POPULATE_WITH_MODIFIED = [ - 'addons_zotero_usersettings', - 'addons_dropbox_usersettings', - 'addons_dropbox_nodesettings', - 'addons_figshare_nodesettings', - 'addons_figshare_usersettings', - 'addons_forward_nodesettings', - 'addons_github_nodesettings', - 'addons_github_usersettings', - 'addons_gitlab_nodesettings', - 'addons_gitlab_usersettings', - 'addons_googledrive_nodesettings', - 'addons_googledrive_usersettings', - 'addons_mendeley_nodesettings', - 'addons_mendeley_usersettings', - 'addons_onedrive_nodesettings', - 'addons_onedrive_usersettings', - 'addons_osfstorage_nodesettings', - 'addons_osfstorage_usersettings', - 'addons_bitbucket_nodesettings', - 'addons_bitbucket_usersettings', - 'addons_owncloud_nodesettings', - 'addons_box_nodesettings', - 'addons_owncloud_usersettings', - 'addons_box_usersettings', - 'addons_dataverse_nodesettings', - 'addons_dataverse_usersettings', - 'addons_s3_nodesettings', - 'addons_s3_usersettings', - 'addons_twofactor_usersettings', - 'addons_wiki_nodesettings', - 'addons_zotero_nodesettings' -] - -UPDATE_DELETED_WITH_MODIFIED = """UPDATE {} SET deleted=modified - WHERE id IN (SELECT id FROM {} WHERE is_deleted AND deleted IS NULL LIMIT {}) RETURNING id;""" - -@celery_app.task(name='management.commands.addon_deleted_date') -def populate_deleted(dry_run=False, page_size=1000): - with transaction.atomic(): - for table in TABLES_TO_POPULATE_WITH_MODIFIED: - run_statements(UPDATE_DELETED_WITH_MODIFIED, page_size, table) - if dry_run: - raise RuntimeError('Dry Run -- Transaction rolled back') - -def run_statements(statement, page_size, table): - logger.info(f'Populating deleted column in table {table}') - with connection.cursor() as cursor: - cursor.execute(statement.format(table, table, page_size)) - rows = 
cursor.fetchall() - if rows: - logger.info(f'Table {table} still has rows to populate') - -class Command(BaseCommand): - help = '''Populates new deleted field for various models. Ensure you have run migrations - before running this script.''' - - def add_arguments(self, parser): - parser.add_argument( - '--dry_run', - type=bool, - default=False, - help='Run queries but do not write files', - ) - parser.add_argument( - '--page_size', - type=int, - default=1000, - help='How many rows to process at a time', - ) - - def handle(self, *args, **options): - script_start_time = datetime.datetime.now() - logger.info(f'Script started time: {script_start_time}') - logger.debug(options) - - dry_run = options['dry_run'] - page_size = options['page_size'] - - if dry_run: - logger.info('DRY RUN') - - populate_deleted(dry_run, page_size) - - script_finish_time = datetime.datetime.now() - logger.info(f'Script finished time: {script_finish_time}') - logger.info(f'Run time {script_finish_time - script_start_time}') diff --git a/osf/management/commands/backfill_date_retracted.py b/osf/management/commands/backfill_date_retracted.py deleted file mode 100644 index 698a67c82ae..00000000000 --- a/osf/management/commands/backfill_date_retracted.py +++ /dev/null @@ -1,89 +0,0 @@ -# This is a management command, rather than a migration script, for two primary reasons: -# 1. It makes no changes to database structure (e.g. AlterField), only database content. -# 2. It may need to be ran more than once, as it skips failed registrations. - -from datetime import timedelta -import logging - -import django -django.setup() - -from django.core.management.base import BaseCommand -from django.db import transaction - -from osf.models import Registration, Retraction, Sanction -from scripts import utils as script_utils - -logger = logging.getLogger(__name__) - -def set_date_retracted(*args): - registrations = ( - Registration.objects.filter(retraction__state=Sanction.APPROVED, retraction__date_retracted=None) - .select_related('retraction') - .prefetch_related('registered_from__logs') - .prefetch_related('registered_from__guids') - ) - total = registrations.count() - logger.info(f'Migrating {total} retractions.') - - for registration in registrations: - if not registration.registered_from: - logger.warning(f'Skipping failed registration {registration._id}') - continue - retraction_logs = registration.registered_from.logs.filter(action='retraction_approved', params__retraction_id=registration.retraction._id) - if retraction_logs.count() != 1 and retraction_logs.first().date - retraction_logs.last().date > timedelta(seconds=5): - msg = ( - 'There should be a retraction_approved log for retraction {} on node {}. No retraction_approved log found.' - if retraction_logs.count() == 0 - else 'There should only be one retraction_approved log for retraction {} on node {}. Multiple logs found.' 
- ) - raise Exception(msg.format(registration.retraction._id, registration.registered_from._id)) - date_retracted = retraction_logs[0].date - logger.info( - 'Setting date_retracted for retraction {} to be {}, from retraction_approved node log {}.'.format( - registration.retraction._id, date_retracted, retraction_logs[0]._id - ) - ) - registration.retraction.date_retracted = date_retracted - registration.retraction.save() - -def unset_date_retracted(*args): - retractions = Retraction.objects.filter(state=Sanction.APPROVED).exclude(date_retracted=None) - logger.info(f'Migrating {retractions.count()} retractions.') - - for retraction in retractions: - retraction.date_retracted = None - retraction.save() - - -class Command(BaseCommand): - """ - Backfill Retraction.date_retracted with `RETRACTION_APPROVED` log date. - """ - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Run migration and roll back changes to db', - ) - parser.add_argument( - '--reverse', - action='store_true', - dest='reverse', - help='Unsets date_retraction' - ) - - def handle(self, *args, **options): - reverse = options.get('reverse', False) - dry_run = options.get('dry_run', False) - if not dry_run: - script_utils.add_file_logger(logger, __file__) - with transaction.atomic(): - if reverse: - unset_date_retracted() - else: - set_date_retracted() - if dry_run: - raise RuntimeError('Dry run, transaction rolled back.') diff --git a/osf/management/commands/create_fake_preprint_actions.py b/osf/management/commands/create_fake_preprint_actions.py deleted file mode 100644 index 85b28ae9f20..00000000000 --- a/osf/management/commands/create_fake_preprint_actions.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python3 - -import random -import logging -from faker import Faker - -from django.core.management.base import BaseCommand - -from osf.models import ReviewAction, Preprint, OSFUser -from osf.utils.workflows import DefaultStates, DefaultTriggers - -logger = logging.getLogger(__name__) - - -class Command(BaseCommand): - """Add fake Actions to every preprint that doesn't already have one""" - - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - 'user', - type=str, - nargs='?', - default=None, - help='Guid for user to list as creator for all fake actions (default to arbitrary user)' - ) - parser.add_argument( - '--num-actions', - action='store', - type=int, - default=10, - help='Number of actions to create for each preprint which does not have one' - ) - - def handle(self, *args, **options): - user_guid = options.get('user') - num_actions = options.get('--num-actions') - - if user_guid is None: - user = OSFUser.objects.first() - else: - user = OSFUser.objects.get(guids___id=user_guid) - - fake = Faker() - triggers = [a.value for a in DefaultTriggers] - states = [s.value for s in DefaultStates] - for preprint in Preprint.objects.filter(actions__isnull=True): - for i in range(num_actions): - action = ReviewAction( - target=preprint, - creator=user, - trigger=random.choice(triggers), - from_state=random.choice(states), - to_state=random.choice(states), - comment=fake.text(), - ) - action.save() diff --git a/osf/management/commands/fake_metrics_reports.py b/osf/management/commands/fake_metrics_reports.py deleted file mode 100644 index 765d6e475c1..00000000000 --- a/osf/management/commands/fake_metrics_reports.py +++ /dev/null @@ -1,62 +0,0 @@ -from datetime import date, timedelta -from random import 
randint - -from django.conf import settings -from django.core.management.base import BaseCommand - -from osf.metrics import ( - UserSummaryReport, - PreprintSummaryReport, -) -from osf.models import PreprintProvider - - -def fake_user_counts(days_back): - yesterday = date.today() - timedelta(days=1) - first_report = UserSummaryReport( - report_date=(yesterday - timedelta(days=days_back)), - active=randint(0, 23), - deactivated=randint(0, 2), - merged=randint(0, 4), - new_users_daily=randint(0, 7), - new_users_with_institution_daily=randint(0, 5), - unconfirmed=randint(0, 3), - ) - first_report.save() - - last_report = first_report - while last_report.report_date < yesterday: - new_user_count = randint(0, 500) - new_report = UserSummaryReport( - report_date=(last_report.report_date + timedelta(days=1)), - active=(last_report.active + randint(0, new_user_count)), - deactivated=(last_report.deactivated + randint(0, new_user_count)), - merged=(last_report.merged + randint(0, new_user_count)), - new_users_daily=new_user_count, - new_users_with_institution_daily=randint(0, new_user_count), - unconfirmed=(last_report.unconfirmed + randint(0, new_user_count)), - ) - new_report.save() - last_report = new_report - - -def fake_preprint_counts(days_back): - yesterday = date.today() - timedelta(days=1) - provider_keys = PreprintProvider.objects.all().values_list('_id', flat=True) - for day_delta in range(days_back): - for provider_key in provider_keys: - preprint_count = randint(100, 5000) * (days_back - day_delta) - PreprintSummaryReport( - report_date=yesterday - timedelta(days=day_delta), - provider_key=provider_key, - preprint_count=preprint_count, - ).save() - - -class Command(BaseCommand): - def handle(self, *args, **kwargs): - if not settings.DEBUG: - raise NotImplementedError('fake_reports requires DEBUG mode') - fake_user_counts(1000) - fake_preprint_counts(1000) - # TODO: more reports diff --git a/osf/management/commands/make_dummy_pageviews_for_metrics.py b/osf/management/commands/make_dummy_pageviews_for_metrics.py deleted file mode 100644 index 09de34bf7a8..00000000000 --- a/osf/management/commands/make_dummy_pageviews_for_metrics.py +++ /dev/null @@ -1,118 +0,0 @@ -"""osf/management/commands/poke_metrics_timespan_queries.py -""" -import logging -import random -import datetime - -from django.core.management.base import BaseCommand -from osf.metrics import CountedAuthUsage - - -logger = logging.getLogger(__name__) - -TIME_FILTERS = ( - {'gte': 'now/d-150d'}, - {'gte': '2021-11-28T23:00:00.000Z', 'lte': '2023-01-16T00:00:00.000Z'}, -) - -PLATFORM_IRI = 'http://localhost:9201/' - -ITEM_GUID = 'foo' - - -class Command(BaseCommand): - - def add_arguments(self, parser): - parser.add_argument( - '--count', - type=int, - default=100, - help='number of fake pageviews to generate', - ) - parser.add_argument( - '--seconds_back', - type=int, - default=60 * 60 * 24 * 14, # up to two weeks back - help='max age in seconds of random event', - ) - - def handle(self, *args, **options): - self._generate_random_countedusage(options.get('count'), options.get('seconds_back')) - - results = [ - self._run_date_query(time_filter) - for time_filter in TIME_FILTERS - ] - - self._print_line( - (str(f) for f in TIME_FILTERS), - label='timefilter:', - ) - - date_keys = { - k - for r in results - for k in r - } - for date_key in sorted(date_keys): - self._print_line( - (r.get(date_key, 0) for r in results), - label=str(date_key), - ) - - def _print_line(self, lineitems, label=''): - print('\t'.join((label, *map(str, 
lineitems)))) - - def _generate_random_countedusage(self, n, max_age): - now = datetime.datetime.now(tz=datetime.UTC) - for _ in range(n): - seconds_back = random.randint(0, max_age) - timestamp_time = now - datetime.timedelta(seconds=seconds_back) - CountedAuthUsage.record( - platform_iri=PLATFORM_IRI, - timestamp=timestamp_time, - item_guid=ITEM_GUID, - session_id='freshen by key', - user_is_authenticated=bool(random.randint(0, 1)), - item_public=bool(random.randint(0, 1)), - action_labels=[['view', 'download'][random.randint(0, 1)]], - ) - - def _run_date_query(self, time_range_filter): - result = self._run_query({ - 'query': { - 'bool': { - 'filter': { - 'range': { - 'timestamp': time_range_filter, - }, - }, - }, - }, - 'aggs': { - 'by-date': { - 'date_histogram': { - 'field': 'timestamp', - 'interval': 'day', - }, - }, - 'max-timestamp': { - 'max': {'field': 'timestamp'}, - }, - 'min-timestamp': { - 'min': {'field': 'timestamp'}, - }, - }, - }) - return { - 'min': result.aggs['min-timestamp'].value, - 'max': result.aggs['max-timestamp'].value, - **{ - str(bucket.key.date()): bucket.doc_count - for bucket in result.aggs['by-date'] - }, - } - - def _run_query(self, query_dict): - analytics_search = CountedAuthUsage.search().update_from_dict(query_dict) - return analytics_search.execute() diff --git a/osf/management/commands/migrate_notifications.py b/osf/management/commands/migrate_notifications.py index 8b7c1fe2a5e..afae80b9af2 100644 --- a/osf/management/commands/migrate_notifications.py +++ b/osf/management/commands/migrate_notifications.py @@ -1,13 +1,10 @@ -import yaml -from django.apps import apps -from website import settings - import logging from django.contrib.contenttypes.models import ContentType from osf.models import NotificationType, NotificationSubscription from osf.models.notifications import NotificationSubscriptionLegacy from django.core.management.base import BaseCommand from django.db import transaction +from osf.management.commands.local_setup.populate_notification_types import populate_notification_types logger = logging.getLogger(__name__) @@ -55,61 +52,12 @@ def migrate_legacy_notification_subscriptions(*args, **kwargs): ) logger.info(f'Created NotificationType "{event_name}" with content_type {content_type}') - -def update_notification_types(*args, **kwargs): - - with open(settings.NOTIFICATION_TYPES_YAML) as stream: - notification_types = yaml.safe_load(stream) - for notification_type in notification_types['notification_types']: - notification_type.pop('__docs__') - object_content_type_model_name = notification_type.pop('object_content_type_model_name') - notification_freq = notification_type.pop('notification_freq_default') - - if object_content_type_model_name == 'desk': - content_type = None - elif object_content_type_model_name == 'osfuser': - OSFUser = apps.get_model('osf', 'OSFUser') - content_type = ContentType.objects.get_for_model(OSFUser) - elif object_content_type_model_name == 'preprint': - Preprint = apps.get_model('osf', 'Preprint') - content_type = ContentType.objects.get_for_model(Preprint) - elif object_content_type_model_name == 'collectionsubmission': - CollectionSubmission = apps.get_model('osf', 'CollectionSubmission') - content_type = ContentType.objects.get_for_model(CollectionSubmission) - elif object_content_type_model_name == 'abstractprovider': - AbstractProvider = apps.get_model('osf', 'abstractprovider') - content_type = ContentType.objects.get_for_model(AbstractProvider) - elif object_content_type_model_name == 'osfuser': - OSFUser 
= apps.get_model('osf', 'OSFUser') - content_type = ContentType.objects.get_for_model(OSFUser) - else: - try: - content_type = ContentType.objects.get( - app_label='osf', - model=object_content_type_model_name - ) - except ContentType.DoesNotExist: - raise ValueError(f'No content type for osf.{object_content_type_model_name}') - - with open(notification_type['template']) as stream: - template = stream.read() - - notification_types['template'] = template - notification_types['notification_freq'] = notification_freq - nt, _ = NotificationType.objects.update_or_create( - name=notification_type['name'], - defaults=notification_type, - ) - nt.object_content_type = content_type - nt.save() - - class Command(BaseCommand): help = 'Migrate legacy NotificationSubscriptionLegacy objects to new Notification app models.' def handle(self, *args, **options): with transaction.atomic(): - update_notification_types(args, options) + populate_notification_types(args, options) with transaction.atomic(): migrate_legacy_notification_subscriptions(args, options) diff --git a/osf/management/commands/migrate_pagecounter_data.py b/osf/management/commands/migrate_pagecounter_data.py deleted file mode 100644 index 050a355123f..00000000000 --- a/osf/management/commands/migrate_pagecounter_data.py +++ /dev/null @@ -1,124 +0,0 @@ -import datetime -import logging - -from django.core.management.base import BaseCommand -from django.db import connection - -from framework import sentry -from framework.celery_tasks import app as celery_app - -logger = logging.getLogger(__name__) - - -LIMIT_CLAUSE = ' LIMIT %s);' -NO_LIMIT_CLAUSE = ');' - -REVERSE_SQL_BASE = ''' -UPDATE osf_pagecounter PC -SET - resource_id = NULL, - file_id = NULL, - version = NULL, - action = NULL -WHERE PC.id IN ( - SELECT PC.id FROM osf_pagecounter PC - INNER JOIN osf_guid Guid on Guid._id = split_part(PC._id, ':', 2) - INNER JOIN osf_basefilenode File on File._id = split_part(PC._id, ':', 3) -''' -REVERSE_SQL = f'{REVERSE_SQL_BASE} {NO_LIMIT_CLAUSE}' -REVERSE_SQL_LIMITED = f'{REVERSE_SQL_BASE} {LIMIT_CLAUSE}' - -FORWARD_SQL_BASE = ''' - UPDATE osf_pagecounter PC - SET - action = split_part(PC._id, ':', 1), - resource_id = Guid.id, - file_id = File.id, - version = NULLIF(split_part(PC._id, ':', 4), '')::int - FROM osf_guid Guid, osf_basefilenode File - WHERE - Guid._id = split_part(PC._id, ':', 2) AND - File._id = split_part(PC._id, ':', 3) AND - PC.id in ( - select PC.id from osf_pagecounter PC - INNER JOIN osf_guid Guid on Guid._id = split_part(PC._id, ':', 2) - INNER JOIN osf_basefilenode File on File._id = split_part(PC._id, ':', 3) - WHERE (PC.resource_id IS NULL OR PC.file_id IS NULL) -''' -FORWARD_SQL = f'{FORWARD_SQL_BASE} {NO_LIMIT_CLAUSE}' -FORWARD_SQL_LIMITED = f'{FORWARD_SQL_BASE} {LIMIT_CLAUSE}' - -COUNT_SQL = ''' -SELECT count(PC.id) - from osf_pagecounter as PC - INNER JOIN osf_guid Guid on Guid._id = split_part(PC._id, ':', 2) - INNER JOIN osf_basefilenode File on File._id = split_part(PC._id, ':', 3) -where (PC.resource_id IS NULL or PC.file_id IS NULL); -''' - -@celery_app.task(name='management.commands.migrate_pagecounter_data') -def migrate_page_counters(dry_run=False, rows=10000, reverse=False): - script_start_time = datetime.datetime.now() - logger.info(f'Script started time: {script_start_time}') - - sql_query = REVERSE_SQL_LIMITED if reverse else FORWARD_SQL_LIMITED - logger.info(f'SQL Query: {sql_query}') - - with connection.cursor() as cursor: - if not dry_run: - cursor.execute(sql_query, [rows]) - if not reverse: - 
cursor.execute(COUNT_SQL) - number_of_entries_left = cursor.fetchone()[0] - logger.info(f'Entries left: {number_of_entries_left}') - if number_of_entries_left == 0: - sentry.log_message('Migrate pagecounter data complete') - - script_finish_time = datetime.datetime.now() - logger.info(f'Script finished time: {script_finish_time}') - logger.info(f'Run time {script_finish_time - script_start_time}') - - -class Command(BaseCommand): - help = '''Does the work of the pagecounter migration so that it can be done incrementally when convenient. - You will either need to set the page_size large enough to get all of the records, or you will need to run the - script multiple times until it tells you that it is done.''' - - def add_arguments(self, parser): - parser.add_argument( - '--dry_run', - type=bool, - default=False, - help='Run queries but do not write files', - ) - parser.add_argument( - '--rows', - type=int, - default=10000, - help='How many rows to process during this run', - ) - parser.add_argument( - '--reverse', - type=bool, - default=False, - help='Reverse out the migration', - ) - - # Management command handler - def handle(self, *args, **options): - logger.debug(options) - - dry_run = options['dry_run'] - rows = options['rows'] - reverse = options['reverse'] - logger.debug( - 'Dry run: {}, rows: {}, reverse: {}'.format( - dry_run, - rows, - reverse, - ) - ) - if dry_run: - logger.info('DRY RUN') - - migrate_page_counters(dry_run, rows, reverse) diff --git a/osf/management/commands/migrate_preprint_affiliation.py b/osf/management/commands/migrate_preprint_affiliation.py deleted file mode 100644 index e34c6dc6b27..00000000000 --- a/osf/management/commands/migrate_preprint_affiliation.py +++ /dev/null @@ -1,118 +0,0 @@ -import datetime -import logging - -from django.core.management.base import BaseCommand -from django.db import transaction -from django.db.models import F, Exists, OuterRef - -from osf.models import PreprintContributor, InstitutionAffiliation - -logger = logging.getLogger(__name__) - -AFFILIATION_TARGET_DATE = datetime.datetime(2024, 9, 19, 14, 37, 48, tzinfo=datetime.timezone.utc) - - -class Command(BaseCommand): - """Assign affiliations from users to preprints where they have write or admin permissions, with optional exclusion by user GUIDs.""" - - help = 'Assign affiliations from users to preprints where they have write or admin permissions.' 
- - def add_arguments(self, parser): - parser.add_argument( - '--exclude-guids', - nargs='+', - dest='exclude_guids', - help='List of user GUIDs to exclude from affiliation assignment' - ) - parser.add_argument( - '--dry-run', - action='store_true', - dest='dry_run', - help='If true, performs a dry run without making changes' - ) - parser.add_argument( - '--batch-size', - type=int, - default=1000, - dest='batch_size', - help='Number of contributors to process in each batch' - ) - - def handle(self, *args, **options): - start_time = datetime.datetime.now() - logger.info(f'Script started at: {start_time}') - - exclude_guids = set(options.get('exclude_guids') or []) - dry_run = options.get('dry_run', False) - batch_size = options.get('batch_size', 1000) - - if dry_run: - logger.info('Dry run mode activated.') - - processed_count, updated_count = assign_affiliations_to_preprints( - exclude_guids=exclude_guids, - dry_run=dry_run, - batch_size=batch_size - ) - - finish_time = datetime.datetime.now() - logger.info(f'Script finished at: {finish_time}') - logger.info(f'Total processed: {processed_count}, Updated: {updated_count}') - logger.info(f'Total run time: {finish_time - start_time}') - - -def assign_affiliations_to_preprints(exclude_guids=None, dry_run=True, batch_size=1000): - exclude_guids = exclude_guids or set() - processed_count = updated_count = 0 - - # Subquery to check if the user has any affiliated institutions - user_has_affiliations = Exists( - InstitutionAffiliation.objects.filter( - user=OuterRef('user') - ) - ) - - contributors_qs = PreprintContributor.objects.filter( - preprint__preprintgroupobjectpermission__permission__codename__in=['write_preprint'], - preprint__preprintgroupobjectpermission__group__user=F('user'), - ).filter( - user_has_affiliations - ).select_related( - 'user', - 'preprint' - ).exclude( - user__guids___id__in=exclude_guids - ).order_by('pk') # Ensure consistent ordering for batching - - total_contributors = contributors_qs.count() - logger.info(f'Total contributors to process: {total_contributors}') - - # Process contributors in batches - with transaction.atomic(): - for offset in range(0, total_contributors, batch_size): - # Use select_for_update() to ensure query hits the primary database - batch_contributors = contributors_qs[offset:offset + batch_size].select_for_update() - - logger.info(f'Processing contributors {offset + 1} to {min(offset + batch_size, total_contributors)}') - - for contributor in batch_contributors: - user = contributor.user - preprint = contributor.preprint - - if preprint.created > AFFILIATION_TARGET_DATE: - continue - - user_institutions = user.get_affiliated_institutions() - processed_count += 1 - if not dry_run: - preprint.affiliated_institutions.add(*user_institutions) - updated_count += 1 - logger.info( - f'Assigned {len(user_institutions)} affiliations from user <{user._id}> to preprint <{preprint._id}>.' - ) - else: - logger.info( - f'Dry run: Would assign {len(user_institutions)} affiliations from user <{user._id}> to preprint <{preprint._id}>.' 
- ) - - return processed_count, updated_count diff --git a/osf/management/commands/migrate_registration_responses.py b/osf/management/commands/migrate_registration_responses.py deleted file mode 100644 index 009dee81c4d..00000000000 --- a/osf/management/commands/migrate_registration_responses.py +++ /dev/null @@ -1,173 +0,0 @@ -import datetime -import logging - -from django.core.management.base import BaseCommand -from django.apps import apps -from tqdm import tqdm - -from framework.celery_tasks import app as celery_app -from framework import sentry - -from osf.exceptions import SchemaBlockConversionError -from osf.utils.registrations import flatten_registration_metadata - -logger = logging.getLogger(__name__) - -# because Registrations and DraftRegistrations are different -def get_nested_responses(registration_or_draft, schema_id): - nested_responses = getattr( - registration_or_draft, - 'registration_metadata', - None, - ) - if nested_responses is None: - registered_meta = registration_or_draft.registered_meta or {} - nested_responses = registered_meta.get(schema_id, None) - return nested_responses - -# because Registrations and DraftRegistrations are different -def get_registration_schema(registration_or_draft): - schema = getattr(registration_or_draft, 'registration_schema', None) - if schema is None: - schema = registration_or_draft.registered_schema.first() - return schema - -def migrate_registrations(dry_run, rows='all', AbstractNodeModel=None): - """ - Loops through registrations whose registration_responses have not been migrated, - and pulls this information from the "registered_meta" and flattens it, with - keys being the "registration_response_key"s and values being the most deeply - nested user response in registered_meta - """ - if AbstractNodeModel is None: - AbstractNodeModel = apps.get_model('osf', 'abstractnode') - - registrations = AbstractNodeModel.objects.filter( - type='osf.registration', - ).exclude( - registration_responses_migrated=True, - ) - return migrate_responses(AbstractNodeModel, registrations, 'registrations', dry_run, rows) - -def migrate_draft_registrations(dry_run, rows='all', DraftRegistrationModel=None): - """ - Populates a subset of draft_registration.registration_responses, and corresponding - draft_registration.registration_responses_migrated. - :params dry_run - :params rows - """ - if DraftRegistrationModel is None: - DraftRegistrationModel = apps.get_model('osf', 'draftregistration') - - draft_registrations = DraftRegistrationModel.objects.exclude( - registration_responses_migrated=True - ) - return migrate_responses(DraftRegistrationModel, draft_registrations, 'draft registrations', dry_run, rows) - - -def migrate_responses(model, resources, resource_name, dry_run=False, rows='all'): - """ - DRY method to be used to migrate both DraftRegistration.registration_responses - and Registration.registration_responses. 
- """ - progress_bar = None - if rows == 'all': - logger.info(f'Migrating all {resource_name}.') - else: - resources = resources[:rows] - logger.info(f'Migrating up to {rows} {resource_name}.') - progress_bar = tqdm(total=rows) - - successes_to_save = [] - errors_to_save = [] - for resource in resources: - try: - schema = get_registration_schema(resource) - resource.registration_responses = flatten_registration_metadata( - schema, - get_nested_responses(resource, schema._id), - ) - resource.registration_responses_migrated = True - successes_to_save.append(resource) - except SchemaBlockConversionError as e: - resource.registration_responses_migrated = False - errors_to_save.append(resource) - logger.error(f'Unexpected/invalid nested data in resource: {resource} with error {e}') - if progress_bar: - progress_bar.update() - - if progress_bar: - progress_bar.close() - - success_count = len(successes_to_save) - error_count = len(errors_to_save) - total_count = success_count + error_count - - if total_count == 0: - logger.info(f'No {resource_name} left to migrate.') - return total_count - - logger.info(f'Successfully migrated {success_count} out of {total_count} {resource_name}.') - if error_count: - logger.warning(f'Encountered errors on {error_count} out of {total_count} {resource_name}.') - if not success_count: - sentry.log_message(f'`migrate_registration_responses` has only errors left ({error_count} errors)') - - if dry_run: - logger.info('DRY RUN; discarding changes.') - else: - logger.info('Saving changes...') - model.objects.bulk_update(successes_to_save, fields=['registration_responses', 'registration_responses_migrated']) - model.objects.bulk_update(errors_to_save, fields=['registration_responses_migrated']) - - return total_count - - -@celery_app.task(name='management.commands.migrate_registration_responses') -def migrate_registration_responses(dry_run=False, rows=5000): - script_start_time = datetime.datetime.now() - logger.info(f'Script started time: {script_start_time}') - - draft_count = migrate_draft_registrations(dry_run, rows) - registration_count = migrate_registrations(dry_run, rows) - - if draft_count == 0 and registration_count == 0: - logger.info('Migration complete! No more drafts or registrations need migrating.') - sentry.log_message('`migrate_registration_responses` command found nothing to migrate!') - - script_finish_time = datetime.datetime.now() - logger.info(f'Script finished time: {script_finish_time}') - logger.info(f'Run time {script_finish_time - script_start_time}') - - -class Command(BaseCommand): - help = """ Incrementally migrates DraftRegistration.registration_metadata - -> DraftRegistration.registration_responses, and Registration.registered_meta - -> Registration.registered_responses. registration_responses is a flattened version - of registration_metadata/registered_meta. - - This will need to be run multiple times to migrate all records on prod. 
- """ - - def add_arguments(self, parser): - parser.add_argument( - '--dry_run', - type=bool, - default=False, - help='Run queries but do not write files', - ) - parser.add_argument( - '--rows', - type=int, - default=5000, - help='How many rows to process during this run', - ) - - # Management command handler - def handle(self, *args, **options): - dry_run = options['dry_run'] - rows = options['rows'] - if dry_run: - logger.info('DRY RUN') - - migrate_registration_responses(dry_run, rows) diff --git a/osf/management/commands/migrate_user_institution_affiliation.py b/osf/management/commands/migrate_user_institution_affiliation.py deleted file mode 100644 index 79170c5ece4..00000000000 --- a/osf/management/commands/migrate_user_institution_affiliation.py +++ /dev/null @@ -1,84 +0,0 @@ -import datetime -import logging - -from django.core.management.base import BaseCommand - -from osf.models import Institution, InstitutionAffiliation - -logger = logging.getLogger(__name__) - - -class Command(BaseCommand): - """Update emails of users from a given affiliated institution (when eligible). - """ - - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='If true, iterate through eligible users and institutions only' - ) - - def handle(self, *args, **options): - script_start_time = datetime.datetime.now() - logger.info(f'Script started time: {script_start_time}') - - dry_run = options.get('dry_run', False) - if dry_run: - logger.warning('Dry Run: This is a dry-run pass!') - migrate_user_institution_affiliation(dry_run=dry_run) - - script_finish_time = datetime.datetime.now() - logger.info(f'Script finished time: {script_finish_time}') - logger.info(f'Run time {script_finish_time - script_start_time}') - - -def migrate_user_institution_affiliation(dry_run=True): - - institutions = Institution.objects.get_all_institutions() - institution_total = institutions.count() - - institution_count = 0 - user_count = 0 - skipped_user_count = 0 - - for institution in institutions: - institution_count += 1 - user_count_per_institution = 0 - skipped_user_count_per_institution = 0 - users = institution.osfuser_set.all() - user_total_per_institution = users.count() - sso_identity = None - if not institution.delegation_protocol: - sso_identity = InstitutionAffiliation.DEFAULT_VALUE_FOR_SSO_IDENTITY_NOT_AVAILABLE - logger.info(f'Migrating affiliation for <{institution.name}> [{institution_count}/{institution_total}]') - for user in institution.osfuser_set.all(): - user_count_per_institution += 1 - user_count += 1 - logger.info(f'\tMigrating affiliation for <{user._id}::{institution.name}> ' - f'[{user_count_per_institution}/{user_total_per_institution}]') - if not dry_run: - affiliation = user.add_or_update_affiliated_institution( - institution, - sso_identity=sso_identity, - sso_department=user.department - ) - if affiliation: - logger.info(f'\tAffiliation=<{affiliation}> migrated or updated ' - f'for user=<{user._id}> @ institution=<{institution._id}>') - else: - skipped_user_count_per_institution += 1 - skipped_user_count += 1 - logger.info(f'\tSkip migration or update since affiliation exists ' - f'for user=<{user._id}> @ institution=<{institution._id}>') - else: - logger.warning(f'\tDry Run: Affiliation not migrated for {user._id} @ {institution._id}!') - if user_count_per_institution == 0: - logger.warning('No eligible user found') - else: - logger.info(f'Finished migrating affiliation for {user_count_per_institution} 
users ' - f'@ <{institution.name}>, including {skipped_user_count_per_institution} skipped users') - logger.info(f'Finished migrating affiliation for {user_count} users @ {institution_count} institutions, ' - f'including {skipped_user_count} skipped users') diff --git a/osf/management/commands/move_egap_regs_to_provider.py b/osf/management/commands/move_egap_regs_to_provider.py deleted file mode 100644 index 1dcaa7a6b77..00000000000 --- a/osf/management/commands/move_egap_regs_to_provider.py +++ /dev/null @@ -1,44 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand - -from scripts import utils as script_utils - -logger = logging.getLogger(__name__) - -from osf.models import ( - RegistrationProvider, - RegistrationSchema, - Registration -) - - -def main(dry_run): - egap_provider = RegistrationProvider.objects.get(_id='egap') - egap_schemas = RegistrationSchema.objects.filter(name='EGAP Registration').order_by('-schema_version') - - for egap_schema in egap_schemas: - egap_regs = Registration.objects.filter(registered_schema=egap_schema.id, provider___id='osf') - - if dry_run: - logger.info(f'[DRY RUN] {egap_regs.count()} updated to {egap_provider} with id {egap_provider.id}') - else: - egap_regs.update(provider_id=egap_provider.id) - - -class Command(BaseCommand): - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Dry run', - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run', False) - if not dry_run: - script_utils.add_file_logger(logger, __file__) - - main(dry_run=dry_run) diff --git a/osf/management/commands/populate_branched_from_node.py b/osf/management/commands/populate_branched_from_node.py deleted file mode 100644 index 086f7e4dbef..00000000000 --- a/osf/management/commands/populate_branched_from_node.py +++ /dev/null @@ -1,67 +0,0 @@ -import logging -import datetime - -from django.core.management.base import BaseCommand -from framework.celery_tasks import app as celery_app -from django.db import connection, transaction - -logger = logging.getLogger(__name__) - -POPULATE_BRANCHED_FROM_NODE = """WITH cte AS ( - SELECT id - FROM osf_abstractnode - WHERE type = 'osf.registration' AND - branched_from_node IS null - LIMIT %s -) -UPDATE osf_abstractnode a - SET branched_from_node = CASE WHEN - EXISTS(SELECT id FROM osf_nodelog WHERE action='project_created_from_draft_reg' AND node_id = a.id) THEN False - ELSE True -END -FROM cte -WHERE cte.id = a.id -""" - -@celery_app.task(name='management.commands.populate_branched_from') -def populate_branched_from(page_size=10000, dry_run=False): - with transaction.atomic(): - with connection.cursor() as cursor: - cursor.execute(POPULATE_BRANCHED_FROM_NODE, [page_size]) - if dry_run: - raise RuntimeError('Dry Run -- Transaction rolled back') - -class Command(BaseCommand): - help = '''Populates new deleted field for various models. 
Ensure you have run migrations - before running this script.''' - - def add_arguments(self, parser): - parser.add_argument( - '--dry_run', - type=bool, - default=False, - help='Run queries but do not write files', - ) - parser.add_argument( - '--page_size', - type=int, - default=10000, - help='How many rows to process at a time', - ) - - def handle(self, *args, **options): - script_start_time = datetime.datetime.now() - logger.info(f'Script started time: {script_start_time}') - logger.debug(options) - - dry_run = options['dry_run'] - page_size = options['page_size'] - - if dry_run: - logger.info('DRY RUN') - - populate_branched_from(page_size, dry_run) - - script_finish_time = datetime.datetime.now() - logger.info(f'Script finished time: {script_finish_time}') - logger.info(f'Run time {script_finish_time - script_start_time}') diff --git a/osf/management/commands/populate_initial_schema_responses.py b/osf/management/commands/populate_initial_schema_responses.py deleted file mode 100644 index 26ba3da7710..00000000000 --- a/osf/management/commands/populate_initial_schema_responses.py +++ /dev/null @@ -1,100 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand -from django.db import transaction -from django.db.models import Exists, F, OuterRef -from framework.celery_tasks import app as celery_app - -from osf.exceptions import PreviousSchemaResponseError, SchemaResponseUpdateError -from osf.models import Registration, SchemaResponse -from osf.utils.workflows import ApprovalStates, RegistrationModerationStates as RegStates - -logger = logging.getLogger(__name__) - -# Initial response pending amin approval or rejected while awaiting it -UNAPPROVED_STATES = [RegStates.INITIAL.db_name, RegStates.REVERTED.db_name] -# Initial response pending moderator approval or rejected while awaiting it -PENDING_MODERATION_STATES = [RegStates.PENDING.db_name, RegStates.REJECTED.db_name] - - -def _update_schema_response_state(schema_response): - '''Set the schema_response's state based on the current state of the parent rgistration.''' - moderation_state = schema_response.parent.moderation_state - if moderation_state in UNAPPROVED_STATES: - schema_response.state = ApprovalStates.UNAPPROVED - elif moderation_state in PENDING_MODERATION_STATES: - schema_response.state = ApprovalStates.PENDING_MODERATION - else: # All remainint states imply initial responses were approved by users at some point - schema_response.state = ApprovalStates.APPROVED - schema_response.save() - - -@celery_app.task(name='management.commands.populate_initial_schema_responses') -@transaction.atomic -def populate_initial_schema_responses(dry_run=False, batch_size=None): - '''Migrate registration_responses into a SchemaResponse for historical registrations.''' - # Find all root registrations that do not yet have SchemaResponses - qs = Registration.objects.prefetch_related('root').annotate( - has_schema_response=Exists(SchemaResponse.objects.filter(nodes__id=OuterRef('id'))) - ).filter( - has_schema_response=False, root=F('id') - ) - if batch_size: - qs = qs[:batch_size] - - count = 0 - for registration in qs: - logger.info( - f'{"[DRY RUN] " if dry_run else ""}' - f'Creating initial SchemaResponse for Registration with guid {registration._id}' - ) - try: - registration.copy_registration_responses_into_schema_response() - except SchemaResponseUpdateError as e: - logger.info( - f'Ignoring unsupported values "registration_responses" for registration ' - f'with guid [{registration._id}]: {str(e)}' - ) - except (ValueError, 
PreviousSchemaResponseError): - logger.exception( - f'{"[DRY RUN] " if dry_run else ""}' - f'Failure creating SchemaResponse for Registration with guid {registration._id}' - ) - # These errors should have prevented SchemaResponse creation, but better safe than sorry - registration.schema_responses.all().delete() - continue - - _update_schema_response_state(registration.schema_responses.last()) - count += 1 - - logger.info( - f'{"[DRY RUN] " if dry_run else ""}' - f'Created initial SchemaResponses for {count} registrations' - ) - - if dry_run: - raise RuntimeError('Dry run, transaction rolled back') - - return count - - -class Command(BaseCommand): - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Dry run', - ) - - parser.add_argument( - '--batch_size', - type=int, - default=0 - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run') - batch_size = options.get('batch_size') - populate_initial_schema_responses(dry_run=dry_run, batch_size=batch_size) diff --git a/osf/management/commands/populate_notification_types.py b/osf/management/commands/populate_notification_types.py new file mode 100644 index 00000000000..8f20531f06a --- /dev/null +++ b/osf/management/commands/populate_notification_types.py @@ -0,0 +1,72 @@ +import yaml +from django.apps import apps +from website import settings + +import logging +from django.contrib.contenttypes.models import ContentType +from osf.models import NotificationType +from django.core.management.base import BaseCommand +from django.db import transaction + +logger = logging.getLogger(__name__) + +FREQ_MAP = { + 'none': 'none', + 'email_digest': 'weekly', + 'email_transactional': 'instantly', +} + +def populate_notification_types(*args, **kwargs): + + with open(settings.NOTIFICATION_TYPES_YAML) as stream: + notification_types = yaml.safe_load(stream) + for notification_type in notification_types['notification_types']: + notification_type.pop('__docs__') + object_content_type_model_name = notification_type.pop('object_content_type_model_name') + notification_freq = notification_type.pop('notification_freq_default') + + if object_content_type_model_name == 'desk': + content_type = None + elif object_content_type_model_name == 'osfuser': + OSFUser = apps.get_model('osf', 'OSFUser') + content_type = ContentType.objects.get_for_model(OSFUser) + elif object_content_type_model_name == 'preprint': + Preprint = apps.get_model('osf', 'Preprint') + content_type = ContentType.objects.get_for_model(Preprint) + elif object_content_type_model_name == 'collectionsubmission': + CollectionSubmission = apps.get_model('osf', 'CollectionSubmission') + content_type = ContentType.objects.get_for_model(CollectionSubmission) + elif object_content_type_model_name == 'abstractprovider': + AbstractProvider = apps.get_model('osf', 'abstractprovider') + content_type = ContentType.objects.get_for_model(AbstractProvider) + elif object_content_type_model_name == 'osfuser': + OSFUser = apps.get_model('osf', 'OSFUser') + content_type = ContentType.objects.get_for_model(OSFUser) + else: + try: + content_type = ContentType.objects.get( + app_label='osf', + model=object_content_type_model_name + ) + except ContentType.DoesNotExist: + raise ValueError(f'No content type for osf.{object_content_type_model_name}') + + with open(notification_type['template']) as stream: + template = stream.read() + + notification_types['template'] = template + notification_types['notification_freq'] 
= notification_freq + nt, _ = NotificationType.objects.update_or_create( + name=notification_type['name'], + defaults=notification_type, + ) + nt.object_content_type = content_type + nt.save() + + +class Command(BaseCommand): + help = 'Population notification types.' + + def handle(self, *args, **options): + with transaction.atomic(): + populate_notification_types(args, options) diff --git a/osf_tests/management_commands/test_migrate_notifications.py b/osf_tests/management_commands/test_migrate_notifications.py index f303ec3f996..35837f7cc7c 100644 --- a/osf_tests/management_commands/test_migrate_notifications.py +++ b/osf_tests/management_commands/test_migrate_notifications.py @@ -14,7 +14,7 @@ ) from osf.management.commands.migrate_notifications import ( migrate_legacy_notification_subscriptions, - update_notification_types + populate_notification_types ) @pytest.mark.django_db @@ -22,7 +22,7 @@ class TestNotificationSubscriptionMigration: @pytest.fixture(autouse=True) def notification_types(self): - return update_notification_types() + return populate_notification_types() @pytest.fixture() def user(self): diff --git a/osf_tests/management_commands/test_move_egap_regs_to_provider.py b/osf_tests/management_commands/test_move_egap_regs_to_provider.py deleted file mode 100644 index 4e1ac7291aa..00000000000 --- a/osf_tests/management_commands/test_move_egap_regs_to_provider.py +++ /dev/null @@ -1,51 +0,0 @@ -import pytest - -from osf_tests.factories import ( - RegistrationFactory, - RegistrationProviderFactory -) - -from osf.models import ( - RegistrationSchema, - RegistrationProvider -) - -from osf.management.commands.move_egap_regs_to_provider import ( - main as move_egap_regs -) - - -@pytest.mark.django_db -class TestEGAPMoveToProvider: - - @pytest.fixture() - def egap_provider(self): - return RegistrationProviderFactory(_id='egap') - - @pytest.fixture() - def non_egap_provider(self): - return RegistrationProvider.get_default() - - @pytest.fixture() - def egap_reg(self): - egap_schema = RegistrationSchema.objects.filter( - name='EGAP Registration' - ).order_by( - '-schema_version' - )[0] - cos = RegistrationProvider.get_default() - return RegistrationFactory(schema=egap_schema, provider=cos) - - @pytest.fixture() - def egap_non_reg(self, non_egap_provider): - return RegistrationFactory(provider=non_egap_provider) - - def test_move_to_provider(self, egap_provider, egap_reg, non_egap_provider, egap_non_reg): - assert egap_reg.provider != egap_provider - assert egap_non_reg.provider != egap_provider - - move_egap_regs(dry_run=False) - - egap_reg.refresh_from_db() - assert egap_reg.provider == egap_provider - assert egap_non_reg.provider != egap_provider diff --git a/osf_tests/management_commands/test_populate_initial_schema_responses.py b/osf_tests/management_commands/test_populate_initial_schema_responses.py deleted file mode 100644 index 18949c09b33..00000000000 --- a/osf_tests/management_commands/test_populate_initial_schema_responses.py +++ /dev/null @@ -1,130 +0,0 @@ -import pytest - -from osf.management.commands.populate_initial_schema_responses import populate_initial_schema_responses -from osf.models import SchemaResponse, SchemaResponseBlock -from osf.utils.workflows import ApprovalStates, RegistrationModerationStates as RegStates -from osf_tests.factories import ProjectFactory, RegistrationFactory -from osf_tests.utils import get_default_test_schema - -DEFAULT_RESPONSES = { - 'q1': 'An answer', 'q2': 'Another answer', 'q3': 'A', 'q4': ['E'], 'q5': '', 'q6': [], -} - 
-@pytest.fixture -def control_registration(): - return RegistrationFactory() - - -@pytest.fixture -def test_registration(): - registration = RegistrationFactory(schema=get_default_test_schema()) - registration.schema_responses.clear() - registration.registration_responses = dict(DEFAULT_RESPONSES) - registration.save() - return registration - - -@pytest.fixture -def nested_registration(test_registration): - registration = RegistrationFactory( - project=ProjectFactory(parent=test_registration.registered_from), - parent=test_registration - ) - registration.schema_responses.clear() - return registration - - -@pytest.mark.django_db -class TestPopulateInitialSchemaResponses: - - def test_schema_response_created(self, test_registration): - assert not test_registration.schema_responses.exists() - - count = populate_initial_schema_responses() - assert count == 1 - - assert test_registration.schema_responses.count() == 1 - - schema_response = test_registration.schema_responses.get() - assert schema_response.schema == test_registration.registration_schema - assert schema_response.all_responses == test_registration.registration_responses - - @pytest.mark.parametrize( - 'registration_state, schema_response_state', - [ - (RegStates.INITIAL, ApprovalStates.UNAPPROVED), - (RegStates.PENDING, ApprovalStates.PENDING_MODERATION), - (RegStates.ACCEPTED, ApprovalStates.APPROVED), - (RegStates.EMBARGO, ApprovalStates.APPROVED), - (RegStates.PENDING_EMBARGO_TERMINATION, ApprovalStates.APPROVED), - (RegStates.PENDING_WITHDRAW_REQUEST, ApprovalStates.APPROVED), - (RegStates.PENDING_WITHDRAW, ApprovalStates.APPROVED), - (RegStates.WITHDRAWN, ApprovalStates.APPROVED), - (RegStates.REVERTED, ApprovalStates.UNAPPROVED), - (RegStates.REJECTED, ApprovalStates.PENDING_MODERATION), - ] - ) - def test_schema_response_state( - self, test_registration, registration_state, schema_response_state): - test_registration.moderation_state = registration_state.db_name - test_registration.save() - - populate_initial_schema_responses() - - schema_response = test_registration.schema_responses.get() - assert schema_response.state == schema_response_state - - def test_errors_from_invalid_keys_are_ignored(self, test_registration): - test_registration.registration_responses.update({'invalid_key': 'lolol'}) - test_registration.save() - - populate_initial_schema_responses() - - schema_response = test_registration.schema_responses.get() - assert schema_response.all_responses == DEFAULT_RESPONSES - - def test_populate_responses_is_atomic_per_registration(self, test_registration): - invalid_registration = RegistrationFactory() - invalid_registration.schema_responses.clear() - invalid_registration.registered_schema.clear() - - count = populate_initial_schema_responses() - assert count == 1 - - assert test_registration.schema_responses.exists() - assert not invalid_registration.schema_responses.exists() - - def test_dry_run(self, test_registration): - # donfirm that the delete works even if the schema_response isn't IN_PROGRESS - test_registration.moderation_state = RegStates.ACCEPTED.db_name - test_registration.save() - with pytest.raises(RuntimeError): - populate_initial_schema_responses(dry_run=True) - - assert not test_registration.schema_responses.exists() - assert not SchemaResponse.objects.exists() - assert not SchemaResponseBlock.objects.exists() - - def test_batch_size(self): - for _ in range(5): - r = RegistrationFactory() - r.schema_responses.clear() - assert not SchemaResponse.objects.exists() - - count = 
populate_initial_schema_responses(batch_size=3) - assert count == 3 - - assert SchemaResponse.objects.count() == 3 - - def test_schema_response_not_created_for_registration_with_response(self, control_registration): - control_registration_response = control_registration.schema_responses.get() - - count = populate_initial_schema_responses() - assert count == 0 - - assert control_registration.schema_responses.get() == control_registration_response - - def test_schema_response_not_created_for_nested_registration(self, nested_registration): - count = populate_initial_schema_responses() - assert count == 1 # parent registration - assert not nested_registration.schema_responses.exists() diff --git a/osf_tests/test_registration_moderation_notifications.py b/osf_tests/test_registration_moderation_notifications.py deleted file mode 100644 index 100c15e64e1..00000000000 --- a/osf_tests/test_registration_moderation_notifications.py +++ /dev/null @@ -1,457 +0,0 @@ -import pytest -from unittest import mock -from unittest.mock import call - -from django.utils import timezone -from osf.management.commands.add_notification_subscription import add_reviews_notification_setting -from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions - -from osf.migrations import update_provider_auth_groups -from osf.models import Brand, NotificationDigest -from osf.models.action import RegistrationAction -from osf.utils.notifications import ( - notify_submit, - notify_accept_reject, - notify_moderator_registration_requests_withdrawal, - notify_reject_withdraw_request, - notify_withdraw_registration -) -from osf.utils.workflows import RegistrationModerationTriggers, RegistrationModerationStates - -from osf_tests.factories import ( - RegistrationFactory, - AuthUserFactory, - RetractionFactory -) - -from website import settings -from website.notifications import emails, tasks - - -def get_moderator(provider): - user = AuthUserFactory() - provider.add_to_group(user, 'moderator') - return user - - -def get_daily_moderator(provider): - user = AuthUserFactory() - provider.add_to_group(user, 'moderator') - for subscription_type in provider.DEFAULT_SUBSCRIPTIONS: - subscription = provider.notification_subscriptions.get(event_name=subscription_type) - subscription.add_user_to_subscription(user, 'email_digest') - return user - - -# Set USE_EMAIL to true and mock out the default mailer for consistency with other mocked settings -@pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') -class TestRegistrationMachineNotification: - - MOCK_NOW = timezone.now() - - @pytest.fixture(autouse=True) - def setup(self): - populate_registration_provider_notification_subscriptions() - with mock.patch('osf.utils.machines.timezone.now', return_value=self.MOCK_NOW): - yield - - @pytest.fixture() - def contrib(self): - return AuthUserFactory() - - @pytest.fixture() - def admin(self): - return AuthUserFactory() - - @pytest.fixture() - def registration(self, admin, contrib): - registration = RegistrationFactory(creator=admin) - registration.add_contributor(admin, 'admin') - registration.add_contributor(contrib, 'write') - update_provider_auth_groups() - return registration - - @pytest.fixture() - def registration_with_retraction(self, admin, contrib): - sanction = RetractionFactory(user=admin) - registration = sanction.target_registration - registration.update_moderation_state() - registration.add_contributor(admin, 'admin') - 
registration.add_contributor(contrib, 'write') - registration.save() - return registration - - @pytest.fixture() - def provider(self, registration): - return registration.provider - - @pytest.fixture() - def moderator(self, provider): - user = AuthUserFactory() - provider.add_to_group(user, 'moderator') - return user - - @pytest.fixture() - def daily_moderator(self, provider): - user = AuthUserFactory() - provider.add_to_group(user, 'moderator') - for subscription_type in provider.DEFAULT_SUBSCRIPTIONS: - subscription = provider.notification_subscriptions.get(event_name=subscription_type) - subscription.add_user_to_subscription(user, 'email_digest') - return user - - @pytest.fixture() - def accept_action(self, registration, admin): - registration_action = RegistrationAction.objects.create( - creator=admin, - target=registration, - trigger=RegistrationModerationTriggers.ACCEPT_SUBMISSION.db_name, - from_state=RegistrationModerationStates.INITIAL.db_name, - to_state=RegistrationModerationStates.ACCEPTED.db_name, - comment='yo' - ) - return registration_action - - @pytest.fixture() - def withdraw_request_action(self, registration, admin): - registration_action = RegistrationAction.objects.create( - creator=admin, - target=registration, - trigger=RegistrationModerationTriggers.REQUEST_WITHDRAWAL.db_name, - from_state=RegistrationModerationStates.ACCEPTED.db_name, - to_state=RegistrationModerationStates.PENDING_WITHDRAW.db_name, - comment='yo' - ) - return registration_action - - @pytest.fixture() - def withdraw_action(self, registration, admin): - registration_action = RegistrationAction.objects.create( - creator=admin, - target=registration, - trigger=RegistrationModerationTriggers.ACCEPT_WITHDRAWAL.db_name, - from_state=RegistrationModerationStates.PENDING_WITHDRAW.db_name, - to_state=RegistrationModerationStates.WITHDRAWN.db_name, - comment='yo' - ) - return registration_action - - def test_submit_notifications(self, registration, moderator, admin, contrib, provider, mock_send_grid): - """ - [REQS-96] "As moderator of branded registry, I receive email notification upon admin author(s) submission approval" - :param mock_email: - :param draft_registration: - :return: - """ - # Set up mock_send_mail as a pass-through to the original function. - # This lets us assert on the call/args and also implicitly ensures - # that the email acutally renders as normal in send_mail. - notify_submit(registration, admin) - - assert len(mock_send_grid.call_args_list) == 2 - admin_message, contrib_message = mock_send_grid.call_args_list - - assert admin_message[1]['to_addr'] == admin.email - assert contrib_message[1]['to_addr'] == contrib.email - assert admin_message[1]['subject'] == 'Confirmation of your submission to OSF Registries' - assert contrib_message[1]['subject'] == 'Confirmation of your submission to OSF Registries' - - assert NotificationDigest.objects.count() == 1 - digest = NotificationDigest.objects.last() - - assert digest.user == moderator - assert digest.send_type == 'email_transactional' - assert digest.event == 'new_pending_submissions' - - def test_accept_notifications(self, registration, moderator, admin, contrib, accept_action): - """ - [REQS-98] "As registration authors, we receive email notification upon moderator acceptance" - :param draft_registration: - :return: - """ - add_reviews_notification_setting('global_reviews') - - # Set up mock_email as a pass-through to the original function. 
- # This lets us assert on the call count/args and also implicitly - # ensures that the email acutally renders correctly. - store_emails = emails.store_emails - with mock.patch.object(emails, 'store_emails', side_effect=store_emails) as mock_email: - notify_accept_reject(registration, registration.creator, accept_action, RegistrationModerationStates) - - assert len(mock_email.call_args_list) == 2 - - admin_message, contrib_message = mock_email.call_args_list - - assert admin_message == call( - [admin._id], - 'email_transactional', - 'global_reviews', - admin, - registration, - self.MOCK_NOW, - comment='yo', - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - has_psyarxiv_chronos_text=False, - is_creator=True, - is_rejected=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - requester=admin, - reviewable=registration, - template='reviews_submission_status', - was_pending=False, - workflow=None - ) - - assert contrib_message == call( - [contrib._id], - 'email_transactional', - 'global_reviews', - admin, - registration, - self.MOCK_NOW, - comment='yo', - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - has_psyarxiv_chronos_text=False, - is_creator=False, - is_rejected=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - reviewable=registration, - requester=admin, - template='reviews_submission_status', - was_pending=False, - workflow=None - ) - - def test_reject_notifications(self, registration, moderator, admin, contrib, accept_action): - """ - [REQS-100] "As authors of rejected by moderator registration, we receive email notification of registration returned - to draft state" - :param draft_registration: - :return: - """ - add_reviews_notification_setting('global_reviews') - - # Set up mock_email as a pass-through to the original function. 
- # This lets us assert on the call count/args and also implicitly - # ensures that the email acutally renders correctly - store_emails = emails.store_emails - with mock.patch.object(emails, 'store_emails', side_effect=store_emails) as mock_email: - notify_accept_reject(registration, registration.creator, accept_action, RegistrationModerationStates) - - assert len(mock_email.call_args_list) == 2 - - admin_message, contrib_message = mock_email.call_args_list - - assert admin_message == call( - [admin._id], - 'email_transactional', - 'global_reviews', - admin, - registration, - self.MOCK_NOW, - comment='yo', - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - has_psyarxiv_chronos_text=False, - is_creator=True, - is_rejected=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - reviewable=registration, - requester=admin, - template='reviews_submission_status', - was_pending=False, - workflow=None - ) - - assert contrib_message == call( - [contrib._id], - 'email_transactional', - 'global_reviews', - admin, - registration, - self.MOCK_NOW, - comment='yo', - document_type='registration', - domain='http://localhost:5000/', - draft_registration=registration.draft_registration.get(), - has_psyarxiv_chronos_text=False, - is_creator=False, - is_rejected=False, - notify_comment='yo', - provider_contact_email=settings.OSF_CONTACT_EMAIL, - provider_support_email=settings.OSF_SUPPORT_EMAIL, - provider_url='http://localhost:5000/', - reviewable=registration, - requester=admin, - template='reviews_submission_status', - was_pending=False, - workflow=None - ) - - def test_notify_moderator_registration_requests_withdrawal_notifications(self, moderator, daily_moderator, registration, admin, provider): - """ - [REQS-106] "As moderator, I receive registration withdrawal request notification email" - - :param mock_email: - :param draft_registration: - :param contrib: - :return: - """ - assert NotificationDigest.objects.count() == 0 - notify_moderator_registration_requests_withdrawal(registration, admin) - - assert NotificationDigest.objects.count() == 2 - - daily_digest = NotificationDigest.objects.get(send_type='email_digest') - transactional_digest = NotificationDigest.objects.get(send_type='email_transactional') - assert daily_digest.user == daily_moderator - assert transactional_digest.user == moderator - - for digest in (daily_digest, transactional_digest): - assert 'requested withdrawal' in digest.message - assert digest.event == 'new_pending_withdraw_requests' - assert digest.provider == provider - - def test_withdrawal_registration_accepted_notifications(self, registration_with_retraction, contrib, admin, withdraw_action, mock_send_grid): - """ - [REQS-109] "As registration author(s) requesting registration withdrawal, we receive notification email of moderator - decision" - - :param mock_email: - :param draft_registration: - :param contrib: - :return: - """ - # Set up mock_send_mail as a pass-through to the original function. - # This lets us assert on the call count/args and also implicitly - # ensures that the email acutally renders as normal in send_mail. 
- notify_withdraw_registration(registration_with_retraction, withdraw_action) - - assert len(mock_send_grid.call_args_list) == 2 - admin_message, contrib_message = mock_send_grid.call_args_list - - assert admin_message[1]['to_addr'] == admin.email - assert contrib_message[1]['to_addr'] == contrib.email - assert admin_message[1]['subject'] == 'Your registration has been withdrawn' - assert contrib_message[1]['subject'] == 'Your registration has been withdrawn' - - def test_withdrawal_registration_rejected_notifications(self, registration, contrib, admin, withdraw_request_action, mock_send_grid): - """ - [REQS-109] "As registration author(s) requesting registration withdrawal, we receive notification email of moderator - decision" - - :param mock_email: - :param draft_registration: - :param contrib: - :return: - """ - # Set up mock_send_mail as a pass-through to the original function. - # This lets us assert on the call count/args and also implicitly - # ensures that the email acutally renders as normal in send_mail. - notify_reject_withdraw_request(registration, withdraw_request_action) - - assert len(mock_send_grid.call_args_list) == 2 - admin_message, contrib_message = mock_send_grid.call_args_list - - assert admin_message[1]['to_addr'] == admin.email - assert contrib_message[1]['to_addr'] == contrib.email - assert admin_message[1]['subject'] == 'Your withdrawal request has been declined' - assert contrib_message[1]['subject'] == 'Your withdrawal request has been declined' - - def test_withdrawal_registration_force_notifications(self, registration_with_retraction, contrib, admin, withdraw_action, mock_send_grid): - """ - [REQS-109] "As registration author(s) requesting registration withdrawal, we receive notification email of moderator - decision" - - :param mock_email: - :param draft_registration: - :param contrib: - :return: - """ - # Set up mock_send_mail as a pass-through to the original function. - # This lets us assert on the call count/args and also implicitly - # ensures that the email acutally renders as normal in send_mail. 
- notify_withdraw_registration(registration_with_retraction, withdraw_action) - - assert len(mock_send_grid.call_args_list) == 2 - admin_message, contrib_message = mock_send_grid.call_args_list - - assert admin_message[1]['to_addr'] == admin.email - assert contrib_message[1]['to_addr'] == contrib.email - assert admin_message[1]['subject'] == 'Your registration has been withdrawn' - assert contrib_message[1]['subject'] == 'Your registration has been withdrawn' - - @pytest.mark.parametrize( - 'digest_type, expected_recipient', - [('email_transactional', get_moderator), ('email_digest', get_daily_moderator)] - ) - def test_submissions_and_withdrawals_both_appear_in_moderator_digest(self, digest_type, expected_recipient, registration, admin, provider, mock_send_grid): - # Invoke the fixture function to get the recipient because parametrize - expected_recipient = expected_recipient(provider) - - notify_submit(registration, admin) - notify_moderator_registration_requests_withdrawal(registration, admin) - - # One user, one provider => one email - grouped_notifications = list(tasks.get_moderators_emails(digest_type)) - assert len(grouped_notifications) == 1 - - moderator_message = grouped_notifications[0] - assert moderator_message['user_id'] == expected_recipient._id - assert moderator_message['provider_id'] == provider.id - - # No fixed ordering of the entires, so just make sure that - # keywords for each action type are in some message - updates = moderator_message['info'] - assert len(updates) == 2 - assert any('submitted' in entry['message'] for entry in updates) - assert any('requested withdrawal' in entry['message'] for entry in updates) - - @pytest.mark.parametrize('digest_type', ['email_transactional', 'email_digest']) - def test_submsissions_and_withdrawals_do_not_appear_in_node_digest(self, digest_type, registration, admin, moderator, daily_moderator): - notify_submit(registration, admin) - notify_moderator_registration_requests_withdrawal(registration, admin) - - assert not list(tasks.get_users_emails(digest_type)) - - def test_moderator_digest_emails_render(self, registration, admin, moderator, mock_send_grid): - notify_moderator_registration_requests_withdrawal(registration, admin) - # Set up mock_send_mail as a pass-through to the original function. - # This lets us assert on the call count/args and also implicitly - # ensures that the email acutally renders as normal in send_mail. - tasks._send_reviews_moderator_emails('email_transactional') - - mock_send_grid.assert_called() - - def test_branded_provider_notification_renders(self, registration, admin, moderator): - # Set brand details to be checked in notify_base.mako - provider = registration.provider - provider.brand = Brand.objects.create(hero_logo_image='not-a-url', primary_color='#FFA500') - provider.name = 'Test Provider' - provider.save() - - # Implicitly check that all of our uses of notify_base.mako render with branded details: - # - # notify_submit renders reviews_submission_confirmation using context from - # osf.utils.notifications and stores emails to be picked up in the moderator digest - # - # _send_Reviews_moderator_emails renders digest_reviews_moderators using context from - # website.notifications.tasks - notify_submit(registration, admin) - tasks._send_reviews_moderator_emails('email_transactional') - assert True # everything rendered! 
diff --git a/osf_tests/test_s3_folder_migration.py b/osf_tests/test_s3_folder_migration.py deleted file mode 100644 index 067e63c34a3..00000000000 --- a/osf_tests/test_s3_folder_migration.py +++ /dev/null @@ -1,41 +0,0 @@ -import pytest -from osf.management.commands.add_colon_delim_to_s3_buckets import update_folder_names, reverse_update_folder_names - -@pytest.mark.django_db -class TestUpdateFolderNamesMigration: - - def test_update_folder_names_migration(self): - from addons.s3.models import NodeSettings - from addons.s3.tests.factories import S3NodeSettingsFactory - # Create sample folder names and IDs - S3NodeSettingsFactory(folder_name='Folder 1 (Location 1)', folder_id='folder1') - S3NodeSettingsFactory(folder_name='Folder 2', folder_id='folder2') - S3NodeSettingsFactory(folder_name='Folder 3 (Location 3)', folder_id='folder3') - S3NodeSettingsFactory(folder_name='Folder 4:/ (Location 4)', folder_id='folder4:/') - - update_folder_names() - - # Verify updated folder names and IDs - updated_folder_names_ids = NodeSettings.objects.values_list('folder_name', 'folder_id') - expected_updated_folder_names_ids = { - ('Folder 1:/ (Location 1)', 'folder1:/'), - ('Folder 2:/', 'folder2:/'), - ('Folder 3:/ (Location 3)', 'folder3:/'), - ('Folder 3:/ (Location 3)', 'folder3:/'), - ('Folder 4:/ (Location 4)', 'folder4:/'), - - } - assert set(updated_folder_names_ids) == expected_updated_folder_names_ids - - # Reverse the migration - reverse_update_folder_names() - - # Verify the folder names and IDs after the reverse migration - reverted_folder_names_ids = NodeSettings.objects.values_list('folder_name', 'folder_id') - expected_reverted_folder_names_ids = { - ('Folder 1 (Location 1)', 'folder1'), - ('Folder 2', 'folder2'), - ('Folder 3 (Location 3)', 'folder3'), - ('Folder 4 (Location 4)', 'folder4'), - } - assert set(reverted_folder_names_ids) == expected_reverted_folder_names_ids diff --git a/website/settings/defaults.py b/website/settings/defaults.py index a9ee3085a9c..f5c9b500272 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -703,10 +703,6 @@ class CeleryConfig: # 'task': 'management.commands.addon_deleted_date', # 'schedule': crontab(minute=0, hour=3), # Daily 11:00 p.m. # }, - # 'populate_branched_from': { - # 'task': 'management.commands.populate_branched_from', - # 'schedule': crontab(minute=0, hour=3), - # }, 'generate_sitemap': { 'task': 'scripts.generate_sitemap', 'schedule': crontab(minute=0, hour=5), # Daily 12:00 a.m. 
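A note on the new populate_notification_types command introduced by the patch above: it loads every entry under the top-level notification_types key of the YAML file referenced by settings.NOTIFICATION_TYPES_YAML, resolves each entry's object_content_type_model_name to a ContentType, reads the referenced template file, and upserts a NotificationType row. The sketch below is illustrative only and is not part of the patch series; the YAML values shown are hypothetical, and only the key names come from the command itself.

    # Illustrative sketch, assuming Django settings are loaded and
    # NOTIFICATION_TYPES_YAML points at a file whose entries look roughly like
    # (values hypothetical):
    #
    #   notification_types:
    #     - name: file_updated
    #       object_content_type_model_name: abstractnode
    #       notification_freq_default: email_transactional
    #       template: website/templates/emails/file_updated.html.mako
    #       __docs__: Sent to subscribers when a file is updated.
    from django.core.management import call_command

    # Runs osf/management/commands/populate_notification_types.py; the command's
    # handle() wraps the population step in a single transaction.
    call_command('populate_notification_types')
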
From 37b419a44e7d379c38dbf8ade4999231cd8f0893 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 10 Jul 2025 15:00:13 -0400 Subject: [PATCH 061/176] fix issues with migrate schema response task deletion --- api_tests/users/views/test_user_settings.py | 125 ------------- .../test_user_settings_reset_password.py | 131 +++++++++++++ .../commands/migrate_notifications.py | 2 +- .../migrate_registration_responses.py | 173 ++++++++++++++++++ .../test_migrate_preprint_affiliations.py | 151 --------------- website/settings/defaults.py | 4 - 6 files changed, 305 insertions(+), 281 deletions(-) create mode 100644 api_tests/users/views/test_user_settings_reset_password.py create mode 100644 osf/management/commands/migrate_registration_responses.py delete mode 100644 osf_tests/management_commands/test_migrate_preprint_affiliations.py diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index ec60c1f4c3d..82b22f42739 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -1,15 +1,12 @@ from unittest import mock import pytest -import urllib from api.base.settings.defaults import API_BASE -from api.base.settings import CSRF_COOKIE_NAME from api.base.utils import hashids from osf_tests.factories import ( AuthUserFactory, UserFactory, ) -from django.middleware import csrf from osf.models import Email, NotableDomain from framework.auth.views import auth_email_logout @@ -167,128 +164,6 @@ def test_multiple_errors(self, app, user_one, url, payload): assert res.json['errors'][1]['detail'] == 'Password should be at least eight characters' -@pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') -class TestResetPassword: - - @pytest.fixture() - def user_one(self): - user = UserFactory() - user.set_password('password1') - user.auth = (user.username, 'password1') - user.save() - return user - - @pytest.fixture() - def url(self): - return f'/{API_BASE}users/reset_password/' - - @pytest.fixture - def csrf_token(self): - return csrf._mask_cipher_secret(csrf._get_new_csrf_string()) - - def test_get(self, mock_send_grid, app, url, user_one): - encoded_email = urllib.parse.quote(user_one.email) - url = f'{url}?email={encoded_email}' - res = app.get(url) - assert res.status_code == 200 - - user_one.reload() - assert mock_send_grid.call_args[1]['to_addr'] == user_one.username - - def test_get_invalid_email(self, mock_send_grid, app, url): - url = f'{url}?email={'invalid_email'}' - res = app.get(url) - assert res.status_code == 200 - assert not mock_send_grid.called - - def test_post(self, app, url, user_one, csrf_token): - app.set_cookie(CSRF_COOKIE_NAME, csrf_token) - encoded_email = urllib.parse.quote(user_one.email) - url = f'{url}?email={encoded_email}' - res = app.get(url) - user_one.reload() - payload = { - 'data': { - 'attributes': { - 'uid': user_one._id, - 'token': user_one.verification_key_v2['token'], - 'password': 'password2', - } - } - } - - res = app.post_json_api(url, payload, headers={'X-CSRFToken': csrf_token}) - user_one.reload() - assert res.status_code == 200 - assert user_one.check_password('password2') - - def test_post_empty_payload(self, app, url, csrf_token): - app.set_cookie(CSRF_COOKIE_NAME, csrf_token) - payload = { - 'data': { - 'attributes': { - } - } - } - res = app.post_json_api(url, payload, expect_errors=True, headers={'X-CSRFToken': csrf_token}) - assert res.status_code == 400 - - def test_post_invalid_token(self, app, url, user_one, csrf_token): - 
app.set_cookie(CSRF_COOKIE_NAME, csrf_token) - payload = { - 'data': { - 'attributes': { - 'uid': user_one._id, - 'token': 'invalid_token', - 'password': 'password2', - } - } - } - res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) - assert res.status_code == 400 - - def test_post_invalid_password(self, app, url, user_one, csrf_token): - app.set_cookie(CSRF_COOKIE_NAME, csrf_token) - encoded_email = urllib.parse.quote(user_one.email) - url = f'{url}?email={encoded_email}' - res = app.get(url) - user_one.reload() - payload = { - 'data': { - 'attributes': { - 'uid': user_one._id, - 'token': user_one.verification_key_v2['token'], - 'password': user_one.username, - } - } - } - - res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) - assert res.status_code == 400 - - def test_throrrle(self, app, url, user_one): - encoded_email = urllib.parse.quote(user_one.email) - url = f'{url}?email={encoded_email}' - res = app.get(url) - user_one.reload() - payload = { - 'data': { - 'attributes': { - 'uid': user_one._id, - 'token': user_one.verification_key_v2['token'], - 'password': '12345', - } - } - } - - res = app.post_json_api(url, payload, expect_errors=True) - assert res.status_code == 429 - - res = app.get(url, expect_errors=True) - assert res.json['message'] == 'You have recently requested to change your password. Please wait a few minutes before trying again.' - - @pytest.mark.django_db class TestUserEmailsList: diff --git a/api_tests/users/views/test_user_settings_reset_password.py b/api_tests/users/views/test_user_settings_reset_password.py new file mode 100644 index 00000000000..4c6c4022285 --- /dev/null +++ b/api_tests/users/views/test_user_settings_reset_password.py @@ -0,0 +1,131 @@ +import pytest +import urllib + +from api.base.settings.defaults import API_BASE +from api.base.settings import CSRF_COOKIE_NAME +from osf_tests.factories import ( + UserFactory, +) +from django.middleware import csrf + + +@pytest.mark.django_db +@pytest.mark.usefixtures('mock_send_grid') +class TestResetPassword: + + @pytest.fixture() + def user_one(self): + user = UserFactory() + user.set_password('password1') + user.auth = (user.username, 'password1') + user.save() + return user + + @pytest.fixture() + def url(self): + return f'/{API_BASE}users/reset_password/' + + @pytest.fixture + def csrf_token(self): + return csrf._mask_cipher_secret(csrf._get_new_csrf_string()) + + def test_get(self, mock_send_grid, app, url, user_one): + encoded_email = urllib.parse.quote(user_one.email) + url = f'{url}?email={encoded_email}' + res = app.get(url) + assert res.status_code == 200 + + user_one.reload() + assert mock_send_grid.call_args[1]['to_addr'] == user_one.username + + def test_get_invalid_email(self, mock_send_grid, app, url): + url = f'{url}?email={'invalid_email'}' + res = app.get(url) + assert res.status_code == 200 + assert not mock_send_grid.called + + def test_post(self, app, url, user_one, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) + encoded_email = urllib.parse.quote(user_one.email) + url = f'{url}?email={encoded_email}' + res = app.get(url) + user_one.reload() + payload = { + 'data': { + 'attributes': { + 'uid': user_one._id, + 'token': user_one.verification_key_v2['token'], + 'password': 'password2', + } + } + } + + res = app.post_json_api(url, payload, headers={'X-CSRFToken': csrf_token}) + user_one.reload() + assert res.status_code == 200 
+ assert user_one.check_password('password2') + + def test_post_empty_payload(self, app, url, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) + payload = { + 'data': { + 'attributes': { + } + } + } + res = app.post_json_api(url, payload, expect_errors=True, headers={'X-CSRFToken': csrf_token}) + assert res.status_code == 400 + + def test_post_invalid_token(self, app, url, user_one, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) + payload = { + 'data': { + 'attributes': { + 'uid': user_one._id, + 'token': 'invalid_token', + 'password': 'password2', + } + } + } + res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) + assert res.status_code == 400 + + def test_post_invalid_password(self, app, url, user_one, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) + encoded_email = urllib.parse.quote(user_one.email) + url = f'{url}?email={encoded_email}' + res = app.get(url) + user_one.reload() + payload = { + 'data': { + 'attributes': { + 'uid': user_one._id, + 'token': user_one.verification_key_v2['token'], + 'password': user_one.username, + } + } + } + + res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) + assert res.status_code == 400 + + def test_throttle(self, app, url, user_one): + encoded_email = urllib.parse.quote(user_one.email) + url = f'{url}?email={encoded_email}' + app.get(url) + user_one.reload() + payload = { + 'data': { + 'attributes': { + 'uid': user_one._id, + 'token': user_one.verification_key_v2['token'], + 'password': '12345', + } + } + } + + res = app.post_json_api(url, payload, expect_errors=True) + assert res.status_code == 429 + + res = app.get(url, expect_errors=True) + assert res.json['message'] == 'You have recently requested to change your password. Please wait a few minutes before trying again.' 
diff --git a/osf/management/commands/migrate_notifications.py b/osf/management/commands/migrate_notifications.py index afae80b9af2..f4dfaf3c0c8 100644 --- a/osf/management/commands/migrate_notifications.py +++ b/osf/management/commands/migrate_notifications.py @@ -4,7 +4,7 @@ from osf.models.notifications import NotificationSubscriptionLegacy from django.core.management.base import BaseCommand from django.db import transaction -from osf.management.commands.local_setup.populate_notification_types import populate_notification_types +from osf.management.commands.populate_notification_types import populate_notification_types logger = logging.getLogger(__name__) diff --git a/osf/management/commands/migrate_registration_responses.py b/osf/management/commands/migrate_registration_responses.py new file mode 100644 index 00000000000..009dee81c4d --- /dev/null +++ b/osf/management/commands/migrate_registration_responses.py @@ -0,0 +1,173 @@ +import datetime +import logging + +from django.core.management.base import BaseCommand +from django.apps import apps +from tqdm import tqdm + +from framework.celery_tasks import app as celery_app +from framework import sentry + +from osf.exceptions import SchemaBlockConversionError +from osf.utils.registrations import flatten_registration_metadata + +logger = logging.getLogger(__name__) + +# because Registrations and DraftRegistrations are different +def get_nested_responses(registration_or_draft, schema_id): + nested_responses = getattr( + registration_or_draft, + 'registration_metadata', + None, + ) + if nested_responses is None: + registered_meta = registration_or_draft.registered_meta or {} + nested_responses = registered_meta.get(schema_id, None) + return nested_responses + +# because Registrations and DraftRegistrations are different +def get_registration_schema(registration_or_draft): + schema = getattr(registration_or_draft, 'registration_schema', None) + if schema is None: + schema = registration_or_draft.registered_schema.first() + return schema + +def migrate_registrations(dry_run, rows='all', AbstractNodeModel=None): + """ + Loops through registrations whose registration_responses have not been migrated, + and pulls this information from the "registered_meta" and flattens it, with + keys being the "registration_response_key"s and values being the most deeply + nested user response in registered_meta + """ + if AbstractNodeModel is None: + AbstractNodeModel = apps.get_model('osf', 'abstractnode') + + registrations = AbstractNodeModel.objects.filter( + type='osf.registration', + ).exclude( + registration_responses_migrated=True, + ) + return migrate_responses(AbstractNodeModel, registrations, 'registrations', dry_run, rows) + +def migrate_draft_registrations(dry_run, rows='all', DraftRegistrationModel=None): + """ + Populates a subset of draft_registration.registration_responses, and corresponding + draft_registration.registration_responses_migrated. + :params dry_run + :params rows + """ + if DraftRegistrationModel is None: + DraftRegistrationModel = apps.get_model('osf', 'draftregistration') + + draft_registrations = DraftRegistrationModel.objects.exclude( + registration_responses_migrated=True + ) + return migrate_responses(DraftRegistrationModel, draft_registrations, 'draft registrations', dry_run, rows) + + +def migrate_responses(model, resources, resource_name, dry_run=False, rows='all'): + """ + DRY method to be used to migrate both DraftRegistration.registration_responses + and Registration.registration_responses. 
+ """ + progress_bar = None + if rows == 'all': + logger.info(f'Migrating all {resource_name}.') + else: + resources = resources[:rows] + logger.info(f'Migrating up to {rows} {resource_name}.') + progress_bar = tqdm(total=rows) + + successes_to_save = [] + errors_to_save = [] + for resource in resources: + try: + schema = get_registration_schema(resource) + resource.registration_responses = flatten_registration_metadata( + schema, + get_nested_responses(resource, schema._id), + ) + resource.registration_responses_migrated = True + successes_to_save.append(resource) + except SchemaBlockConversionError as e: + resource.registration_responses_migrated = False + errors_to_save.append(resource) + logger.error(f'Unexpected/invalid nested data in resource: {resource} with error {e}') + if progress_bar: + progress_bar.update() + + if progress_bar: + progress_bar.close() + + success_count = len(successes_to_save) + error_count = len(errors_to_save) + total_count = success_count + error_count + + if total_count == 0: + logger.info(f'No {resource_name} left to migrate.') + return total_count + + logger.info(f'Successfully migrated {success_count} out of {total_count} {resource_name}.') + if error_count: + logger.warning(f'Encountered errors on {error_count} out of {total_count} {resource_name}.') + if not success_count: + sentry.log_message(f'`migrate_registration_responses` has only errors left ({error_count} errors)') + + if dry_run: + logger.info('DRY RUN; discarding changes.') + else: + logger.info('Saving changes...') + model.objects.bulk_update(successes_to_save, fields=['registration_responses', 'registration_responses_migrated']) + model.objects.bulk_update(errors_to_save, fields=['registration_responses_migrated']) + + return total_count + + +@celery_app.task(name='management.commands.migrate_registration_responses') +def migrate_registration_responses(dry_run=False, rows=5000): + script_start_time = datetime.datetime.now() + logger.info(f'Script started time: {script_start_time}') + + draft_count = migrate_draft_registrations(dry_run, rows) + registration_count = migrate_registrations(dry_run, rows) + + if draft_count == 0 and registration_count == 0: + logger.info('Migration complete! No more drafts or registrations need migrating.') + sentry.log_message('`migrate_registration_responses` command found nothing to migrate!') + + script_finish_time = datetime.datetime.now() + logger.info(f'Script finished time: {script_finish_time}') + logger.info(f'Run time {script_finish_time - script_start_time}') + + +class Command(BaseCommand): + help = """ Incrementally migrates DraftRegistration.registration_metadata + -> DraftRegistration.registration_responses, and Registration.registered_meta + -> Registration.registered_responses. registration_responses is a flattened version + of registration_metadata/registered_meta. + + This will need to be run multiple times to migrate all records on prod. 
+ """ + + def add_arguments(self, parser): + parser.add_argument( + '--dry_run', + type=bool, + default=False, + help='Run queries but do not write files', + ) + parser.add_argument( + '--rows', + type=int, + default=5000, + help='How many rows to process during this run', + ) + + # Management command handler + def handle(self, *args, **options): + dry_run = options['dry_run'] + rows = options['rows'] + if dry_run: + logger.info('DRY RUN') + + migrate_registration_responses(dry_run, rows) diff --git a/osf_tests/management_commands/test_migrate_preprint_affiliations.py b/osf_tests/management_commands/test_migrate_preprint_affiliations.py deleted file mode 100644 index 8c80737b3dd..00000000000 --- a/osf_tests/management_commands/test_migrate_preprint_affiliations.py +++ /dev/null @@ -1,151 +0,0 @@ -import pytest -from datetime import timedelta -from osf.management.commands.migrate_preprint_affiliation import AFFILIATION_TARGET_DATE, assign_affiliations_to_preprints -from osf_tests.factories import ( - PreprintFactory, - InstitutionFactory, - AuthUserFactory, -) - - -@pytest.mark.django_db -class TestAssignAffiliationsToPreprints: - - @pytest.fixture() - def institution(self): - return InstitutionFactory() - - @pytest.fixture() - def user_with_affiliation(self, institution): - user = AuthUserFactory() - user.add_or_update_affiliated_institution(institution) - user.save() - return user - - @pytest.fixture() - def user_without_affiliation(self): - return AuthUserFactory() - - @pytest.fixture() - def preprint_with_affiliated_contributor(self, user_with_affiliation): - preprint = PreprintFactory() - preprint.add_contributor( - user_with_affiliation, - permissions='admin', - visible=True - ) - preprint.created = AFFILIATION_TARGET_DATE - timedelta(days=1) - preprint.save() - return preprint - - @pytest.fixture() - def preprint_with_non_affiliated_contributor(self, user_without_affiliation): - preprint = PreprintFactory() - preprint.add_contributor( - user_without_affiliation, - permissions='admin', - visible=True - ) - preprint.created = AFFILIATION_TARGET_DATE - timedelta(days=1) - preprint.save() - return preprint - - @pytest.fixture() - def preprint_past_target_date_with_affiliated_contributor(self, user_with_affiliation): - preprint = PreprintFactory() - preprint.add_contributor( - user_with_affiliation, - permissions='admin', - visible=True - ) - preprint.created = AFFILIATION_TARGET_DATE + timedelta(days=1) - preprint.save() - return preprint - - @pytest.mark.parametrize('dry_run', [True, False]) - def test_assign_affiliations_with_affiliated_contributor(self, preprint_with_affiliated_contributor, institution, dry_run): - preprint = preprint_with_affiliated_contributor - preprint.affiliated_institutions.clear() - preprint.save() - - assign_affiliations_to_preprints(dry_run=dry_run) - - if dry_run: - assert not preprint.affiliated_institutions.exists() - else: - assert institution in preprint.affiliated_institutions.all() - - @pytest.mark.parametrize('dry_run', [True, False]) - def test_no_affiliations_for_non_affiliated_contributor(self, preprint_with_non_affiliated_contributor, dry_run): - preprint = preprint_with_non_affiliated_contributor - preprint.affiliated_institutions.clear() - preprint.save() - - assign_affiliations_to_preprints(dry_run=dry_run) - - assert not preprint.affiliated_institutions.exists() - - @pytest.mark.parametrize('dry_run', [True, False]) - def test_exclude_contributor_by_guid(self, preprint_with_affiliated_contributor, user_with_affiliation, institution, dry_run): 
- preprint = preprint_with_affiliated_contributor - preprint.affiliated_institutions.clear() - preprint.save() - - assert user_with_affiliation.get_affiliated_institutions() - assert user_with_affiliation in preprint.contributors.all() - exclude_guids = {user._id for user in preprint.contributors.all()} - - assign_affiliations_to_preprints(exclude_guids=exclude_guids, dry_run=dry_run) - - assert not preprint.affiliated_institutions.exists() - - @pytest.mark.parametrize('dry_run', [True, False]) - def test_affiliations_from_multiple_contributors(self, institution, dry_run): - institution_not_include = InstitutionFactory() - read_contrib = AuthUserFactory() - read_contrib.add_or_update_affiliated_institution(institution_not_include) - read_contrib.save() - - write_contrib = AuthUserFactory() - write_contrib.add_or_update_affiliated_institution(institution) - write_contrib.save() - - admin_contrib = AuthUserFactory() - institution2 = InstitutionFactory() - admin_contrib.add_or_update_affiliated_institution(institution2) - admin_contrib.save() - - preprint = PreprintFactory() - preprint.affiliated_institutions.clear() - preprint.created = AFFILIATION_TARGET_DATE - timedelta(days=1) - preprint.add_contributor(read_contrib, permissions='read', visible=True) - preprint.add_contributor(write_contrib, permissions='write', visible=True) - preprint.add_contributor(admin_contrib, permissions='admin', visible=True) - preprint.save() - - assign_affiliations_to_preprints(dry_run=dry_run) - - if dry_run: - assert not preprint.affiliated_institutions.exists() - else: - affiliations = set(preprint.affiliated_institutions.all()) - assert affiliations == {institution, institution2} - assert institution_not_include not in affiliations - - @pytest.mark.parametrize('dry_run', [True, False]) - def test_exclude_recent_preprints(self, preprint_past_target_date_with_affiliated_contributor, preprint_with_affiliated_contributor, institution, dry_run): - new_preprint = preprint_past_target_date_with_affiliated_contributor - new_preprint.affiliated_institutions.clear() - new_preprint.save() - - old_preprint = preprint_with_affiliated_contributor - old_preprint.affiliated_institutions.clear() - old_preprint.save() - - assign_affiliations_to_preprints(dry_run=dry_run) - - assert not new_preprint.affiliated_institutions.exists() - if dry_run: - assert not old_preprint.affiliated_institutions.exists() - else: - assert institution in old_preprint.affiliated_institutions.all() diff --git a/website/settings/defaults.py b/website/settings/defaults.py index f5c9b500272..a20a50c3e52 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -441,7 +441,6 @@ class CeleryConfig: 'osf.management.commands.migrate_pagecounter_data', 'osf.management.commands.migrate_deleted_date', 'osf.management.commands.addon_deleted_date', - 'osf.management.commands.migrate_registration_responses', 'osf.management.commands.archive_registrations_on_IA' 'osf.management.commands.sync_doi_metadata', 'osf.management.commands.sync_collection_provider_indices', @@ -693,9 +692,6 @@ class CeleryConfig: # 'task': 'management.commands.migrate_pagecounter_data', # 'schedule': crontab(minute=0, hour=7), # Daily 2:00 a.m. # }, - # 'migrate_registration_responses': { - # 'task': 'management.commands.migrate_registration_responses', - # 'schedule': crontab(minute=32, hour=7), # Daily 2:32 a.m. 
# 'migrate_deleted_date': { # 'task': 'management.commands.migrate_deleted_date', # 'schedule': crontab(minute=0, hour=3), From 03cad99e8475d99ded53df47e1b9cb8aa1598a35 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 11 Jul 2025 08:40:35 -0400 Subject: [PATCH 062/176] fix issues with migrate schema response task deletion --- .../views/test_user_settings_reset_password.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/api_tests/users/views/test_user_settings_reset_password.py b/api_tests/users/views/test_user_settings_reset_password.py index 4c6c4022285..d677bf8ec3d 100644 --- a/api_tests/users/views/test_user_settings_reset_password.py +++ b/api_tests/users/views/test_user_settings_reset_password.py @@ -8,9 +8,8 @@ ) from django.middleware import csrf - -@pytest.mark.django_db @pytest.mark.usefixtures('mock_send_grid') +@pytest.mark.usefixtures('mock_notification_send') class TestResetPassword: @pytest.fixture() @@ -29,14 +28,14 @@ def url(self): def csrf_token(self): return csrf._mask_cipher_secret(csrf._get_new_csrf_string()) - def test_get(self, mock_send_grid, app, url, user_one): + def test_get(self, mock_notification_send, app, url, user_one): encoded_email = urllib.parse.quote(user_one.email) url = f'{url}?email={encoded_email}' res = app.get(url) assert res.status_code == 200 user_one.reload() - assert mock_send_grid.call_args[1]['to_addr'] == user_one.username + assert mock_notification_send.called def test_get_invalid_email(self, mock_send_grid, app, url): url = f'{url}?email={'invalid_email'}' @@ -109,10 +108,11 @@ def test_post_invalid_password(self, app, url, user_one, csrf_token): res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) assert res.status_code == 400 - def test_throttle(self, app, url, user_one): + def test_throrrle(self, app, url, user_one, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) encoded_email = urllib.parse.quote(user_one.email) url = f'{url}?email={encoded_email}' - app.get(url) + res = app.get(url) user_one.reload() payload = { 'data': { @@ -123,9 +123,8 @@ def test_throttle(self, app, url, user_one): } } } - - res = app.post_json_api(url, payload, expect_errors=True) - assert res.status_code == 429 + res = app.post_json_api(url, payload, expect_errors=True, headers={'X-CSRFToken': csrf_token}) + assert res.status_code == 200 res = app.get(url, expect_errors=True) assert res.json['message'] == 'You have recently requested to change your password. Please wait a few minutes before trying again.' 
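
The assertions above now target mock_notification_send, i.e. the NotificationType.emit path rather than a direct SendGrid call. For reference, a minimal sketch of the emit call these tests exercise, assuming the NotificationType model and the user_password_reset type registered in notifications.yaml later in this series; the reset_link context mirrors the api/users/views.py change in the next patch:

# Sketch only: the NotificationType.emit pattern that mock_notification_send patches.
# Names follow the usages added elsewhere in this series (user_password_reset is the
# renamed notification type; reset_link mirrors the api/users/views.py change).
from osf.models import NotificationType

def send_reset_password_notification(user, reset_link):
    NotificationType.objects.get(
        name=NotificationType.Type.USER_PASSWORD_RESET,
    ).emit(
        user=user,
        message_frequency='instantly',
        event_context={
            'can_change_preferences': False,
            'reset_link': reset_link,
        },
    )
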
From 4c69f7e0a6c6ff7570c985f79eed229eedc80b24 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 15 Jul 2025 12:49:22 -0400 Subject: [PATCH 063/176] Update notifications for withdraw and retraction types --- api/institutions/authentication.py | 18 +++-- api/users/views.py | 33 ++++---- .../views/test_registration_detail.py | 10 +-- notifications.yaml | 5 ++ osf/models/sanctions.py | 76 +++++++++++-------- tests/test_registrations/test_embargoes.py | 6 +- tests/test_registrations/test_retractions.py | 8 +- 7 files changed, 92 insertions(+), 64 deletions(-) diff --git a/api/institutions/authentication.py b/api/institutions/authentication.py index b052834f181..c28905fae9c 100644 --- a/api/institutions/authentication.py +++ b/api/institutions/authentication.py @@ -334,13 +334,17 @@ def authenticate(self, request): user.save() # Send confirmation email for all three: created, confirmed and claimed - notification_type = NotificationType.objects.filter(name='welcome_osf4i') - if not notification_type.exists(): - raise NotificationType.DoesNotExist( - 'NotificationType with name welcome_osf4i does not exist.', - ) - notification_type = notification_type.first() - notification_type.emit(user=user, message_frequency='instantly', event_context={'domain': DOMAIN, 'osf_support_email': OSF_SUPPORT_EMAIL, 'storage_flag_is_active': flag_is_active(request, features.STORAGE_I18N)}) + NotificationType.objects.get( + name=NotificationType.Type.USER_WELCOME_OSF4I.value, + ).emit( + user=user, + message_frequency='instantly', + event_context={ + 'domain': DOMAIN, + 'osf_support_email': OSF_SUPPORT_EMAIL, + 'storage_flag_is_active': flag_is_active(request, features.STORAGE_I18N), + }, + ) # Add the email to the user's account if it is identified by the eppn if email_to_add: diff --git a/api/users/views.py b/api/users/views.py index 04fdb101d6f..7045c3df7f1 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -844,13 +844,14 @@ def get(self, request, *args, **kwargs): user_obj.save() reset_link = f'{settings.RESET_PASSWORD_URL}{user_obj._id}/{user_obj.verification_key_v2['token']}/' - notification_type = NotificationType.objects.filter(name=mail_template) - if not notification_type.exists(): - raise NotificationType.DoesNotExist( - f'NotificationType with name {mail_template} does not exist.', - ) - notification_type = notification_type.first() - notification_type.emit(user=user_obj, message_frequency='instantly', event_context={'can_change_preferences': False, 'reset_link': reset_link}) + NotificationType.objects.get(name=mail_template).emit( + user=user_obj, + message_frequency='instantly', + event_context={ + 'can_change_preferences': False, + 'reset_link': reset_link, + }, + ) return Response(status=status.HTTP_200_OK, data={'message': status_message, 'kind': kind, 'institutional': institutional}) @@ -1063,14 +1064,16 @@ def _process_external_identity(self, user, external_identity, service_url): if external_status == 'CREATE': service_url += '&' + urlencode({'new': 'true'}) elif external_status == 'LINK': - notification_type = NotificationType.objects.filter(name='external_confirm_success') - if not notification_type.exists(): - raise NotificationType.DoesNotExist( - 'NotificationType with name external_confirm_success does not exist.', - ) - notification_type = notification_type.first() - notification_type.emit(user=user, message_frequency='instantly', event_context={'can_change_preferences': False, 'external_id_provider': provider}) - + NotificationType.objects.get( + 
name=NotificationType.Type.USER_EXTERNAL_CONFIRM_SUCCESS.value, + ).emit( + user=user, + message_frequency='instantly', + event_context={ + 'can_change_preferences': False, + 'external_id_provider': provider, + }, + ) enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id)) return service_url diff --git a/api_tests/registrations/views/test_registration_detail.py b/api_tests/registrations/views/test_registration_detail.py index 9112d0a3264..b9917bd70f9 100644 --- a/api_tests/registrations/views/test_registration_detail.py +++ b/api_tests/registrations/views/test_registration_detail.py @@ -695,7 +695,7 @@ def test_read_write_contributor_can_edit_writeable_fields( @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') +@pytest.mark.usefixtures('mock_notification_send') class TestRegistrationWithdrawal(TestRegistrationUpdateTestCase): @pytest.fixture @@ -754,14 +754,14 @@ def test_initiate_withdraw_registration_fails( res = app.put_json_api(public_url, public_payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 - def test_initiate_withdrawal_success(self, mock_send_grid, app, user, public_registration, public_url, public_payload): + def test_initiate_withdrawal_success(self, mock_notification_send, app, user, public_registration, public_url, public_payload): res = app.put_json_api(public_url, public_payload, auth=user.auth) assert res.status_code == 200 assert res.json['data']['attributes']['pending_withdrawal'] is True public_registration.refresh_from_db() assert public_registration.is_pending_retraction assert public_registration.registered_from.logs.first().action == 'retraction_initiated' - assert mock_send_grid.called + assert mock_notification_send.called @pytest.mark.usefixtures('mock_gravy_valet_get_verified_links') def test_initiate_withdrawal_with_embargo_ends_embargo( @@ -786,7 +786,7 @@ def test_initiate_withdrawal_with_embargo_ends_embargo( assert not public_registration.is_pending_embargo def test_withdraw_request_does_not_send_email_to_unregistered_admins( - self, mock_send_grid, app, user, public_registration, public_url, public_payload): + self, mock_notification_send, app, user, public_registration, public_url, public_payload): unreg = UnregUserFactory() with disconnected_from_listeners(contributor_added): public_registration.add_unregistered_contributor( @@ -803,7 +803,7 @@ def test_withdraw_request_does_not_send_email_to_unregistered_admins( # Only the creator gets an email; the unreg user does not get emailed assert public_registration._contributors.count() == 2 - assert mock_send_grid.call_count == 3 + assert mock_notification_send.call_count == 3 @pytest.mark.django_db diff --git a/notifications.yaml b/notifications.yaml index 6054b727e8e..f1ad12340d7 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -102,6 +102,11 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/new_pending_submissions.html.mako' notification_freq_default: instantly + - name: node_pending_registration_admin + __docs__: ... 
+ object_content_type_model_name: abstractnode + template: 'website/templates/emails/pending_registration_admin.html.mako' + notification_freq_default: instantly #### PREPRINT - name: pending_retraction_admin diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index 6d8b904b4b9..b5f80260c85 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -20,6 +20,7 @@ from osf.utils import tokens from osf.utils.machines import ApprovalsMachine from osf.utils.workflows import ApprovalStates, SanctionTypes +from osf.models import NotificationType VIEW_PROJECT_URL_TEMPLATE = osf_settings.DOMAIN + '{node_id}/' @@ -375,6 +376,12 @@ def _format_or_empty(template, context): return template.format(**context) return '' + def _get_authoriser_notification_type(self): + return None + + def _get_non_authoriser_notification_type(self): + return None + def _view_url(self, user_id, node): return self._format_or_empty(self.VIEW_URL_TEMPLATE, self._view_url_context(user_id, node)) @@ -403,22 +410,22 @@ def _email_template_context(self, user, node, is_authorizer=False): return {} def _notify_authorizer(self, authorizer, node): - context = self._email_template_context(authorizer, - node, - is_authorizer=True) - if self.AUTHORIZER_NOTIFY_EMAIL_TEMPLATE: - self._send_approval_request_email( - authorizer, self.AUTHORIZER_NOTIFY_EMAIL_TEMPLATE, context) - else: - raise NotImplementedError() + if notification_type := self._get_authoriser_notification_type(): + notification_type.emit( + authorizer, + event_context=self._email_template_context( + authorizer, + node, + is_authorizer=True + ) + ) def _notify_non_authorizer(self, user, node): - context = self._email_template_context(user, node) - if self.NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE: - self._send_approval_request_email( - user, self.NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE, context) - else: - raise NotImplementedError + if notification_type := self._get_authoriser_notification_type(): + notification_type.emit( + user, + event_context=self._email_template_context(user, node) + ) def ask(self, group): """ @@ -467,9 +474,6 @@ class Embargo(SanctionCallbackMixin, EmailApprovableSanction): DISPLAY_NAME = 'Embargo' SHORT_NAME = 'embargo' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_NON_ADMIN - VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -502,6 +506,12 @@ def embargo_end_date(self): def pending_registration(self): return not self.for_existing_registration and self.is_pending_approval + def _get_authoriser_notification_type(self): + return NotificationType.objects.get(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE) + + def _get_non_authoriser_notification_type(self): + return NotificationType.objects.get(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE) + def _get_registration(self): return self.registrations.first() @@ -555,19 +565,19 @@ def _email_template_context(self, 'project_name': registration.title, 'disapproval_link': disapproval_link, 'registration_link': registration_link, - 'embargo_end_date': self.end_date, + 'embargo_end_date': str(self.end_date), 'approval_time_span': approval_time_span, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration()._id, }) else: context.update({ 'initiated_by': self.initiated_by.fullname, 'registration_link': 
registration_link, - 'embargo_end_date': self.end_date, + 'embargo_end_date': str(self.end_date), 'approval_time_span': approval_time_span, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration()._id, }) return context @@ -647,9 +657,6 @@ class Retraction(EmailApprovableSanction): DISPLAY_NAME = 'Retraction' SHORT_NAME = 'retraction' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_RETRACTION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_RETRACTION_NON_ADMIN - VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -709,7 +716,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): return { 'is_initiator': self.initiated_by == user, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration()._id, 'initiated_by': self.initiated_by.fullname, 'project_name': self.registrations.filter().values_list('title', flat=True).get(), 'registration_link': registration_link, @@ -722,7 +729,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'initiated_by': self.initiated_by.fullname, 'registration_link': registration_link, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration()._id, 'approval_time_span': approval_time_span, } @@ -770,6 +777,9 @@ class RegistrationApproval(SanctionCallbackMixin, EmailApprovableSanction): AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_ADMIN NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_pending_registration_admin' + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_pending_registration_non_admin' + VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -788,6 +798,12 @@ def find_approval_backlog(): guid=models.F('_id') ).order_by('-initiation_date') + def _get_authoriser_notification_type(self): + return NotificationType.objects.get(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE) + + def _get_non_authoriser_notification_type(self): + return NotificationType.objects.get(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE) + def _get_registration(self): return self.registrations.first() @@ -836,7 +852,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'is_initiator': self.initiated_by == user, 'initiated_by': self.initiated_by.fullname, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration()._id, 'registration_link': registration_link, 'approval_link': approval_link, 'disapproval_link': disapproval_link, @@ -848,7 +864,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'initiated_by': self.initiated_by.fullname, 'registration_link': registration_link, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration()._id, 'approval_time_span': approval_time_span, }) return context @@ -995,7 +1011,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): context.update({ 'is_initiator': self.initiated_by == user, 'is_moderated': 
self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration()._id, 'initiated_by': self.initiated_by.fullname, 'approval_link': approval_link, 'project_name': registration.title, @@ -1011,7 +1027,7 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'registration_link': registration_link, 'embargo_end_date': self.end_date, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration(), + 'reviewable': self._get_registration()._id, 'approval_time_span': approval_time_span, }) return context diff --git a/tests/test_registrations/test_embargoes.py b/tests/test_registrations/test_embargoes.py index 4c310eecd79..992a968f224 100644 --- a/tests/test_registrations/test_embargoes.py +++ b/tests/test_registrations/test_embargoes.py @@ -29,7 +29,7 @@ from osf.models.sanctions import SanctionCallbackMixin, Embargo from osf.utils import permissions from osf.models import Registration, Contributor, OSFUser, SpamStatus -from conftest import start_mock_send_grid +from conftest import start_mock_notification_send DUMMY_TOKEN = tokens.encode({ 'dummy': 'token' @@ -1101,7 +1101,7 @@ def setUp(self): } }) - self.mock_send_grid = start_mock_send_grid(self) + self.start_mock_notification_send = start_mock_notification_send(self) @mock.patch('osf.models.sanctions.EmailApprovableSanction.ask') @@ -1160,7 +1160,7 @@ def test_embargoed_registration_set_privacy_sends_mail(self): if Contributor.objects.get(user_id=contributor.id, node_id=self.registration.id).permission == permissions.ADMIN: admin_contributors.append(contributor) for admin in admin_contributors: - assert any([each[1]['to_addr'] == admin.username for each in self.mock_send_grid.call_args_list]) + assert any([each[1]['to_addr'] == admin.username for each in self.start_mock_notification_send.call_args_list]) @mock.patch('osf.models.sanctions.EmailApprovableSanction.ask') def test_make_child_embargoed_registration_public_asks_all_admins_in_tree(self, mock_ask): diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index dcc62d40b8b..d3f8cb72abf 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -24,7 +24,7 @@ ) from osf.models import Contributor, Retraction from osf.utils import permissions -from conftest import start_mock_send_grid +from conftest import start_mock_notification_send @@ -767,7 +767,7 @@ def setUp(self): self.retraction_get_url = self.registration.web_url_for('node_registration_retraction_get') self.justification = fake.sentence() - self.mock_send_grid = start_mock_send_grid(self) + self.start_mock_notification_send = start_mock_notification_send(self) def test_GET_retraction_page_when_pending_retraction_returns_HTTPError_BAD_REQUEST(self): self.registration.retract_registration(self.user) @@ -807,7 +807,7 @@ def test_POST_retraction_does_not_send_email_to_unregistered_admins(self): json={'justification': ''}, auth=self.user.auth, ) - assert self.mock_send_grid.call_count == 1 + assert self.start_mock_notification_send.call_count == 1 def test_POST_pending_embargo_returns_HTTPError_HTTPOK(self): self.registration.embargo_registration( @@ -897,7 +897,7 @@ def test_valid_POST_calls_send_mail_with_username(self): json={'justification': ''}, auth=self.user.auth, ) - assert self.mock_send_grid.called + assert self.start_mock_notification_send.called def test_non_contributor_GET_approval_returns_HTTPError_FORBIDDEN(self): non_contributor = 
AuthUserFactory() From da9712fc454d1d118e5e9abe229191f9e649e5de Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 15 Jul 2025 13:00:46 -0400 Subject: [PATCH 064/176] clean-up user confirmation emails --- osf/models/user.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/osf/models/user.py b/osf/models/user.py index 420171dc61f..fc3526d71f1 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -1072,13 +1072,16 @@ def set_password(self, raw_password, notify=True): raise ChangePasswordError(['Password cannot be the same as your email address']) super().set_password(raw_password) if had_existing_password and notify: - notification_type = NotificationType.objects.filter(name='password_reset') - if not notification_type.exists(): - raise NotificationType.DoesNotExist( - 'NotificationType with name password_reset does not exist.', - ) - notification_type = notification_type.first() - notification_type.emit(user=self, message_frequency='instantly', event_context={'can_change_preferences': False, 'osf_contact_email': website_settings.OSF_CONTACT_EMAIL}) + NotificationType.objects.get( + name=NotificationType.Type.USER_PASSWORD_RESET + ).emit( + user=self, + message_frequency='instantly', + event_context={ + 'can_change_preferences': False, + 'osf_contact_email': website_settings.OSF_CONTACT_EMAIL + } + ) remove_sessions_for_user(self) @classmethod From 928d0c19e4c8dab694b9941915298ce05a433845 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 15 Jul 2025 13:14:21 -0400 Subject: [PATCH 065/176] add file update notifications types --- addons/base/views.py | 58 +++++++++++++++++++++++---------- notifications.yaml | 4 +-- osf/email/__init__.py | 2 +- osf/models/notification_type.py | 1 + 4 files changed, 44 insertions(+), 21 deletions(-) diff --git a/addons/base/views.py b/addons/base/views.py index b302115bb7f..43c433f10be 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -34,7 +34,6 @@ from framework.flask import redirect from framework.sentry import log_exception from framework.transactions.handlers import no_auto_transaction -from website import mails from website import settings from addons.base import signals as file_signals from addons.base.utils import format_last_known_metadata, get_mfr_url @@ -52,11 +51,12 @@ DraftRegistration, Guid, FileVersionUserMetadata, - FileVersion + FileVersion, NotificationType ) from osf.metrics import PreprintView, PreprintDownload from osf.utils import permissions from osf.external.gravy_valet import request_helpers +from website.notifications.emails import localize_timestamp from website.profile.utils import get_profile_image_url from website.project import decorators from website.project.decorators import must_be_contributor_or_public, must_be_valid_project, check_contributor_auth @@ -576,25 +576,29 @@ def create_waterbutler_log(payload, **kwargs): params=payload ) - if payload.get('email') is True or payload.get('errors'): - mails.send_mail( - user.username, - mails.FILE_OPERATION_FAILED if payload.get('errors') - else mails.FILE_OPERATION_SUCCESS, - action=payload['action'], - source_node=source_node, - destination_node=destination_node, - source_path=payload['source']['materialized'], - source_addon=payload['source']['addon'], - destination_addon=payload['destination']['addon'], - osf_support_email=settings.OSF_SUPPORT_EMAIL - ) - + if payload.get('email') is True: + notification_type = NotificationType.Type.FILE_OPERATION_SUCCESS + elif payload.get('errors'): + notification_type = 
NotificationType.Type.FILE_OPERATION_FAILED + else: + raise NotImplementedError('No email template for this') + + NotificationType.objects.get(name=notification_type.value).emit( + user=user, + event_context={ + 'action': payload['action'], + 'source_node': source_node, + 'destination_node': destination_node, + 'source_path': payload['source']['materialized'], + 'source_addon': payload['source']['addon'], + 'destination_addon': payload['destination']['addon'], + 'osf_support_email': settings.OSF_SUPPORT_EMAIL + } + ) if payload.get('errors'): # Action failed but our function succeeded # Bail out to avoid file_signals return {'status': 'success'} - else: node.create_waterbutler_log(auth, action, payload) @@ -605,7 +609,25 @@ def create_waterbutler_log(payload, **kwargs): update_storage_usage_with_size(payload) with transaction.atomic(): - file_signals.file_updated.send(target=node, user=user, event_type=action, payload=payload) + f_type, action = action.split('_') + if payload['metadata']['materialized'].endswith('/'): + f_type = 'folder' + html_message = '{action} {f_type} "{name}".'.format( + action=markupsafe.escape(action), + f_type=markupsafe.escape(f_type), + name=markupsafe.escape(payload['metadata']['materialized'].lstrip('/')) + ) + + context = {} + context['message'] = html_message + context['profile_image_url'] = user.profile_image_url() + context['localized_timestamp'] = localize_timestamp(timezone.now(), user) + context['user_fullname'] = user.fullname + context['url'] = node.absolute_url + NotificationType.objects.get(name=action).emit( + user=user, + event_context=context, + ) return {'status': 'success'} diff --git a/notifications.yaml b/notifications.yaml index f1ad12340d7..b31d0cecf04 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -82,12 +82,12 @@ notification_types: template: 'website/templates/emails/new_pending_submissions.html.mako' notification_freq_default: instantly #### NODE - - name: file_updated + - name: node_file_updated __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/new_pending_submissions.html.mako' notification_freq_default: instantly - - name: wiki_updated + - name: node_wiki_updated __docs__: ... 
object_content_type_model_name: abstractnode template: 'website/templates/emails/new_pending_submissions.html.mako' diff --git a/osf/email/__init__.py b/osf/email/__init__.py index d8cc1d6de5a..689519bdeb5 100644 --- a/osf/email/__init__.py +++ b/osf/email/__init__.py @@ -20,7 +20,7 @@ def send_email_over_smtp(to_addr, notification_type, context): raise NotImplementedError('MAIL_USERNAME and MAIL_PASSWORD are required for STMP') msg = MIMEText( - notification_type.template.format(context), + notification_type.template.format(**context), 'html', _charset='utf-8' ) diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 9b36d20e93a..b965f47bd04 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -97,6 +97,7 @@ class Type(str, Enum): NODE_PENDING_REGISTRATION_ADMIN = 'node_pending_registration_admin' NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' + NODE_FILE_UPDATED = 'node_file_updated' # Provider notifications PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' From ff0ba30c90c50c403baf125cdeaa9c985d2f888f Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 15 Jul 2025 13:17:21 -0400 Subject: [PATCH 066/176] fix typo --- api/users/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/users/views.py b/api/users/views.py index 7045c3df7f1..39b7c847469 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -1065,7 +1065,7 @@ def _process_external_identity(self, user, external_identity, service_url): service_url += '&' + urlencode({'new': 'true'}) elif external_status == 'LINK': NotificationType.objects.get( - name=NotificationType.Type.USER_EXTERNAL_CONFIRM_SUCCESS.value, + name=NotificationType.Type.USER_EXTERNAL_LOGIN_LINK_SUCCESS.value, ).emit( user=user, message_frequency='instantly', From 55e155d384adbc298497ea9be0ec1140c6e2be92 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 15 Jul 2025 13:26:10 -0400 Subject: [PATCH 067/176] add for institutional access emails --- api/requests/serializers.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/api/requests/serializers.py b/api/requests/serializers.py index 08a574e38ce..deec3043e23 100644 --- a/api/requests/serializers.py +++ b/api/requests/serializers.py @@ -14,12 +14,11 @@ NodeRequest, PreprintRequest, Institution, - OSFUser, + OSFUser, NotificationType, ) from osf.utils.workflows import DefaultStates, RequestTypes, NodeRequestTypes from osf.utils import permissions as osf_permissions from website import language, settings -from website.mails import send_mail, NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST from rest_framework.exceptions import PermissionDenied, ValidationError @@ -188,18 +187,20 @@ def make_node_institutional_access_request(self, node, validated_data) -> NodeRe comment = validated_data.get('comment', '').strip() or language.EMPTY_REQUEST_INSTITUTIONAL_ACCESS_REQUEST_TEXT - send_mail( - to_addr=recipient.username, - mail=NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, + NotificationType.objects.get( + name=NotificationType.Type.NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST.value, + ).emit( user=recipient, - sender=sender, - bcc_addr=[sender.username] if validated_data['bcc_sender'] else None, - reply_to=sender.username if validated_data['reply_to'] else None, - recipient=recipient, - comment=comment, - institution=institution, - 
osf_url=settings.DOMAIN, - node=node_request.target, + event_context={ + 'sender': sender, + 'bcc_addr': [sender.username] if validated_data['bcc_sender'] else None, + 'reply_to': sender.username if validated_data['reply_to'] else None, + 'recipient': recipient, + 'comment': comment, + 'institution': institution, + 'osf_url': settings.DOMAIN, + 'node': node_request.target._id, + }, ) return node_request From 6dbcdda085f06b76e63a40509db0766ac478d07e Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Wed, 16 Jul 2025 08:57:59 +0300 Subject: [PATCH 068/176] [ENG-8246] Fixed deletion of maintenance alerts in admin (#11226) What When you try to delete the active maintenance banner from the admin app, it 502s. The traceback says that there is no url to redirect to and to provide a success url. Acceptance Criteria A user will be able to add a maintenance banner and then delete it. When the banner is added, it will show on the OSF dashboard page (among other places). When deleted, it will no longer show. --- admin/maintenance/views.py | 5 ++++- admin_tests/maintenance/test_views.py | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/admin/maintenance/views.py b/admin/maintenance/views.py index 6bc6eed86ac..05e7a8372c9 100644 --- a/admin/maintenance/views.py +++ b/admin/maintenance/views.py @@ -6,6 +6,7 @@ from admin.maintenance.forms import MaintenanceForm from django.shortcuts import redirect +from django.urls import reverse_lazy from django.forms.models import model_to_dict from django.views.generic import DeleteView, TemplateView from django.contrib.auth.mixins import PermissionRequiredMixin @@ -15,11 +16,13 @@ class DeleteMaintenance(PermissionRequiredMixin, DeleteView): permission_required = 'osf.delete_maintenancestate' raise_exception = True template_name = 'maintenance/delete_maintenance.html' + success_url = reverse_lazy('maintenance:display') def get_object(self, queryset=None): return MaintenanceState.objects.first() - def delete(self, request, *args, **kwargs): + def post(self, request, *args, **kwargs): + super().post(request, *args, **kwargs) maintenance.unset_maintenance() return redirect('maintenance:display') diff --git a/admin_tests/maintenance/test_views.py b/admin_tests/maintenance/test_views.py index fa3ed37c270..abeaa6af677 100644 --- a/admin_tests/maintenance/test_views.py +++ b/admin_tests/maintenance/test_views.py @@ -89,7 +89,7 @@ def view(self, req, plain_view): return view def test_delete(self, view, req): - res = view.delete(req) + res = view.post(req) assert res.url == '/maintenance/' assert res.status_code == 302 assert MaintenanceState.objects.all().count() == 0 From 76f601c0f50f97f10807e49e1c2f3cf8225fc141 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Wed, 16 Jul 2025 09:01:10 +0300 Subject: [PATCH 069/176] [ENG-8325] Public column does not display the visibility status of child nodes on the Nodes page in the Admin App Public column does not display the visibility status of child nodes on the Nodes page in the Admin App --- admin/templates/nodes/children.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/admin/templates/nodes/children.html b/admin/templates/nodes/children.html index bde24f8e4e1..02f92a398ea 100644 --- a/admin/templates/nodes/children.html +++ b/admin/templates/nodes/children.html @@ -23,7 +23,7 @@ {{ child.title }} - {{ child.public }} + {{ child.is_public }} {{ child.contributors|length }} {% if perms.osf.delete_node %} From 95be0e04e4f01587fabd90f63f3555f03cfa4754 Mon Sep 17 00:00:00 2001 From: 
ihorsokhanexoft Date: Wed, 16 Jul 2025 15:38:06 +0300 Subject: [PATCH 070/176] API: Allow /v2/users/me/preprints list view to filter by title Allow the User Preprints endpoint (/v2/users/me/preprints) to be filterable by title (?filter[title]=example), similar to how the User Nodes endpoint (/v2/users/me/nodes) supports this filter capability --- api/preprints/serializers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py index d22bb00ab81..e9dc8f0cbdf 100644 --- a/api/preprints/serializers.py +++ b/api/preprints/serializers.py @@ -92,6 +92,7 @@ def to_internal_value(self, license_id): class PreprintSerializer(TaxonomizableSerializerMixin, MetricsSerializerMixin, JSONAPISerializer): filterable_fields = frozenset([ 'id', + 'title', 'date_created', 'date_modified', 'date_published', From cc51cb3d4592388ee098105b52b0e50a96718ff3 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Wed, 16 Jul 2025 16:17:43 +0300 Subject: [PATCH 071/176] [ENG-8224] Fixed force archive template with registration addons (#11210) Enable Product Team to Force Archive Registrations in the Admin App --- admin/nodes/views.py | 6 +++--- .../nodes/registration_force_archive_form.html | 11 ----------- 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/admin/nodes/views.py b/admin/nodes/views.py index 1789ca773c0..71c3f60e965 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -762,7 +762,7 @@ def post(self, request, *args, **kwargs): allow_unconfigured = force_archive_params.get('allow_unconfigured', False) - addons = set(force_archive_params.getlist('addons', [])) + addons = set(registration.registered_from.get_addon_names()) addons.update(DEFAULT_PERMISSIBLE_ADDONS) try: @@ -781,8 +781,8 @@ def post(self, request, *args, **kwargs): registration, permissible_addons=addons, allow_unconfigured=allow_unconfigured, - skip_collision=skip_collision, - delete_collision=delete_collision, + skip_collisions=skip_collision, + delete_collisions=delete_collision, ) messages.success(request, 'Registration archive process has finished.') except Exception as exc: diff --git a/admin/templates/nodes/registration_force_archive_form.html b/admin/templates/nodes/registration_force_archive_form.html index ab52d7f7c33..32331fa7a54 100644 --- a/admin/templates/nodes/registration_force_archive_form.html +++ b/admin/templates/nodes/registration_force_archive_form.html @@ -15,17 +15,6 @@

    Handle collision:

    -
    -

    Permissible Addons (Optional):

    -
    -
    - {% for addon_name in node.get_addon_names %}
    -
    - {% endfor %}
    -
    -

    Other:

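
The "Permissible Addons" checkboxes are dropped from the template because the view now derives the addon set from the registration itself and passes the renamed collision keywords through. A minimal sketch of that flow under the admin/nodes/views.py hunk above; force_archive_registration is a stand-in name, since the hunk does not show which helper actually receives these keyword arguments:

# Sketch of the flow in the admin force-archive view after this patch.
# 'force_archive_registration' is a stand-in name; the hunk does not show which
# helper actually receives these keyword arguments.
def run_force_archive(registration, force_archive_params, skip_collision, delete_collision,
                      default_permissible_addons, force_archive_registration):
    allow_unconfigured = force_archive_params.get('allow_unconfigured', False)

    # Addons are now derived from the registered-from node rather than form checkboxes.
    addons = set(registration.registered_from.get_addon_names())
    addons.update(default_permissible_addons)

    force_archive_registration(
        registration,
        permissible_addons=addons,
        allow_unconfigured=allow_unconfigured,
        skip_collisions=skip_collision,
        delete_collisions=delete_collision,
    )
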
    From d43de2bf18ac792012fef5c091e66e38567c2a86 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 16 Jul 2025 09:52:53 -0400 Subject: [PATCH 072/176] fix embargo sanctions typo --- notifications.yaml | 20 +++++++++++++++----- osf/models/sanctions.py | 3 +++ 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index b31d0cecf04..352ac478edd 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -14,7 +14,7 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/new_pending_submissions.html.mako' notification_freq_default: instantly - - name: password_reset + - name: user_password_reset __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/password_reset.html.mako' @@ -85,28 +85,38 @@ notification_types: - name: node_file_updated __docs__: ... object_content_type_model_name: abstractnode - template: 'website/templates/emails/new_pending_submissions.html.mako' + template: 'website/templates/emails/file_updated.html.mako' notification_freq_default: instantly - name: node_wiki_updated __docs__: ... object_content_type_model_name: abstractnode - template: 'website/templates/emails/new_pending_submissions.html.mako' + template: 'website/templates/emails/file_updated.html.mako' notification_freq_default: instantly - name: node_request_institutional_access_request __docs__: ... object_content_type_model_name: abstractnode - template: 'website/templates/emails/new_pending_submissions.html.mako' + template: 'website/templates/emails/node_request_institutional_access_request.html.mako' notification_freq_default: instantly - name: node_contributor_added_access_request __docs__: ... object_content_type_model_name: abstractnode - template: 'website/templates/emails/new_pending_submissions.html.mako' + template: 'website/templates/emails/contributor_added_access_request.html.mako' notification_freq_default: instantly - name: node_pending_registration_admin __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_registration_admin.html.mako' notification_freq_default: instantly + - name: node_embargo_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/pending_embargo_admin.html.mako' + notification_freq_default: instantly + - name: node_embargo_nonadmin + __docs__: ... 
+ object_content_type_model_name: abstractnode + template: 'website/templates/emails/pending_embargo_non_admin.html.mako' + notification_freq_default: instantly #### PREPRINT - name: pending_retraction_admin diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index b5f80260c85..baaa810527a 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -474,6 +474,9 @@ class Embargo(SanctionCallbackMixin, EmailApprovableSanction): DISPLAY_NAME = 'Embargo' SHORT_NAME = 'embargo' + AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_embargo_admin' + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_embargo_non_admin' + VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' From 99e26cb25f2b0f5b8209a2a33189ffa943d3c14f Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 16 Jul 2025 10:49:43 -0400 Subject: [PATCH 073/176] add brand relationship to collectionprovider --- api/providers/serializers.py | 7 ++++++ .../views/test_collection_provider_detail.py | 24 +++++++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/api/providers/serializers.py b/api/providers/serializers.py index 3de618d4a39..47c3d4a6115 100644 --- a/api/providers/serializers.py +++ b/api/providers/serializers.py @@ -121,6 +121,12 @@ class Meta: related_view='providers:collection-providers:notification-subscription-list', related_view_kwargs={'provider_id': '<_id>'}, ) + + brand = RelationshipField( + related_view='brands:brand-detail', + related_view_kwargs={'brand_id': ''}, + ) + filterable_fields = frozenset([ 'allow_submissions', 'allow_commenting', @@ -129,6 +135,7 @@ class Meta: 'reviews_workflow', 'domain_redirect_enabled', 'id', + 'brand', 'name', ]) diff --git a/api_tests/providers/collections/views/test_collection_provider_detail.py b/api_tests/providers/collections/views/test_collection_provider_detail.py index 208d8d2a1a8..25f1f4b80ca 100644 --- a/api_tests/providers/collections/views/test_collection_provider_detail.py +++ b/api_tests/providers/collections/views/test_collection_provider_detail.py @@ -4,6 +4,7 @@ from api_tests.providers.mixins import ProviderExistsMixin from osf_tests.factories import ( CollectionProviderFactory, + BrandFactory, ) @@ -31,3 +32,26 @@ def provider_list_url(self, provider): @pytest.fixture() def provider_list_url_fake(self, fake_url): return f'{fake_url}submissions/' + + @pytest.fixture() + def brand(self): + return BrandFactory() + + @pytest.fixture() + def provider_with_brand(self, brand): + registration_provider = CollectionProviderFactory() + registration_provider.brand = brand + registration_provider.save() + return registration_provider + + @pytest.fixture() + def provider_url_w_brand(self, provider_with_brand): + return f'/{API_BASE}providers/collections/{provider_with_brand._id}/' + + def test_registration_provider_with_special_fields(self, app, provider_with_brand, brand, provider_url_w_brand): + res = app.get(provider_url_w_brand) + + assert res.status_code == 200 + data = res.json['data'] + + assert data['relationships']['brand']['data']['id'] == str(brand.id) From 9776d92c6fdb8d3aa3eb45c2ba4c4c0c61cb5ef6 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 16 Jul 2025 11:14:31 -0400 Subject: [PATCH 074/176] fix mocks for institutional access tests --- .../test_node_request_institutional_access.py | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git 
a/api_tests/requests/views/test_node_request_institutional_access.py b/api_tests/requests/views/test_node_request_institutional_access.py index d868739e9bd..35e18042117 100644 --- a/api_tests/requests/views/test_node_request_institutional_access.py +++ b/api_tests/requests/views/test_node_request_institutional_access.py @@ -9,7 +9,7 @@ @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') +@pytest.mark.usefixtures('mock_notification_send') class TestNodeRequestListInstitutionalAccess(NodeRequestTestMixin): @pytest.fixture() @@ -206,37 +206,37 @@ def test_institutional_admin_unauth_institution(self, app, project, institution_ assert res.status_code == 403 assert 'Institutional request access is not enabled.' in res.json['errors'][0]['detail'] - def test_email_not_sent_without_recipient(self, mock_send_grid, app, project, institutional_admin, url, + def test_email_not_sent_without_recipient(self, mock_notification_send, app, project, institutional_admin, url, create_payload, institution): """ Test that an email is not sent when no recipient is listed when an institutional access request is made, but the request is still made anyway without email. """ del create_payload['data']['relationships']['message_recipient'] - mock_send_grid.reset_mock() + mock_notification_send.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 # Check that an email is sent - assert not mock_send_grid.called + assert not mock_notification_send.called - def test_email_not_sent_outside_institution(self, mock_send_grid, app, project, institutional_admin, url, + def test_email_not_sent_outside_institution(self, mock_notification_send, app, project, institutional_admin, url, create_payload, user_without_affiliation, institution): """ Test that you are prevented from requesting a user with the correct institutional affiliation. """ create_payload['data']['relationships']['message_recipient']['data']['id'] = user_without_affiliation._id - mock_send_grid.reset_mock() + mock_notification_send.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth, expect_errors=True) assert res.status_code == 403 assert f'User {user_without_affiliation._id} is not affiliated with the institution.' in res.json['errors'][0]['detail'] # Check that an email is sent - assert not mock_send_grid.called + assert not mock_notification_send.called def test_email_sent_on_creation( self, - mock_send_grid, + mock_notification_send, app, project, institutional_admin, @@ -248,15 +248,15 @@ def test_email_sent_on_creation( """ Test that an email is sent to the appropriate recipients when an institutional access request is made. """ - mock_send_grid.reset_mock() + mock_notification_send.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_send_grid.call_count == 1 + assert mock_notification_send.call_count == 1 def test_bcc_institutional_admin( self, - mock_send_grid, + mock_notification_send, app, project, institutional_admin, @@ -269,15 +269,15 @@ def test_bcc_institutional_admin( Ensure BCC option works as expected, sending messages to sender giving them a copy for themselves. 
""" create_payload['data']['attributes']['bcc_sender'] = True - mock_send_grid.reset_mock() + mock_notification_send.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_send_grid.call_count == 1 + assert mock_notification_send.call_count == 1 def test_reply_to_institutional_admin( self, - mock_send_grid, + mock_notification_send, app, project, institutional_admin, @@ -290,11 +290,11 @@ def test_reply_to_institutional_admin( Ensure reply-to option works as expected, allowing a reply to header be added to the email. """ create_payload['data']['attributes']['reply_to'] = True - mock_send_grid.reset_mock() + mock_notification_send.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - assert mock_send_grid.call_count == 1 + assert mock_notification_send.call_count == 1 def test_access_requests_disabled_raises_permission_denied( self, app, node_with_disabled_access_requests, user_with_affiliation, institutional_admin, create_payload @@ -313,7 +313,7 @@ def test_access_requests_disabled_raises_permission_denied( def test_placeholder_text_when_comment_is_empty( self, - mock_send_grid, + mock_notification_send, app, project, institutional_admin, @@ -327,11 +327,11 @@ def test_placeholder_text_when_comment_is_empty( """ # Test with empty comment create_payload['data']['attributes']['comment'] = '' - mock_send_grid.reset_mock() + mock_notification_send.reset_mock() res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert res.status_code == 201 - mock_send_grid.assert_called() + mock_notification_send.assert_called() def test_requester_can_resubmit(self, app, project, institutional_admin, url, create_payload): """ From cfc1f97e14c333c8c3084de4d3f7f2c119fa5b62 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 16 Jul 2025 15:40:08 -0400 Subject: [PATCH 075/176] fix file added updates for notifications --- addons/base/views.py | 35 ++++++++++++++-------------- api/requests/serializers.py | 9 +++---- notifications.yaml | 5 ++++ osf/models/notification_type.py | 2 ++ website/project/views/contributor.py | 34 +++++++++++++-------------- 5 files changed, 47 insertions(+), 38 deletions(-) diff --git a/addons/base/views.py b/addons/base/views.py index 43c433f10be..5f64c6b3f24 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -609,25 +609,26 @@ def create_waterbutler_log(payload, **kwargs): update_storage_usage_with_size(payload) with transaction.atomic(): - f_type, action = action.split('_') + f_type, item_action = action.split('_') if payload['metadata']['materialized'].endswith('/'): f_type = 'folder' - html_message = '{action} {f_type} "{name}".'.format( - action=markupsafe.escape(action), - f_type=markupsafe.escape(f_type), - name=markupsafe.escape(payload['metadata']['materialized'].lstrip('/')) - ) - - context = {} - context['message'] = html_message - context['profile_image_url'] = user.profile_image_url() - context['localized_timestamp'] = localize_timestamp(timezone.now(), user) - context['user_fullname'] = user.fullname - context['url'] = node.absolute_url - NotificationType.objects.get(name=action).emit( - user=user, - event_context=context, - ) + match f'node_{action}': + case NotificationType.Type.NODE_FILE_ADDED: + NotificationType.objects.get( + name=NotificationType.Type.NODE_FILE_ADDED + ).emit( + user=user, + event_context={ + 'message': f'{markupsafe.escape(item_action)} {markupsafe.escape(f_type)} "' 
+ f'{markupsafe.escape(payload['metadata']['materialized'].lstrip('/'))}".', + 'profile_image_url': user.profile_image_url(), + 'localized_timestamp': localize_timestamp(timezone.now(), user), + 'user_fullname': user.fullname, + 'url': node.absolute_url, + } + ) + case _: + raise NotImplementedError(f'action {action} not implemented') return {'status': 'success'} diff --git a/api/requests/serializers.py b/api/requests/serializers.py index deec3043e23..a8b5830031f 100644 --- a/api/requests/serializers.py +++ b/api/requests/serializers.py @@ -188,16 +188,17 @@ def make_node_institutional_access_request(self, node, validated_data) -> NodeRe comment = validated_data.get('comment', '').strip() or language.EMPTY_REQUEST_INSTITUTIONAL_ACCESS_REQUEST_TEXT NotificationType.objects.get( - name=NotificationType.Type.NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST.value, + name=NotificationType.Type.NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, ).emit( user=recipient, + message_frequency='instantly', event_context={ - 'sender': sender, + 'sender': sender.username, 'bcc_addr': [sender.username] if validated_data['bcc_sender'] else None, 'reply_to': sender.username if validated_data['reply_to'] else None, - 'recipient': recipient, + 'recipient': recipient.username if recipient else None, 'comment': comment, - 'institution': institution, + 'institution': institution.id if institution else None, 'osf_url': settings.DOMAIN, 'node': node_request.target._id, }, diff --git a/notifications.yaml b/notifications.yaml index 352ac478edd..21fb74965a3 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -87,6 +87,11 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' notification_freq_default: instantly + - name: node_file_added + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' + notification_freq_default: instantly - name: node_wiki_updated __docs__: ... 
object_content_type_model_name: abstractnode diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index b965f47bd04..b780fe129ff 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -78,6 +78,7 @@ class Type(str, Enum): USER_FORWARD_INVITE_REGISTERED = 'user_forward_invite_registered' USER_INVITE_DRAFT_REGISTRATION = 'user_invite_draft_registration' USER_INVITE_OSF_PREPRINT = 'user_invite_osf_preprint' + USER_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'user_contributor_added_preprint_node_from_osf' # Node notifications NODE_COMMENT = 'node_comments' @@ -98,6 +99,7 @@ class Type(str, Enum): NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' NODE_FILE_UPDATED = 'node_file_updated' + NODE_FILE_ADDED = 'node_file_added' # Provider notifications PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index f3e06aff3fc..d8e1968636d 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -17,7 +17,7 @@ from framework.sessions import get_session from framework.transactions.handlers import no_auto_transaction from framework.utils import get_timestamp, throttle_period_expired -from osf.models import Tag +from osf.models import Tag, NotificationType from osf.exceptions import NodeStateError from osf.models import AbstractNode, DraftRegistration, OSFUser, Preprint, PreprintProvider, RecentlyAddedContributor from osf.utils import sanitize @@ -584,35 +584,35 @@ def notify_added_contributor(node, contributor, auth=None, email_template='defau if contrib_on_parent_node: if email_template == 'preprint': if node.provider.is_default: - email_template = mails.CONTRIBUTOR_ADDED_OSF_PREPRINT + email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT logo = settings.OSF_PREPRINTS_LOGO else: email_template = mails.CONTRIBUTOR_ADDED_PREPRINT(node.provider) logo = node.provider._id elif email_template == 'draft_registration': - email_template = mails.CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION elif email_template == 'access_request': email_template = mails.CONTRIBUTOR_ADDED_ACCESS_REQUEST elif node.has_linked_published_preprints: # Project holds supplemental materials for a published preprint - email_template = mails.CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF + email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF logo = settings.OSF_PREPRINTS_LOGO else: - email_template = mails.CONTRIBUTOR_ADDED_DEFAULT + email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT - mails.send_mail( - to_addr=contributor.username, - mail=email_template, + NotificationType.objects.create(name=email_template).emit( user=contributor, - node=node, - referrer_name=auth.user.fullname if auth else '', - is_initiator=getattr(auth, 'user', False) == contributor, - all_global_subscriptions_none=check_if_all_global_subscriptions_are_none(contributor), - branded_service=node.provider, - can_change_preferences=False, - logo=logo, - osf_contact_email=settings.OSF_CONTACT_EMAIL, - published_preprints=[] if isinstance(node, (Preprint, DraftRegistration)) else serialize_preprints(node, user=None) + event_context={ + 'node': node.id, + 'referrer_name': auth.user.fullname if auth else '', + 
'is_initiator': getattr(auth, 'user', False) == contributor, + 'all_global_subscriptions_none': check_if_all_global_subscriptions_are_none(contributor), + 'branded_service': node.provider, + 'can_change_preferences': False, + 'logo': logo, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + 'published_preprints': [] if isinstance(node, (Preprint, DraftRegistration)) else serialize_preprints(node, contributor) + } ) contributor.contributor_added_email_records[node._id]['last_sent'] = get_timestamp() From 91672c074378bdf9ec6a619c87b4b722a4afd882 Mon Sep 17 00:00:00 2001 From: futa-ikeda Date: Wed, 16 Jul 2025 19:01:18 -0400 Subject: [PATCH 076/176] Add collections scopes to FULL_READ and FULL_WRITE --- framework/auth/oauth_scopes.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/framework/auth/oauth_scopes.py b/framework/auth/oauth_scopes.py index 65a852f058b..534e6c77684 100644 --- a/framework/auth/oauth_scopes.py +++ b/framework/auth/oauth_scopes.py @@ -353,6 +353,7 @@ class ComposedScopes: CoreScopes.USERS_MESSAGE_READ_EMAIL )\ + ( + CoreScopes.NODE_COLLECTIONS_READ, CoreScopes.READ_COLLECTION_SUBMISSION, CoreScopes.READ_COLLECTION_SUBMISSION_ACTION, ) @@ -370,6 +371,7 @@ class ComposedScopes: + APPLICATIONS_WRITE\ + ( CoreScopes.CEDAR_METADATA_RECORD_WRITE, + CoreScopes.NODE_COLLECTIONS_WRITE, CoreScopes.WRITE_COLLECTION_SUBMISSION_ACTION, CoreScopes.WRITE_COLLECTION_SUBMISSION, CoreScopes.USERS_MESSAGE_WRITE_EMAIL, From 2feec4a78ebd536a833407dbbaea762c8242a27a Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 16 Jul 2025 16:55:45 -0400 Subject: [PATCH 077/176] add notification type emit for institutional requests --- notifications.yaml | 35 ++++++++++++++++++++++++++++ osf/models/mixins.py | 25 ++++++++++++-------- osf/models/notification_type.py | 1 + osf/utils/machines.py | 31 +++++++++++++++--------- website/project/views/contributor.py | 4 ++-- 5 files changed, 73 insertions(+), 23 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index 21fb74965a3..5d91d5675fe 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -19,6 +19,26 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/password_reset.html.mako' notification_freq_default: instantly + - name: user_contributor_added_default + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/contributor_added_default.html.mako' + notification_freq_default: instantly + - name: user_contributor_added_draft_registration + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/contributor_added_draft_registration.html.mako' + notification_freq_default: instantly + - name: user_contributor_added_preprint_node_from_osf + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/contributor_added_preprint_node_from_osf.html.mako' + notification_freq_default: instantly + - name: user_contributor_added_access_request + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/contributor_added_access_request.html.mako' + notification_freq_default: instantly - name: forgot_password __docs__: ... object_content_type_model_name: osfuser @@ -122,6 +142,21 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_embargo_non_admin.html.mako' notification_freq_default: instantly + - name: node_affiliation_changed + __docs__: ... 
+ object_content_type_model_name: abstractnode + template: 'website/templates/emails/project_affiliation_changed.html.mako' + notification_freq_default: instantly + - name: node_request_access_denied + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/access_request_rejected.html.mako' + notification_freq_default: instantly + - name: node_access_request_submitted + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/access_request_submitted.html.mako' + notification_freq_default: instantly #### PREPRINT - name: pending_retraction_admin diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 1e2d033e713..b8e7e8e8778 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -306,15 +306,18 @@ def add_affiliated_institution(self, inst, user, log=True, ignore_user_affiliati if not self.is_affiliated_with_institution(inst): self.affiliated_institutions.add(inst) self.update_search() + from . import NotificationType + if notify and getattr(self, 'type', False) == 'osf.node': for user, _ in self.get_admin_contributors_recursive(unique_users=True): - mails.send_mail( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - **{ + NotificationType.objects.get( + name=NotificationType.Type.NODE_AFFILIATION_CHANGED + ).emit( + user=user, + event_context={ 'user': user, 'node': self, - }, + } ) if log: params = self.log_params @@ -345,16 +348,18 @@ def remove_affiliated_institution(self, inst, user, save=False, log=True, notify if save: self.save() self.update_search() + from . import NotificationType if notify and getattr(self, 'type', False) == 'osf.node': for user, _ in self.get_admin_contributors_recursive(unique_users=True): - mails.send_mail( - user.username, - mails.PROJECT_AFFILIATION_CHANGED, - **{ + NotificationType.objects.get( + name=NotificationType.Type.NODE_AFFILIATION_CHANGED + ).emit( + user=user, + event_context={ 'user': user, 'node': self, - }, + } ) return True diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index b780fe129ff..9fffd8d508e 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -79,6 +79,7 @@ class Type(str, Enum): USER_INVITE_DRAFT_REGISTRATION = 'user_invite_draft_registration' USER_INVITE_OSF_PREPRINT = 'user_invite_osf_preprint' USER_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'user_contributor_added_preprint_node_from_osf' + USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'user_contributor_added_access_request' # Node notifications NODE_COMMENT = 'node_comments' diff --git a/osf/utils/machines.py b/osf/utils/machines.py index 04713b3cb26..7c7ff055511 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -238,14 +238,18 @@ def notify_submit(self, ev): context = self.get_context() context['contributors_url'] = f'{self.machineable.target.absolute_url}contributors/' context['project_settings_url'] = f'{self.machineable.target.absolute_url}settings/' + from osf.models import NotificationType + if not self.machineable.request_type == NodeRequestTypes.INSTITUTIONAL_REQUEST.value: for admin in self.machineable.target.get_users_with_perm(permissions.ADMIN): - mails.send_mail( - admin.username, - mails.ACCESS_REQUEST_SUBMITTED, - admin=admin, - osf_contact_email=OSF_CONTACT_EMAIL, - **context + NotificationType.objects.get( + name=NotificationType.Type.NODE_REQUEST_ACCESS_SUBMITTED, + ).emit( + user=admin, + event_context={ + 'osf_contact_email': OSF_CONTACT_EMAIL, + **context + } ) def 
notify_resubmit(self, ev): @@ -257,13 +261,18 @@ def notify_resubmit(self, ev): def notify_accept_reject(self, ev): """ Notify requester that admins have approved/denied """ + from osf.models import NotificationType + if ev.event.name == DefaultTriggers.REJECT.value: context = self.get_context() - mails.send_mail( - self.machineable.creator.username, - mails.ACCESS_REQUEST_DENIED, - osf_contact_email=OSF_CONTACT_EMAIL, - **context + NotificationType.objects.get( + name=NotificationType.Type.NODE_REQUEST_ACCESS_DENIED + ).emit( + user=self.machineable.creator, + event_context={ + 'osf_contact_email': OSF_CONTACT_EMAIL, + **context + } ) else: # add_contributor sends approval notification email diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index d8e1968636d..f27aaa01084 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -592,7 +592,7 @@ def notify_added_contributor(node, contributor, auth=None, email_template='defau elif email_template == 'draft_registration': email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION elif email_template == 'access_request': - email_template = mails.CONTRIBUTOR_ADDED_ACCESS_REQUEST + email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST elif node.has_linked_published_preprints: # Project holds supplemental materials for a published preprint email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF @@ -600,7 +600,7 @@ def notify_added_contributor(node, contributor, auth=None, email_template='defau else: email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT - NotificationType.objects.create(name=email_template).emit( + NotificationType.objects.get(name=email_template).emit( user=contributor, event_context={ 'node': node.id, From f3965f734cd6eacbcc042fb08f558ab1e025b271 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 17 Jul 2025 09:53:07 -0400 Subject: [PATCH 078/176] add notification type emit for contributor added to provider for preprints --- notifications.yaml | 5 +++++ website/project/views/contributor.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/notifications.yaml b/notifications.yaml index 5d91d5675fe..23f00b2f0fe 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -101,6 +101,11 @@ notification_types: object_content_type_model_name: abstractprovider template: 'website/templates/emails/new_pending_submissions.html.mako' notification_freq_default: instantly + - name: provider_contributor_added_preprint + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/contributor_added_preprints.html.mako' + notification_freq_default: instantly #### NODE - name: node_file_updated __docs__: ... 
diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index f27aaa01084..591b6b716c7 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -587,7 +587,7 @@ def notify_added_contributor(node, contributor, auth=None, email_template='defau email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT logo = settings.OSF_PREPRINTS_LOGO else: - email_template = mails.CONTRIBUTOR_ADDED_PREPRINT(node.provider) + email_template = NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT logo = node.provider._id elif email_template == 'draft_registration': email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION From 19c838dec6febb260d0f9875f92eb8b3050ad88e Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 17 Jul 2025 10:11:14 -0400 Subject: [PATCH 079/176] add external login link success notification type --- notifications.yaml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index 23f00b2f0fe..3dceffc8874 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -1,6 +1,5 @@ # This file contains the configuration for our notification system using the NotificationType object, this is intended to -# exist as a simple declarative list of NotificationTypes and their attributes. Every notification sent by OSF should be -# represented here for bussiness logic dnd metrics reasons. +# exist as a simple declarative list of NotificationTypes and their attributes. # Workflow: # 1. Add a new notification template @@ -39,12 +38,17 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_access_request.html.mako' notification_freq_default: instantly + - name: user_external_login_link_success + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/external_confirm_success.html.mako' + notification_freq_default: instantly - name: forgot_password __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forgot_password.html.mako' notification_freq_default: instantly - - name: welcome_osf4i + - name: user_welcome_osf4i __docs__: ... 
object_content_type_model_name: osfuser template: 'website/templates/emails/welcome_osf4i.html.mako' From c1bfdf3b5b5d1cb5fa6de85bff8d494e5879b6e3 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 17 Jul 2025 10:52:26 -0400 Subject: [PATCH 080/176] populate notification types in OSF Gather tests --- osf_tests/metadata/test_osf_gathering.py | 3 +++ tests/test_adding_contributor_views.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/osf_tests/metadata/test_osf_gathering.py b/osf_tests/metadata/test_osf_gathering.py index 33be346e2df..3480a106276 100644 --- a/osf_tests/metadata/test_osf_gathering.py +++ b/osf_tests/metadata/test_osf_gathering.py @@ -8,6 +8,7 @@ from api_tests.utils import create_test_file from framework.auth import Auth +from osf.management.commands.populate_notification_types import populate_notification_types from osf.metadata import osf_gathering from osf.metadata.rdfutils import ( FOAF, @@ -36,6 +37,8 @@ class TestOsfGathering(TestCase): @classmethod def setUpTestData(cls): + + populate_notification_types() # users: cls.user__admin = factories.UserFactory() cls.user__readwrite = factories.UserFactory( diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 17c2da39bc3..b6015e194bf 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -39,7 +39,7 @@ OsfTestCase, ) from tests.test_cas_authentication import generate_external_user_with_resp -from website import mails, settings +from website import settings from website.profile.utils import add_contributor_json, serialize_unregistered from website.project.signals import contributor_added from website.project.views.contributor import ( From a6e3ee888cfb4b4678415015c8b8893c5c5019e4 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 17 Jul 2025 15:29:24 -0400 Subject: [PATCH 081/176] update the file operations to use new notifications system --- addons/base/views.py | 87 ++++++++++---------- api/nodes/views.py | 27 +++++-- notifications.yaml | 25 ++++++ osf/models/notification_type.py | 6 +- website/archiver/utils.py | 135 ++++++++++++++++++-------------- 5 files changed, 176 insertions(+), 104 deletions(-) diff --git a/addons/base/views.py b/addons/base/views.py index 5f64c6b3f24..e6d19d02758 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -576,25 +576,24 @@ def create_waterbutler_log(payload, **kwargs): params=payload ) - if payload.get('email') is True: - notification_type = NotificationType.Type.FILE_OPERATION_SUCCESS - elif payload.get('errors'): - notification_type = NotificationType.Type.FILE_OPERATION_FAILED - else: - raise NotImplementedError('No email template for this') - - NotificationType.objects.get(name=notification_type.value).emit( - user=user, - event_context={ - 'action': payload['action'], - 'source_node': source_node, - 'destination_node': destination_node, - 'source_path': payload['source']['materialized'], - 'source_addon': payload['source']['addon'], - 'destination_addon': payload['destination']['addon'], - 'osf_support_email': settings.OSF_SUPPORT_EMAIL - } - ) + if payload.get('email') or payload.get('errors'): + if payload.get('email'): + notification_type = NotificationType.Type.FILE_OPERATION_SUCCESS + if payload.get('errors'): + notification_type = NotificationType.Type.FILE_OPERATION_FAILED + + NotificationType.objects.get(name=notification_type.value).emit( + user=user, + event_context={ + 'action': payload['action'], + 'source_node': source_node, + 
'destination_node': destination_node, + 'source_path': payload['source']['materialized'], + 'source_addon': payload['source']['addon'], + 'destination_addon': payload['destination']['addon'], + 'osf_support_email': settings.OSF_SUPPORT_EMAIL + } + ) if payload.get('errors'): # Action failed but our function succeeded # Bail out to avoid file_signals @@ -608,27 +607,35 @@ def create_waterbutler_log(payload, **kwargs): if target_node and payload['action'] != 'download_file': update_storage_usage_with_size(payload) - with transaction.atomic(): - f_type, item_action = action.split('_') - if payload['metadata']['materialized'].endswith('/'): - f_type = 'folder' - match f'node_{action}': - case NotificationType.Type.NODE_FILE_ADDED: - NotificationType.objects.get( - name=NotificationType.Type.NODE_FILE_ADDED - ).emit( - user=user, - event_context={ - 'message': f'{markupsafe.escape(item_action)} {markupsafe.escape(f_type)} "' - f'{markupsafe.escape(payload['metadata']['materialized'].lstrip('/'))}".', - 'profile_image_url': user.profile_image_url(), - 'localized_timestamp': localize_timestamp(timezone.now(), user), - 'user_fullname': user.fullname, - 'url': node.absolute_url, - } - ) - case _: - raise NotImplementedError(f'action {action} not implemented') + file_signals.file_updated.send(target=node, user=user, event_type=action, payload=payload) + + match f'node_{action}': + case NotificationType.Type.NODE_FILE_ADDED: + notification = NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_ADDED) + case NotificationType.Type.NODE_FILE_REMOVED: + notification = NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_REMOVED) + case NotificationType.Type.NODE_FILE_UPDATED: + notification = NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) + case NotificationType.Type.NODE_ADDON_FILE_RENAMED: + notification = NotificationType.objects.get(name=NotificationType.Type.NODE_ADDON_FILE_RENAMED) + case NotificationType.Type.NODE_ADDON_FILE_COPIED: + notification = NotificationType.objects.get(name=NotificationType.Type.NODE_ADDON_FILE_COPIED) + case NotificationType.Type.NODE_ADDON_FILE_REMOVED: + notification = NotificationType.objects.get(name=NotificationType.Type.NODE_ADDON_FILE_REMOVED) + case NotificationType.Type.NODE_ADDON_FILE_MOVED: + notification = NotificationType.objects.get(name=NotificationType.Type.NODE_ADDON_FILE_MOVED) + case _: + raise NotImplementedError(f'action {action} not implemented') + + notification.emit( + user=user, + event_context={ + 'profile_image_url': user.profile_image_url(), + 'localized_timestamp': localize_timestamp(timezone.now(), user), + 'user_fullname': user.fullname, + 'url': node.absolute_url, + } + ) return {'status': 'success'} diff --git a/api/nodes/views.py b/api/nodes/views.py index 8e5352f6f30..14e104b4de0 100644 --- a/api/nodes/views.py +++ b/api/nodes/views.py @@ -152,11 +152,11 @@ File, Folder, CedarMetadataRecord, - Preprint, Collection, + Preprint, Collection, NotificationType, ) from addons.osfstorage.models import Region from osf.utils.permissions import ADMIN, WRITE_NODE -from website import mails, settings +from website import settings # This is used to rethrow v1 exceptions as v2 HTTP_CODE_MAP = { @@ -1045,11 +1045,28 @@ def perform_create(self, serializer): try: fork = serializer.save(node=node) except Exception as exc: - mails.send_mail(user.email, mails.FORK_FAILED, title=node.title, guid=node._id, can_change_preferences=False) + NotificationType.objects.get( + 
name=NotificationType.Type.NODE_FORK_FAILED,
+            ).emit(
+                user=user,
+                event_context={
+                    'guid': node._id,
+                    'title': node.title,
+                    'can_change_preferences': False,
+                },
+            )
             raise exc
         else:
-            mails.send_mail(user.email, mails.FORK_COMPLETED, title=node.title, guid=fork._id, can_change_preferences=False)
-
+            NotificationType.objects.get(
+                name=NotificationType.Type.NODE_FORK_COMPLETED,
+            ).emit(
+                user=user,
+                event_context={
+                    'guid': fork._id,
+                    'title': node.title,
+                    'can_change_preferences': False,
+                },
+            )
 
 class NodeLinkedByNodesList(JSONAPIBaseView, generics.ListAPIView, NodeMixin):
     permission_classes = (
diff --git a/notifications.yaml b/notifications.yaml
index 3dceffc8874..9b596962240 100644
--- a/notifications.yaml
+++ b/notifications.yaml
@@ -121,6 +121,31 @@ notification_types:
     object_content_type_model_name: abstractnode
     template: 'website/templates/emails/file_updated.html.mako'
     notification_freq_default: instantly
+  - name: node_file_removed
+    __docs__: ...
+    object_content_type_model_name: abstractnode
+    template: 'website/templates/emails/file_updated.html.mako'
+    notification_freq_default: instantly
+  - name: node_addon_file_renamed
+    __docs__: ...
+    object_content_type_model_name: abstractnode
+    template: 'website/templates/emails/file_updated.html.mako'
+    notification_freq_default: instantly
+  - name: node_addon_file_copied
+    __docs__: ...
+    object_content_type_model_name: abstractnode
+    template: 'website/templates/emails/file_updated.html.mako'
+    notification_freq_default: instantly
+  - name: node_addon_file_moved
+    __docs__: ...
+    object_content_type_model_name: abstractnode
+    template: 'website/templates/emails/file_updated.html.mako'
+    notification_freq_default: instantly
+  - name: node_addon_file_removed
+    __docs__: ...
+    object_content_type_model_name: abstractnode
+    template: 'website/templates/emails/file_updated.html.mako'
+    notification_freq_default: instantly
   - name: node_wiki_updated
     __docs__: ...
object_content_type_model_name: abstractnode diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 9fffd8d508e..a45bee7126f 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -59,7 +59,6 @@ class Type(str, Enum): USER_ARCHIVE_JOB_EXCEEDED = 'user_archive_job_exceeded' USER_ARCHIVE_JOB_COPY_ERROR = 'user_archive_job_copy_error' USER_ARCHIVE_JOB_FILE_NOT_FOUND = 'user_archive_job_file_not_found' - USER_ARCHIVE_JOB_UNCAUGHT_ERROR = 'user_archive_job_uncaught_error' USER_COMMENT_REPLIES = 'user_comment_replies' USER_COMMENTS = 'user_comments' USER_FILE_UPDATED = 'user_file_updated' @@ -101,6 +100,11 @@ class Type(str, Enum): NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' NODE_FILE_UPDATED = 'node_file_updated' NODE_FILE_ADDED = 'node_file_added' + NODE_FILE_REMOVED = 'node_file_removed' + NODE_ADDON_FILE_COPIED = 'node_addon_file_copied' + NODE_ADDON_FILE_RENAMED = 'node_addon_file_renamed' + NODE_ADDON_FILE_MOVED = 'node_addon_file_moved' + NODE_ADDON_FILE_REMOVED = 'node_addon_file_removed' # Provider notifications PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' diff --git a/website/archiver/utils.py b/website/archiver/utils.py index 44cd7517413..3e76b6014a9 100644 --- a/website/archiver/utils.py +++ b/website/archiver/utils.py @@ -5,11 +5,9 @@ from django.db.models import CharField, OuterRef, Subquery from framework.auth import Auth from framework.utils import sanitize_html +from osf.models import NotificationType -from website import ( - mails, - settings -) +from website import settings from website.archiver import ( StatResult, AggregateStatResult, ARCHIVER_NETWORK_ERROR, @@ -29,79 +27,100 @@ def normalize_unicode_filenames(filename): def send_archiver_size_exceeded_mails(src, user, stat_result, url): - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.ARCHIVE_SIZE_EXCEEDED_DESK, - user=user, - src=src, - stat_result=stat_result, - can_change_preferences=False, - url=url, + NotificationType.objects.get( + name=NotificationType.Type.DESK_ARCHIVE_JOB_EXCEEDED + ).emit( + event_context={ + 'user': user.id, + 'src': src._id, + 'stat_result': stat_result, + 'url': url, + 'can_change_preferences': False, + } ) - mails.send_mail( - to_addr=user.username, - mail=mails.ARCHIVE_SIZE_EXCEEDED_USER, + NotificationType.objects.get( + name=NotificationType.Type.USER_ARCHIVE_JOB_EXCEEDED, + ).emit( user=user, - src=src, - can_change_preferences=False, + event_context={ + 'user': user, + 'src': src, + 'can_change_preferences': False, + } ) def send_archiver_copy_error_mails(src, user, results, url): - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.ARCHIVE_COPY_ERROR_DESK, + NotificationType.objects.get( + name=NotificationType.Type.DESK_ARCHIVE_JOB_COPY_ERROR + ).emit( user=user, - src=src, - results=results, - url=url, - can_change_preferences=False, + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'url': url, + 'can_change_preferences': False, + } ) - mails.send_mail( - to_addr=user.username, - mail=mails.ARCHIVE_COPY_ERROR_USER, + NotificationType.objects.get( + name=NotificationType.Type.USER_ARCHIVE_JOB_COPY_ERROR + ).emit( user=user, - src=src, - results=results, - can_change_preferences=False, + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'can_change_preferences': False, + } ) def send_archiver_file_not_found_mails(src, user, results, url): - mails.send_mail( - 
to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.ARCHIVE_FILE_NOT_FOUND_DESK, - can_change_preferences=False, - user=user, - src=src, - results=results, - url=url, + NotificationType.objects.get( + name=NotificationType.Type.DESK_ARCHIVE_JOB_FILE_NOT_FOUND + ).emit( + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'url': url, + 'can_change_preferences': False, + } ) - mails.send_mail( - to_addr=user.username, - mail=mails.ARCHIVE_FILE_NOT_FOUND_USER, + NotificationType.objects.get( + name=NotificationType.Type.USER_ARCHIVE_JOB_FILE_NOT_FOUND + ).emit( user=user, - src=src, - results=results, - can_change_preferences=False, + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'can_change_preferences': False, + } ) def send_archiver_uncaught_error_mails(src, user, results, url): - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.ARCHIVE_UNCAUGHT_ERROR_DESK, - user=user, - src=src, - results=results, - can_change_preferences=False, - url=url, + NotificationType.objects.get( + name=NotificationType.Type.DESK_ARCHIVE_JOB_UNCAUGHT_ERROR + ).emit( + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'url': url, + 'can_change_preferences': False, + } ) - mails.send_mail( - to_addr=user.username, - mail=mails.ARCHIVE_UNCAUGHT_ERROR_USER, + NotificationType.objects.get( + name=NotificationType.Type.USER_ARCHIVE_JOB_UNCAUGHT_ERROR + ).emit( user=user, - src=src, - results=results, - can_change_preferences=False, + event_context={ + 'user': user.id, + 'src': src._id, + 'results': results, + 'can_change_preferences': False, + } ) From 79e82a12aaa16881eaaca523e067230781a0f125 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Fri, 18 Jul 2025 16:21:55 +0300 Subject: [PATCH 082/176] add notifications admin and unit tests --- .../notifications/test_notifications_admin.py | 84 +++++++ osf/admin.py | 209 +++++++++++++++++- osf/static/admin/notification_subscription.js | 34 +++ 3 files changed, 323 insertions(+), 4 deletions(-) create mode 100644 admin_tests/notifications/test_notifications_admin.py create mode 100644 osf/static/admin/notification_subscription.js diff --git a/admin_tests/notifications/test_notifications_admin.py b/admin_tests/notifications/test_notifications_admin.py new file mode 100644 index 00000000000..8a4334596a9 --- /dev/null +++ b/admin_tests/notifications/test_notifications_admin.py @@ -0,0 +1,84 @@ +import pytest +from django.test import RequestFactory +from django.contrib.admin.sites import AdminSite +from osf.models import NotificationType, NotificationSubscription, OSFUser +from osf.admin import ( + NotificationTypeAdmin, + NotificationSubscriptionAdmin, + NotificationTypeAdminForm, + NotificationSubscriptionForm +) +from tests.base import AdminTestCase + +pytestmark = pytest.mark.django_db + + +class TestNotificationAdmin(AdminTestCase): + + def setUp(self): + super().setUp() + self.user = OSFUser.objects.create(username='admin', is_staff=True) + self.notification_type = NotificationType.objects.create( + name="Test Notification", + subject="Hello", + template="Sample Template", + notification_interval_choices=["daily", "custom"] + ) + self.subscription = NotificationSubscription.objects.create( + user=self.user, + notification_type=self.notification_type, + message_frequency="daily", + subscribed_object=None + ) + self.admin_site = AdminSite() + self.request_factory = RequestFactory() + + def test_notification_type_admin_preview_button(self): + admin = 
NotificationTypeAdmin(NotificationType, self.admin_site) + html = admin.preview_button(self.notification_type) + assert f'{self.notification_type.id}/preview/' in html + assert "Preview" in html + + def test_notification_type_admin_preview_view(self): + admin = NotificationTypeAdmin(NotificationType, self.admin_site) + request = self.request_factory.get(f'/admin/osf/notificationtype/{self.notification_type.id}/preview/') + request.user = self.user + response = admin._preview_notification_template_view(request, pk=self.notification_type.id) + content = response.content.decode() + + assert response.status_code == 200 + assert "Template Preview for" in content + assert self.notification_type.name in content + assert self.notification_type.subject in content + + def test_notification_type_admin_form_save_combines_intervals(self): + form_data = { + 'name': 'Updated Notification', + 'subject': 'Updated Subject', + 'template': 'Updated Template', + 'default_intervals': ['daily'], + 'custom_intervals': ['weekly'] + } + form = NotificationTypeAdminForm(data=form_data, instance=self.notification_type) + assert form.is_valid(), form.errors + instance = form.save() + assert set(instance.notification_interval_choices) == {'daily', 'weekly'} + + def test_notification_subscription_admin_preview_button(self): + admin = NotificationSubscriptionAdmin(NotificationSubscription, self.admin_site) + html = admin.preview_button(self.subscription) + assert f'/admin/osf/notificationtype/{self.notification_type.id}/preview/' in html + assert "Preview" in html + + def test_notification_subscription_admin_get_intervals(self): + admin = NotificationSubscriptionAdmin(NotificationSubscription, self.admin_site) + request = self.request_factory.get(f'/admin/osf/notificationsubscription/get-intervals/{self.notification_type.id}/') + request.user = self.user + response = admin.get_intervals(request, pk=self.notification_type.id) + assert response.status_code == 200 + + def test_notification_subscription_form_sets_choices(self): + form = NotificationSubscriptionForm(data={'notification_type': self.notification_type.id}) + assert 'message_frequency' in form.fields + expected_choices = [(x, x) for x in self.notification_type.notification_interval_choices] + assert form.fields['message_frequency'].choices == expected_choices diff --git a/osf/admin.py b/osf/admin.py index b94c168c5be..b8ece40873a 100644 --- a/osf/admin.py +++ b/osf/admin.py @@ -1,15 +1,20 @@ from django.contrib import admin, messages -from django.urls import re_path +from django.urls import re_path, reverse, path from django.template.response import TemplateResponse from django_extensions.admin import ForeignKeyAutocompleteAdmin from django.contrib.auth.models import Group from django.db.models import Q, Count -from django.http import HttpResponseRedirect -from django.urls import reverse +from django.http import HttpResponseRedirect, HttpResponse, JsonResponse +from django.utils.html import format_html +from django.shortcuts import get_object_or_404 +from django import forms +from django.contrib.postgres.forms import SimpleArrayField +from django.contrib.admin import SimpleListFilter import waffle from osf.external.spam.tasks import reclassify_domain_references -from osf.models import OSFUser, Node, NotableDomain, NodeLicense +from osf.models import OSFUser, Node, NotableDomain, NodeLicense, NotificationType, NotificationSubscription +from osf.models.notification_type import get_default_frequency_choices from osf.models.notable_domain import DomainReference @@ 
-153,10 +158,206 @@ class _ManygroupWaffleFlagAdmin(waffle.admin.FlagAdmin): raw_id_fields = (*waffle.admin.FlagAdmin.raw_id_fields, 'groups') +class NotificationTypeAdminForm(forms.ModelForm): + default_intervals = forms.MultipleChoiceField( + choices=[(c, c) for c in get_default_frequency_choices()], + required=False, + widget=forms.CheckboxSelectMultiple, # Or use SelectMultiple + label="Default Intervals" + ) + + custom_intervals = SimpleArrayField( + base_field=forms.CharField(), + required=False, + widget=forms.Textarea(attrs={'rows': 2}), + label="Custom Intervals (comma-separated)" + ) + + class Meta: + model = NotificationType + exclude = ['notification_interval_choices'] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # Pre-fill existing values + if self.instance and self.instance.notification_interval_choices: + defaults = get_default_frequency_choices() + existing = self.instance.notification_interval_choices + self.fields['default_intervals'].initial = [v for v in existing if v in defaults] + self.fields['custom_intervals'].initial = [v for v in existing if v not in defaults] + + def save(self, commit=True): + # Assign combined intervals + default_intervals = self.cleaned_data.get('default_intervals') or [] + custom_intervals = self.cleaned_data.get('custom_intervals') or [] + combined = list(default_intervals + custom_intervals) + + self.instance.notification_interval_choices = combined + + return super().save(commit=commit) + + +class NotificationIntervalFilter(SimpleListFilter): + title = 'Notification Interval' + parameter_name = 'notification_interval' + + def lookups(self, request, model_admin): + default_choices = [(choice, choice) for choice in get_default_frequency_choices()] + custom_choices_list = [ + (choice, choice) + for choice_list in NotificationType.objects.values_list('notification_interval_choices', flat=True).distinct() + for choice in choice_list + if choice not in get_default_frequency_choices() + ] + return default_choices + list(set(custom_choices_list)) + + def queryset(self, request, queryset): + if self.value(): + return queryset.filter(notification_interval_choices__contains=[self.value()]) + return queryset + +class NotificationTypeAdmin(admin.ModelAdmin): + form = NotificationTypeAdminForm + list_display = ('name', 'object_content_type', 'notification_interval_choices', 'preview_button') + list_filter = (NotificationIntervalFilter,) + search_fields = ('name',) + + def preview_button(self, obj): + return format_html( + 'Preview', + f'{obj.id}/preview/' + ) + + def get_urls(self): + custom_urls = [ + path( + '/preview/', + self.admin_site.admin_view(self._preview_notification_template_view), + name='notificationtype_preview', + ), + ] + return custom_urls + super().get_urls() + + def _preview_notification_template_view(self, request, pk): + obj = get_object_or_404(NotificationType, pk=pk) + return HttpResponse(''' + + + + Template Preview + + ''' + f''' + +
+        <body>
+            <h1>Template Preview for {obj.name}</h1>
+            <p><strong>Object Content Type:</strong> {obj.object_content_type}</p>
+            <p><strong>Notification Intervals:</strong> {', '.join(obj.notification_interval_choices)}</p>
+            <h3>Subject:</h3>
+            <pre>{obj.subject}</pre>
+            <h3>Template:</h3>
+            <pre>{obj.template}</pre>
    + + ''', content_type='text/html') + + +class NotificationSubscriptionForm(forms.ModelForm): + class Meta: + model = NotificationSubscription + fields = '__all__' + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + notification_type_id = ( + self.data.get('notification_type') or + getattr(self.instance.notification_type, 'id', None) + ) + + if notification_type_id: + try: + nt = NotificationType.objects.get(pk=notification_type_id) + choices = [(x, x) for x in nt.notification_interval_choices] + except NotificationType.DoesNotExist: + choices = [] + else: + choices = [] + + self.fields['message_frequency'] = forms.ChoiceField( + choices=choices, + required=False + ) + +class NotificationSubscriptionAdmin(admin.ModelAdmin): + list_display = ('user', 'notification_type', 'message_frequency', 'subscribed_object', 'preview_button') + form = NotificationSubscriptionForm + + class Media: + js = ['admin/notification_subscription.js'] + + def preview_button(self, obj): + url = reverse( + 'admin:notificationtype_preview', # app_label_modelname_viewname + args=[obj.notification_type.id] + ) + return format_html( + 'Preview', + url + ) + + def get_urls(self): + urls = super().get_urls() + custom_urls = [ + path( + 'get-intervals//', + self.admin_site.admin_view(self.get_intervals), + name='get_notification_intervals' + ), + ] + return custom_urls + urls + + def get_intervals(self, request, pk): + try: + nt = NotificationType.objects.get(pk=pk) + return JsonResponse({'intervals': nt.notification_interval_choices}) + except NotificationType.DoesNotExist: + return JsonResponse({'intervals': []}) + admin.site.register(OSFUser, OSFUserAdmin) admin.site.register(Node, NodeAdmin) admin.site.register(NotableDomain, NotableDomainAdmin) admin.site.register(NodeLicense, LicenseAdmin) +admin.site.register(NotificationType, NotificationTypeAdmin) +admin.site.register(NotificationSubscription, NotificationSubscriptionAdmin) # waffle admins, with Flag admin override admin.site.register(waffle.models.Flag, _ManygroupWaffleFlagAdmin) diff --git a/osf/static/admin/notification_subscription.js b/osf/static/admin/notification_subscription.js new file mode 100644 index 00000000000..c4f9ac87a2e --- /dev/null +++ b/osf/static/admin/notification_subscription.js @@ -0,0 +1,34 @@ +document.addEventListener('DOMContentLoaded', function () { + const typeSelect = document.querySelector('#id_notification_type'); + const freqSelect = document.querySelector('#id_message_frequency'); + + if (!typeSelect || !freqSelect) return; + + function updateIntervals(typeId) { + fetch(`/admin/osf/notificationsubscription/get-intervals/${typeId}/`) + .then(response => response.json()) + .then(data => { + // Clear current options + freqSelect.innerHTML = ''; + + // Add new ones + data.intervals.forEach(choice => { + const option = document.createElement('option'); + option.value = choice; + option.textContent = choice; + freqSelect.appendChild(option); + }); + }); + } + + typeSelect.addEventListener('change', function () { + if (this.value) { + updateIntervals(this.value); + } + }); + + // Auto-load if there's an initial value + if (typeSelect.value) { + updateIntervals(typeSelect.value); + } +}); \ No newline at end of file From 23a83c78a5f4c7ef8c0dffefcb51eb3434518f19 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Fri, 18 Jul 2025 16:28:10 +0300 Subject: [PATCH 083/176] fix unit tests --- .../notifications/test_notifications_admin.py | 16 ++++++++-------- osf/admin.py | 8 ++++---- 2 files changed, 12 
insertions(+), 12 deletions(-) diff --git a/admin_tests/notifications/test_notifications_admin.py b/admin_tests/notifications/test_notifications_admin.py index 8a4334596a9..e27e70bb35c 100644 --- a/admin_tests/notifications/test_notifications_admin.py +++ b/admin_tests/notifications/test_notifications_admin.py @@ -19,15 +19,15 @@ def setUp(self): super().setUp() self.user = OSFUser.objects.create(username='admin', is_staff=True) self.notification_type = NotificationType.objects.create( - name="Test Notification", - subject="Hello", - template="Sample Template", - notification_interval_choices=["daily", "custom"] + name='Test Notification', + subject='Hello', + template='Sample Template', + notification_interval_choices=['daily', 'custom'] ) self.subscription = NotificationSubscription.objects.create( user=self.user, notification_type=self.notification_type, - message_frequency="daily", + message_frequency='daily', subscribed_object=None ) self.admin_site = AdminSite() @@ -37,7 +37,7 @@ def test_notification_type_admin_preview_button(self): admin = NotificationTypeAdmin(NotificationType, self.admin_site) html = admin.preview_button(self.notification_type) assert f'{self.notification_type.id}/preview/' in html - assert "Preview" in html + assert 'Preview' in html def test_notification_type_admin_preview_view(self): admin = NotificationTypeAdmin(NotificationType, self.admin_site) @@ -47,7 +47,7 @@ def test_notification_type_admin_preview_view(self): content = response.content.decode() assert response.status_code == 200 - assert "Template Preview for" in content + assert 'Template Preview for' in content assert self.notification_type.name in content assert self.notification_type.subject in content @@ -68,7 +68,7 @@ def test_notification_subscription_admin_preview_button(self): admin = NotificationSubscriptionAdmin(NotificationSubscription, self.admin_site) html = admin.preview_button(self.subscription) assert f'/admin/osf/notificationtype/{self.notification_type.id}/preview/' in html - assert "Preview" in html + assert 'Preview' in html def test_notification_subscription_admin_get_intervals(self): admin = NotificationSubscriptionAdmin(NotificationSubscription, self.admin_site) diff --git a/osf/admin.py b/osf/admin.py index b8ece40873a..006040d4bde 100644 --- a/osf/admin.py +++ b/osf/admin.py @@ -162,15 +162,15 @@ class NotificationTypeAdminForm(forms.ModelForm): default_intervals = forms.MultipleChoiceField( choices=[(c, c) for c in get_default_frequency_choices()], required=False, - widget=forms.CheckboxSelectMultiple, # Or use SelectMultiple - label="Default Intervals" + widget=forms.CheckboxSelectMultiple, + label='Default Intervals' ) custom_intervals = SimpleArrayField( base_field=forms.CharField(), required=False, widget=forms.Textarea(attrs={'rows': 2}), - label="Custom Intervals (comma-separated)" + label='Custom Intervals (comma-separated)' ) class Meta: @@ -326,7 +326,7 @@ class Media: def preview_button(self, obj): url = reverse( - 'admin:notificationtype_preview', # app_label_modelname_viewname + 'admin:notificationtype_preview', args=[obj.notification_type.id] ) return format_html( From 1177b1d5e2fbdd792c06e87e9816ef66bcef4551 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Fri, 18 Jul 2025 16:48:03 +0300 Subject: [PATCH 084/176] fix unit tests --- .../views/test_user_settings_reset_password.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/api_tests/users/views/test_user_settings_reset_password.py 
b/api_tests/users/views/test_user_settings_reset_password.py index cb8bacec7dc..8153a283867 100644 --- a/api_tests/users/views/test_user_settings_reset_password.py +++ b/api_tests/users/views/test_user_settings_reset_password.py @@ -12,6 +12,14 @@ @pytest.mark.usefixtures('mock_notification_send') class TestResetPassword: + @pytest.fixture() + def throttle_user(self): + user = UserFactory() + user.set_password('password1') + user.auth = (user.username, 'password1') + user.save() + return user + @pytest.fixture() def user_one(self): user = UserFactory() @@ -108,17 +116,17 @@ def test_post_invalid_password(self, app, url, user_one, csrf_token): res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) assert res.status_code == 400 - def test_throttle(self, app, url, user_one, csrf_token): + def test_throttle(self, app, url, throttle_user, csrf_token): app.set_cookie(CSRF_COOKIE_NAME, csrf_token) - encoded_email = urllib.parse.quote(user_one.email) + encoded_email = urllib.parse.quote(throttle_user.email) url = f'{url}?email={encoded_email}' res = app.get(url) - user_one.reload() + throttle_user.reload() payload = { 'data': { 'attributes': { - 'uid': user_one._id, - 'token': user_one.verification_key_v2['token'], + 'uid': throttle_user._id, + 'token': throttle_user.verification_key_v2['token'], 'password': '12345', } } From 0d17c05cce33430e71d599a34b4b5d71b1565567 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 18 Jul 2025 09:51:24 -0400 Subject: [PATCH 085/176] update more notifications to use NotificationType --- api/providers/tasks.py | 133 ++++---- .../views/test_subscriptions_list.py | 2 +- framework/auth/views.py | 169 +++++------ notifications.yaml | 32 +- osf/models/collection_submission.py | 228 ++++++-------- osf/models/notification_type.py | 3 +- osf/models/sanctions.py | 2 +- osf/utils/machines.py | 4 +- website/archiver/utils.py | 2 +- website/project/views/contributor.py | 285 ++++++++++-------- 10 files changed, 445 insertions(+), 415 deletions(-) diff --git a/api/providers/tasks.py b/api/providers/tasks.py index b8fb8e06233..b0a39c9c337 100644 --- a/api/providers/tasks.py +++ b/api/providers/tasks.py @@ -26,7 +26,7 @@ RegistrationBulkUploadRow, RegistrationProvider, RegistrationSchema, - Subject, + Subject, NotificationType, ) from osf.models.licenses import NodeLicense from osf.models.registration_bulk_upload_job import JobState @@ -34,7 +34,7 @@ from osf.registrations.utils import get_registration_provider_submissions_url from osf.utils.permissions import ADMIN -from website import mails, settings +from website import settings logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) @@ -136,13 +136,16 @@ def prepare_for_registration_bulk_creation(payload_hash, initiator_id, provider_ # Cancel the preparation task if duplicates are found in the CSV and/or in DB if draft_error_list: upload.delete() - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES, - fullname=initiator.fullname, - count=initial_row_count, - draft_errors=draft_error_list, - osf_support_email=settings.OSF_SUPPORT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES, + ).emit( + user=initiator, + event_context={ + 'fullname': initiator.fullname, + 'count': initial_row_count, + 'draft_errors': draft_error_list, + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + }, ) return @@ -636,88 +639,78 @@ def 
bulk_upload_finish_job(upload, row_count, success_count, draft_errors, appro approval_errors.sort() if not dry_run: upload.save() + notification_type = None + event_context = { + 'initiator_fullname': initiator.fullname, + 'auto_approval': auto_approval, + 'count': row_count, + 'pending_submissions_url': get_registration_provider_submissions_url(provider), + 'draft_errors': draft_errors, + 'approval_errors': approval_errors, + 'successes': success_count, + 'failures': len(draft_errors), + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + } + if upload.state == JobState.DONE_FULL: - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_SUCCESS_ALL, - fullname=initiator.fullname, - auto_approval=auto_approval, - count=row_count, - pending_submissions_url=get_registration_provider_submissions_url(provider), - ) + notification_type = NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_SUCCESS_ALL elif upload.state == JobState.DONE_PARTIAL: - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL, - fullname=initiator.fullname, - auto_approval=auto_approval, - total=row_count, - successes=success_count, - draft_errors=draft_errors, - approval_errors=approval_errors, - failures=len(draft_errors), - pending_submissions_url=get_registration_provider_submissions_url(provider), - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) + notification_type = NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL elif upload.state == JobState.DONE_ERROR: - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_FAILURE_ALL, - fullname=initiator.fullname, - count=row_count, - draft_errors=draft_errors, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) + notification_type = NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL else: - message = f'Failed to send registration bulk upload outcome email due to invalid ' \ - f'upload state: [upload={upload.id}, state={upload.state.name}]' - logger.error(message) - sentry.log_message(message) + logger.error(f'Unexpected job state for upload [{upload.id}]: {upload.state.name}') return + + NotificationType.objects.get( + name=notification_type, + ).emit( + user=initiator, + event_context=event_context, + ) + upload.email_sent = timezone.now() upload.save() - logger.info(f'Email sent to bulk upload initiator [{initiator._id}]') + logger.info(f'Notification sent to bulk upload initiator [{initiator._id}]') def handle_internal_error(initiator=None, provider=None, message=None, dry_run=True): - """Log errors that happened due to unexpected bug and send emails the uploader (if available) - about failures. Product owner (if available) is informed as well with more details. Emails are - not sent during dry run. 
- """ - + """Log errors due to unexpected bugs and send notifications instead of direct emails.""" if not message: message = 'Registration bulk upload failure' logger.error(message) sentry.log_message(message) - if not dry_run: - if initiator: - mails.send_mail( - to_addr=initiator.username, - mail=mails.REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE, - fullname=initiator.fullname, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) - inform_product_of_errors(initiator=initiator, provider=provider, message=message) - + if not dry_run and initiator: + NotificationType.objects.get( + name=NotificationType.Type.DESK_USER_REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE, + ).emit( + user=initiator, + event_context={ + 'initiator_fullname': initiator.fullname, + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + 'message': message, + }, + ) + inform_product_of_errors(initiator=initiator, provider=provider, message=message) def inform_product_of_errors(initiator=None, provider=None, message=None): - """Inform product owner of internal errors. - """ - + """Inform product owner of internal errors via notifications.""" email = settings.PRODUCT_OWNER_EMAIL_ADDRESS.get('Registration') if not email: logger.warning('Missing email for OSF Registration product owner.') return - if not message: - message = 'Bulk upload preparation failure' - user = f'{initiator._id}, {initiator.fullname}, {initiator.username}' if initiator else 'UNIDENTIFIED' + user_info = f'{initiator._id}, {initiator.fullname}, {initiator.username}' if initiator else 'UNIDENTIFIED' provider_name = provider.name if provider else 'UNIDENTIFIED' - mails.send_mail( - to_addr=email, - mail=mails.REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER, - message=message, - user=user, - provider_name=provider_name, + + NotificationType.objects.get( + name=NotificationType.Type.DESK_REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER, + ).emit( + user=object('mockuser', (), {'username': email}), + event_context={ + 'user': user_info, + 'provider_name': provider_name, + 'message': message, + }, ) diff --git a/api_tests/subscriptions/views/test_subscriptions_list.py b/api_tests/subscriptions/views/test_subscriptions_list.py index a0a01bf513c..1ac00d02e05 100644 --- a/api_tests/subscriptions/views/test_subscriptions_list.py +++ b/api_tests/subscriptions/views/test_subscriptions_list.py @@ -1,7 +1,7 @@ import pytest from api.base.settings.defaults import API_BASE -from osf.models import NotificationType +from osf.models.notification_type import NotificationType from osf_tests.factories import ( AuthUserFactory, PreprintProviderFactory, diff --git a/framework/auth/views.py b/framework/auth/views.py index a1c42eda1ca..0938bb58510 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -26,14 +26,13 @@ from framework.sessions.utils import remove_sessions_for_user from framework.sessions import get_session from framework.utils import throttle_period_expired -from osf.models import OSFUser +from osf.models import OSFUser, NotificationType from osf.utils.sanitize import strip_html -from website import settings, mails, language +from website import settings, language from website.util import web_url_for from osf.exceptions import ValidationValueError, BlockedEmailError from osf.models.provider import PreprintProvider from osf.models.tag import Tag -from osf.models.notification_type import NotificationType from osf.utils.requests import check_select_for_update from website.util.metrics import CampaignClaimedTags, CampaignSourceTags from website.ember_osf_web.decorators import 
ember_flag_is_active @@ -208,19 +207,23 @@ def redirect_unsupported_institution(auth): def forgot_password_post(): """Dispatches to ``_forgot_password_post`` passing non-institutional user mail template and reset action.""" - return _forgot_password_post(mail_template='forgot_password', - reset_route='reset_password_get') + return _forgot_password_post( + notificaton_type=NotificationType.Type.USER_FORGOT_PASSWORD, + reset_route='reset_password_get' + ) def forgot_password_institution_post(): """Dispatches to `_forgot_password_post` passing institutional user mail template, reset action, and setting the ``institutional`` flag.""" - return _forgot_password_post(mail_template='forgot_password_institution', - reset_route='reset_password_institution_get', - institutional=True) + return _forgot_password_post( + notificaton_type=NotificationType.Type.USER_FORGOT_PASSWORD_INSTITUTION, + reset_route='reset_password_institution_get', + institutional=True + ) -def _forgot_password_post(mail_template, reset_route, institutional=False): +def _forgot_password_post(notificaton_type, reset_route, institutional=False): """ View for user to submit forgot password form (standard or institutional). Validates submitted form and sends reset-password link via email if valid. If user has submitted another password @@ -273,13 +276,16 @@ def _forgot_password_post(mail_template, reset_route, institutional=False): token=user_obj.verification_key_v2['token'] ) ) - notification_type = NotificationType.objects.filter(name=mail_template) - if not notification_type.exists(): - raise NotificationType.DoesNotExist( - f'NotificationType with name {mail_template} does not exist.' - ) - notification_type = notification_type.first() - notification_type.emit(user=user_obj, message_frequency='instantly', event_context={'can_change_preferences': False, 'reset_link': reset_link}) + NotificationType.objects.get( + name=notificaton_type, + ).emit( + user=user_obj, + event_context={ + 'reset_link': reset_link, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, + ) # institutional forgot password page displays the message as main text, not as an alert if institutional: @@ -655,12 +661,16 @@ def external_login_confirm_email_get(auth, uid, token): if external_status == 'CREATE': service_url += '&{}'.format(urlencode({'new': 'true'})) elif external_status == 'LINK': - mails.send_mail( + NotificationType.objects.get( + name=NotificationType.Type.USER_EXTERNAL_LOGIN_LINK_SUCCESS, + ).emit( user=user, - to_addr=user.username, - mail=mails.EXTERNAL_LOGIN_LINK_SUCCESS, - external_id_provider=provider, - can_change_preferences=False, + subscribed_object=user, # or whatever the correct related object is + event_context={ + 'external_id_provider': getattr(provider, 'id', None), + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) # Send to celery the following async task to affiliate the user with eligible institutions if verified @@ -813,14 +823,14 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte :return: :raises: KeyError if user does not have a confirmation token for the given email. 
""" - confirmation_url = user.get_confirmation_url( - email, - external=True, - force=True, - renew=renew, - external_id_provider=external_id_provider, - destination=destination - ) + # confirmation_url = user.get_confirmation_url( + # email, + # external=True, + # force=True, + # renew=renew, + # external_id_provider=external_id_provider, + # destination=destination + # ) try: merge_target = OSFUser.objects.get(emails__address=email) @@ -828,45 +838,34 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte merge_target = None campaign = campaigns.campaign_for_user(user) - branded_preprints_provider = None - logo = None # Choose the appropriate email template to use and add existing_user flag if a merge or adding an email. if external_id_provider and external_id: # First time login through external identity provider, link or create an OSF account confirmation if user.external_identity[external_id_provider][external_id] == 'CREATE': - mail_template = mails.EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE + notificaton_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE elif user.external_identity[external_id_provider][external_id] == 'LINK': - mail_template = mails.EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK + notificaton_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK elif merge_target: # Merge account confirmation - mail_template = mails.CONFIRM_MERGE - confirmation_url = f'{confirmation_url}?logout=1' + notificaton_type = NotificationType.Type.USER_CONFIRM_MERGE elif user.is_active: # Add email confirmation - mail_template = mails.CONFIRM_EMAIL - confirmation_url = f'{confirmation_url}?logout=1' + notificaton_type = NotificationType.Type.USER_CONFIRM_EMAIL elif campaign: # Account creation confirmation: from campaign - mail_template = campaigns.email_template_for_campaign(campaign) - if campaigns.is_proxy_login(campaign) and campaigns.get_service_provider(campaign) != 'OSF': - branded_preprints_provider = campaigns.get_service_provider(campaign) - logo = campaigns.get_campaign_logo(campaign) + notificaton_type = campaigns.email_template_for_campaign(campaign) else: # Account creation confirmation: from OSF - mail_template = mails.INITIAL_CONFIRM_EMAIL + notificaton_type = NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL - mails.send_mail( - email, - mail_template, + NotificationType.objects.get( + name=notificaton_type.value, + ).emit( user=user, - confirmation_url=confirmation_url, - email=email, - merge_target=merge_target, - external_id_provider=external_id_provider, - branded_preprints_provider=branded_preprints_provider, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - logo=logo if logo else settings.OSF_LOGO + event_context={ + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) def send_confirm_email_async(user, email, renew=False, external_id_provider=None, external_id=None, destination=None): @@ -972,41 +971,39 @@ def resend_confirmation_post(auth): View for user to submit resend confirmation form. 
HTTP Method: POST """ - try: - # If user is already logged in, log user out - if auth.logged_in: - return auth_logout(redirect_url=request.url) - form = ResendConfirmationForm(request.form) + # If user is already logged in, log user out + if auth.logged_in: + return auth_logout(redirect_url=request.url) - if form.validate(): - clean_email = form.email.data - user = get_user(email=clean_email) - status_message = ( - f'If there is an OSF account associated with this unconfirmed email address {clean_email}, ' - 'a confirmation email has been resent to it. If you do not receive an email and believe ' - 'you should have, please contact OSF Support.' - ) - kind = 'success' - if user: - if throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE): - try: - send_confirm_email(user, clean_email, renew=True) - except KeyError: - # already confirmed, redirect to dashboard - status_message = f'This email {clean_email} has already been confirmed.' - kind = 'warning' - user.email_last_sent = timezone.now() - user.save() - else: - status_message = ('You have recently requested to resend your confirmation email. ' - 'Please wait a few minutes before trying again.') - kind = 'error' - status.push_status_message(status_message, kind=kind, trust=False) - else: - forms.push_errors_to_status(form.errors) - except Exception as err: - sentry.log_exception(f'Async email confirmation failed because of the error: {err}') + form = ResendConfirmationForm(request.form) + + if form.validate(): + clean_email = form.email.data + user = get_user(email=clean_email) + status_message = ( + f'If there is an OSF account associated with this unconfirmed email address {clean_email}, ' + 'a confirmation email has been resent to it. If you do not receive an email and believe ' + 'you should have, please contact OSF Support.' + ) + kind = 'success' + if user: + if throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE): + try: + send_confirm_email(user, clean_email, renew=True) + except KeyError: + # already confirmed, redirect to dashboard + status_message = f'This email {clean_email} has already been confirmed.' + kind = 'warning' + user.email_last_sent = timezone.now() + user.save() + else: + status_message = ('You have recently requested to resend your confirmation email. ' + 'Please wait a few minutes before trying again.') + kind = 'error' + status.push_status_message(status_message, kind=kind, trust=False) + else: + forms.push_errors_to_status(form.errors) # Don't go anywhere return {'form': form} diff --git a/notifications.yaml b/notifications.yaml index 9b596962240..da31b544be1 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -11,7 +11,12 @@ notification_types: - name: user_pending_verification_registered __docs__: ... object_content_type_model_name: osfuser - template: 'website/templates/emails/new_pending_submissions.html.mako' + template: 'website/templates/emails/pending_registered.html.mako' + notification_freq_default: instantly + - name: user_pending_verification + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/pending_invite.html.mako' notification_freq_default: instantly - name: user_password_reset __docs__: ... @@ -38,11 +43,21 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_access_request.html.mako' notification_freq_default: instantly + - name: user_contributor_added_draft_registration + __docs__: ... 
+ object_content_type_model_name: osfuser + template: 'website/templates/emails/contributor_added_draft_registration.html.mako' + notification_freq_default: instantly - name: user_external_login_link_success __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/external_confirm_success.html.mako' notification_freq_default: instantly + - name: user_confirm_email + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/confirm.html.mako' + notification_freq_default: instantly - name: forgot_password __docs__: ... object_content_type_model_name: osfuser @@ -53,12 +68,12 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/welcome_osf4i.html.mako' notification_freq_default: instantly - - name: invite_preprints_osf + - name: user_invite_preprints_osf __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_preprints_osf.html.mako' notification_freq_default: instantly - - name: invite_preprints + - name: user_invite_preprints __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_preprints.html.mako' @@ -68,17 +83,22 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/invite_draft_registration.html.mako' notification_freq_default: instantly - - name: invite_default + - name: user_invite_default __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_default.html.mako' notification_freq_default: instantly - - name: pending_invite + - name: user_pending_invite __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/pending_invite.html.mako' notification_freq_default: instantly - - name: forward_invite + - name: user_forward_invite_registered + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/forward_invite.html.mako' + notification_freq_default: instantly + - name: user_forward_invite __docs__: ... 
object_content_type_model_name: osfuser template: 'website/templates/emails/forward_invite.html.mako' diff --git a/osf/models/collection_submission.py b/osf/models/collection_submission.py index 56c5a64f659..42976319369 100644 --- a/osf/models/collection_submission.py +++ b/osf/models/collection_submission.py @@ -11,13 +11,12 @@ from website.util import api_v2_url from website.search.exceptions import SearchUnavailableError from osf.utils.workflows import CollectionSubmissionsTriggers, CollectionSubmissionStates -from website.filters import profile_image_url -from website import mails, settings +from website import settings from osf.utils.machines import CollectionSubmissionMachine +from osf.models.notification_type import NotificationType from django.db.models.signals import post_save from django.dispatch import receiver -from django.utils import timezone logger = logging.getLogger(__name__) @@ -102,72 +101,35 @@ def _notify_contributors_pending(self, event_data): assert str(e) == f'No unclaimed record for user {contributor._id} on node {self.guid.referent._id}' claim_url = None - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_SUBMITTED(self.creator, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_SUBMITTED, + ).emit( user=contributor, - submitter=user, - is_initator=self.creator == contributor, - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - is_registered_contrib=contributor.is_registered, - collection=self.collection, - claim_url=claim_url, - node=self.guid.referent, - domain=settings.DOMAIN, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + subscribed_object=self, + event_context={ + 'user': contributor.id, + 'submitter': user.id, + 'is_initiator': self.creator == contributor, + 'is_admin': self.guid.referent.has_permission(contributor, ADMIN), + 'is_registered_contrib': contributor.is_registered, + 'collection': self.collection.id, + 'claim_url': claim_url, + 'node': self.guid.referent.id, + 'domain': settings.DOMAIN, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) def _notify_moderators_pending(self, event_data): - context = { - 'reviewable': self.guid.referent, - 'abstract_provider': self.collection.provider, - 'reviews_submission_url': f'{settings.DOMAIN}{self.guid.referent._id}?mode=moderator', - 'profile_image_url': profile_image_url( - settings.PROFILE_IMAGE_PROVIDER, - self.creator, - use_ssl=True, - size=settings.PROFILE_IMAGE_MEDIUM - ), - 'message': f'submitted "{self.guid.referent.title}".', - 'allow_submissions': True, - } - - from .notifications import NotificationSubscriptionLegacy - from website.notifications.emails import store_emails - - provider_subscription, created = NotificationSubscriptionLegacy.objects.get_or_create( - _id=f'{self.collection.provider._id}_new_pending_submissions', - provider=self.collection.provider - ) - email_transactors_ids = list( - provider_subscription.email_transactional.all().values_list( - 'guids___id', - flat=True - ) - ) - store_emails( - email_transactors_ids, - 'email_transactional', - 'new_pending_submissions', - self.creator, - self.guid.referent, - timezone.now(), - **context - ) - email_digester_ids = list( - provider_subscription.email_digest.all().values_list( - 'guids___id', - flat=True - ) - ) - store_emails( - email_digester_ids, - 'email_digest', - 'new_pending_submissions', - self.creator, - self.guid.referent, - timezone.now(), - **context + user = event_data.kwargs.get('user', None) + 
NotificationType.objects.get( + name=NotificationType.Type.NEW_PENDING_SUBMISSIONS, + ).emit( + user=user, + subscribed_object=self.guid.referent, + event_context={ + 'submitter': self.creator.id, + }, ) def _validate_accept(self, event_data): @@ -182,16 +144,20 @@ def _validate_accept(self, event_data): def _notify_accepted(self, event_data): if self.collection.provider: for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_ACCEPTED(self.collection, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_ACCEPTED, + ).emit( user=contributor, - submitter=event_data.kwargs.get('user'), - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - domain=settings.DOMAIN, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + subscribed_object=self, + event_context={ + 'user': contributor.id, + 'submitter': event_data.kwargs.get('user').id, + 'is_admin': self.guid.referent.has_permission(contributor, ADMIN), + 'collection': self.collection.id, + 'node': self.guid.referent.id, + 'domain': settings.DOMAIN, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) def _validate_reject(self, event_data): @@ -209,15 +175,19 @@ def _validate_reject(self, event_data): def _notify_moderated_rejected(self, event_data): for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_REJECTED(self.collection, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REJECTED, + ).emit( user=contributor, - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - rejection_justification=event_data.kwargs.get('comment'), - osf_contact_email=settings.OSF_CONTACT_EMAIL, + subscribed_object=self, + event_context={ + 'user': contributor.id, + 'is_admin': self.guid.referent.has_permission(contributor, ADMIN), + 'collection': self.collection.id, + 'node': self.guid.referent.id, + 'rejection_justification': event_data.kwargs.get('comment'), + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + }, ) def _validate_remove(self, event_data): @@ -243,56 +213,60 @@ def _notify_removed(self, event_data): removed_due_to_privacy = event_data.kwargs.get('removed_due_to_privacy') is_moderator = user.has_perm('withdraw_submissions', self.collection.provider) is_admin = self.guid.referent.has_permission(user, ADMIN) + node = self.guid.referent + + event_context_base = { + 'remover': user.id, + 'collection_id': self.collection.id, + 'node_id': node.id, + 'domain': settings.DOMAIN, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } + if removed_due_to_privacy and self.collection.provider: if self.is_moderated: for moderator in self.collection.moderators: - mails.send_mail( - to_addr=moderator.username, - mail=mails.COLLECTION_SUBMISSION_REMOVED_PRIVATE(self.collection, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_PRIVATE + ).emit( user=moderator, - remover=user, - is_admin=self.guid.referent.has_permission(moderator, ADMIN), - collection=self.collection, - node=self.guid.referent, - domain=settings.DOMAIN, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + **event_context_base, + 'is_admin': node.has_permission(moderator, ADMIN), + }, ) - for contributor in 
self.guid.referent.contributors.all(): - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_REMOVED_PRIVATE(self.collection, self.guid.referent), + for contributor in node.contributors.all(): + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_PRIVATE + ).emit( user=contributor, - remover=user, - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - domain=settings.DOMAIN, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + **event_context_base, + 'is_admin': node.has_permission(contributor, ADMIN), + }, ) elif is_moderator and self.collection.provider: - for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_REMOVED_MODERATOR(self.collection, self.guid.referent), + for contributor in node.contributors.all(): + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_MODERATOR + ).emit( user=contributor, - rejection_justification=event_data.kwargs.get('comment'), - remover=event_data.kwargs.get('user'), - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + **event_context_base, + 'is_admin': node.has_permission(contributor, ADMIN), + 'rejection_justification': event_data.kwargs.get('comment'), + }, ) elif is_admin and self.collection.provider: - for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_REMOVED_ADMIN(self.collection, self.guid.referent), + for contributor in node.contributors.all(): + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN + ).emit( user=contributor, - remover=event_data.kwargs.get('user'), - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + **event_context_base, + 'is_admin': node.has_permission(contributor, ADMIN), + }, ) def _validate_resubmit(self, event_data): @@ -322,15 +296,13 @@ def _notify_cancel(self, event_data): return for contributor in self.guid.referent.contributors: - mails.send_mail( - to_addr=contributor.username, - mail=mails.COLLECTION_SUBMISSION_CANCEL(self.collection, self.guid.referent), + NotificationType.objects.get( + name=NotificationType.Type.COLLECTION_SUBMISSION_CANCEL + ).emit( user=contributor, - remover=event_data.kwargs.get('user'), - is_admin=self.guid.referent.has_permission(contributor, ADMIN), - collection=self.collection, - node=self.guid.referent, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + 'is_admin': self.collection.has_permission(contributor, ADMIN), + }, ) def _make_public(self, event_data): diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index a45bee7126f..bcb6f9f25d9 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -197,12 +197,13 @@ def desk_types(cls): help_text='Template used to render the subject line of email. Supports Django template syntax.' 
) - def emit(self, user, subscribed_object=None, message_frequency=None, event_context=None): + def emit(self, user, subscribed_object=None, message_frequency='instantly', event_context=None): """Emit a notification to a user by creating Notification and NotificationSubscription objects. Args: user (OSFUser): The recipient of the notification. subscribed_object (optional): The object the subscription is related to. + message_frequency (optional): Initializing message frequency. event_context (dict, optional): Context for rendering the notification template. """ from osf.models.notification_subscription import NotificationSubscription diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index baaa810527a..f436b80c768 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -20,7 +20,7 @@ from osf.utils import tokens from osf.utils.machines import ApprovalsMachine from osf.utils.workflows import ApprovalStates, SanctionTypes -from osf.models import NotificationType +from osf.models.notification_type import NotificationType VIEW_PROJECT_URL_TEMPLATE = osf_settings.DOMAIN + '{node_id}/' diff --git a/osf/utils/machines.py b/osf/utils/machines.py index 7c7ff055511..d29abdbb0b4 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -238,7 +238,7 @@ def notify_submit(self, ev): context = self.get_context() context['contributors_url'] = f'{self.machineable.target.absolute_url}contributors/' context['project_settings_url'] = f'{self.machineable.target.absolute_url}settings/' - from osf.models import NotificationType + from osf.models.notification_type import NotificationType if not self.machineable.request_type == NodeRequestTypes.INSTITUTIONAL_REQUEST.value: for admin in self.machineable.target.get_users_with_perm(permissions.ADMIN): @@ -261,7 +261,7 @@ def notify_resubmit(self, ev): def notify_accept_reject(self, ev): """ Notify requester that admins have approved/denied """ - from osf.models import NotificationType + from osf.models.notification_type import NotificationType if ev.event.name == DefaultTriggers.REJECT.value: context = self.get_context() diff --git a/website/archiver/utils.py b/website/archiver/utils.py index 3e76b6014a9..72bffee47f8 100644 --- a/website/archiver/utils.py +++ b/website/archiver/utils.py @@ -5,7 +5,7 @@ from django.db.models import CharField, OuterRef, Subquery from framework.auth import Auth from framework.utils import sanitize_html -from osf.models import NotificationType +from osf.models.notification_type import NotificationType from website import settings from website.archiver import ( diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 591b6b716c7..6ef27729ebb 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -17,13 +17,20 @@ from framework.sessions import get_session from framework.transactions.handlers import no_auto_transaction from framework.utils import get_timestamp, throttle_period_expired -from osf.models import Tag, NotificationType +from osf.models import Tag from osf.exceptions import NodeStateError -from osf.models import AbstractNode, DraftRegistration, OSFUser, Preprint, PreprintProvider, RecentlyAddedContributor +from osf.models import ( + AbstractNode, + DraftRegistration, + OSFUser, + Preprint, + PreprintProvider, + RecentlyAddedContributor, + NotificationType +) from osf.utils import sanitize from osf.utils.permissions import ADMIN -from website import mails, language, settings -from website.notifications.utils import 
check_if_all_global_subscriptions_are_none +from website import language, settings from website.profile import utils as profile_utils from website.project.decorators import (must_have_permission, must_be_valid_project, must_not_be_registration, must_be_contributor_or_public, must_be_contributor) @@ -421,31 +428,47 @@ def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 360 ) # Send mail to referrer, telling them to forward verification link to claimer - mails.send_mail( - referrer.username, - mails.FORWARD_INVITE_REGISTERED, - user=unclaimed_user, - referrer=referrer, - node=node, - claim_url=claim_url, - fullname=unclaimed_record['name'], - can_change_preferences=False, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.USER_FORWARD_INVITE_REGISTERED + ).emit( + user=referrer, + event_context={ + 'claim_url': claim_url, + 'fullname': unclaimed_record['name'], + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) - unclaimed_record['last_sent'] = get_timestamp() - unclaimed_user.save() + referrer.contributor_added_email_records = {node._id: {'last_sent': get_timestamp()}} + referrer.save() # Send mail to claimer, telling them to wait for referrer - mails.send_mail( - claimer.username, - mails.PENDING_VERIFICATION_REGISTERED, - fullname=claimer.fullname, - referrer=referrer, - node=node, - can_change_preferences=False, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.USER_PENDING_VERIFICATION_REGISTERED + ).emit( + user=claimer, + event_context={ + 'claim_url': claim_url, + 'fullname': unclaimed_record['name'], + 'referrer': referrer.username, + 'node': node.title, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) +def check_email_throttle_claim_email(node, contributor): + contributor_record = contributor.contributor_added_email_records.get(node._id, {}) + if contributor_record: + timestamp = contributor_record.get('last_sent', None) + if timestamp: + if not throttle_period_expired( + timestamp, + settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE + ): + return True + else: + contributor.contributor_added_email_records[node._id] = {} # TODO: consider moving this into utils def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 3600, email_template='default'): @@ -469,8 +492,6 @@ def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 360 claimer_email = email.lower().strip() unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key) referrer = OSFUser.load(unclaimed_record['referrer_id']) - claim_url = unclaimed_user.get_claim_url(node._primary_key, external=True) - # Option 1: # When adding the contributor, the referrer provides both name and email. # The given email is the same provided by user, just send to that email. 
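# A minimal sketch of the throttle-record cycle that check_email_throttle_claim_email
# (introduced above) relies on: write a per-node 'last_sent' timestamp after a claim
# email goes out, then treat later attempts as throttled while the window is open.
# Only the attribute, settings key, and helpers shown in the surrounding hunks are
# assumed; the standalone function below is hypothetical and illustrative only.
from framework.utils import get_timestamp, throttle_period_expired
from website import settings

def _record_then_check_claim_throttle(node, contributor):
    # Record the send time for this node after an email is sent.
    contributor.contributor_added_email_records[node._id] = {'last_sent': get_timestamp()}
    contributor.save()
    # On a subsequent attempt, report "throttled" until the period expires.
    last_sent = contributor.contributor_added_email_records.get(node._id, {}).get('last_sent')
    return bool(last_sent) and not throttle_period_expired(
        last_sent, settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE
    )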
@@ -479,15 +500,15 @@ def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 360 # check email template for branded preprints if email_template == 'preprint': if node.provider.is_default: - mail_tpl = mails.INVITE_OSF_PREPRINT + notification_type = NotificationType.Type.USER_INVITE_OSF_PREPRINT logo = settings.OSF_PREPRINTS_LOGO else: - mail_tpl = mails.INVITE_PREPRINT(node.provider) + notification_type = NotificationType.Type.PROVIDER_USER_INVITE_PREPRINT logo = node.provider._id elif email_template == 'draft_registration': - mail_tpl = mails.INVITE_DRAFT_REGISTRATION + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION else: - mail_tpl = mails.INVITE_DEFAULT + notification_type = NotificationType.Type.USER_INVITE_DEFAULT to_addr = claimer_email unclaimed_record['claimer_email'] = claimer_email @@ -512,112 +533,139 @@ def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 360 unclaimed_record['claimer_email'] = claimer_email unclaimed_user.save() - claim_url = unclaimed_user.get_claim_url(node._primary_key, external=True) - # send an email to the invited user without `claim_url` if notify: - pending_mail = mails.PENDING_VERIFICATION - mails.send_mail( - claimer_email, - pending_mail, + NotificationType.objects.get( + name=NotificationType.Type.USER_PENDING_VERIFICATION + ).emit( user=unclaimed_user, - referrer=referrer, - fullname=unclaimed_record['name'], - node=node, - can_change_preferences=False, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + event_context={ + 'user': unclaimed_user.id, + 'referrer': referrer.id, + 'fullname': unclaimed_record['name'], + 'node': node.id, + 'logo': logo, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) - mail_tpl = mails.FORWARD_INVITE + notification_type = NotificationType.Type.USER_FORWARD_INVITE to_addr = referrer.username - # Send an email to the claimer (Option 1) or to the referrer (Option 2) with `claim_url` - mails.send_mail( - to_addr, - mail_tpl, - user=unclaimed_user, - referrer=referrer, - node=node, - claim_url=claim_url, - email=claimer_email, - fullname=unclaimed_record['name'], - branded_service=node.provider, - can_change_preferences=False, - logo=logo if logo else settings.OSF_LOGO, - osf_contact_email=settings.OSF_CONTACT_EMAIL, + NotificationType.objects.get(name=notification_type).emit( + user=referrer, + event_context={ + 'user': unclaimed_user.id, + 'referrer': referrer.id, + 'fullname': unclaimed_record['name'], + 'node': node.id, + 'logo': logo, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) return to_addr def check_email_throttle(node, contributor, throttle=None): - throttle = throttle or settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE - contributor_record = contributor.contributor_added_email_records.get(node._id, {}) - if contributor_record: - timestamp = contributor_record.get('last_sent', None) - if timestamp: - if not throttle_period_expired(timestamp, throttle): - return True - else: - contributor.contributor_added_email_records[node._id] = {} + """ + Check whether a 'contributor added' notification was sent recently + (within the throttle period) for the given node and contributor. + Args: + node (AbstractNode): The node to check. + contributor (OSFUser): The contributor being notified. + throttle (int, optional): Throttle period in seconds (defaults to CONTRIBUTOR_ADDED_EMAIL_THROTTLE setting). 
-@contributor_added.connect -def notify_added_contributor(node, contributor, auth=None, email_template='default', throttle=None, *args, **kwargs): + Returns: + bool: True if throttled (email was sent recently), False otherwise. + """ + from osf.models import Notification, NotificationType, NotificationSubscription + from website import settings + + throttle = throttle or settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE + + try: + notification_type = NotificationType.objects.get( + name=NotificationType.Type.NODE_COMMENT.value # or whatever event type you're using for 'contributor added' + ) + except NotificationType.DoesNotExist: + return False # Fail-safe: if the notification type isn't set up, don't throttle + from django.contrib.contenttypes.models import ContentType + from datetime import timedelta + + # Check for an active subscription for this contributor and this node + subscription = NotificationSubscription.objects.filter( + user=contributor, + notification_type=notification_type, + content_type=ContentType.objects.get_for_model(node), + object_id=str(node.id) + ).first() + + if not subscription: + return False # No subscription means no previous notifications, so no throttling + + # Check the most recent Notification for this subscription + last_notification = Notification.objects.filter( + subscription=subscription, + sent__isnull=False + ).order_by('-sent').first() + + if last_notification and last_notification.sent: + cutoff_time = timezone.now() - timedelta(seconds=throttle) + return last_notification.sent > cutoff_time + + return False # No previous sent notification, not throttled + +def notify_added_contributor(node, contributor, auth=None, email_template=None, *args, **kwargs): + """Send a notification to a contributor who was just added to a node. + + Handles: + - Unregistered contributor invitations. + - Registered contributor notifications. + - Throttle checks to avoid repeated emails. + + Args: + node (AbstractNode): The node to which the contributor was added. + contributor (OSFUser): The user being added. + auth (Auth, optional): Authorization context. + email_template (str, optional): Template identifier (default: 'default'). + throttle (int, optional): Throttle period in seconds. + """ logo = settings.OSF_LOGO - if check_email_throttle(node, contributor, throttle=throttle): + if check_email_throttle_claim_email(node, contributor): return if email_template == 'false': return - if not getattr(node, 'is_published', True): - return - if not contributor.is_registered: - unreg_contributor_added.send( - node, - contributor=contributor, - auth=auth, - email_template=email_template - ) - return - - # Email users for projects, or for components where they are not contributors on the parent node. 
- contrib_on_parent_node = isinstance(node, (Preprint, DraftRegistration)) or \ - (not node.parent_node or (node.parent_node and not node.parent_node.is_contributor(contributor))) - if contrib_on_parent_node: - if email_template == 'preprint': - if node.provider.is_default: - email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT - logo = settings.OSF_PREPRINTS_LOGO - else: - email_template = NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT - logo = node.provider._id - elif email_template == 'draft_registration': - email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION - elif email_template == 'access_request': - email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST - elif node.has_linked_published_preprints: - # Project holds supplemental materials for a published preprint - email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF - logo = settings.OSF_PREPRINTS_LOGO - else: - email_template = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT - - NotificationType.objects.get(name=email_template).emit( - user=contributor, - event_context={ - 'node': node.id, - 'referrer_name': auth.user.fullname if auth else '', - 'is_initiator': getattr(auth, 'user', False) == contributor, - 'all_global_subscriptions_none': check_if_all_global_subscriptions_are_none(contributor), - 'branded_service': node.provider, - 'can_change_preferences': False, - 'logo': logo, - 'osf_contact_email': settings.OSF_CONTACT_EMAIL, - 'published_preprints': [] if isinstance(node, (Preprint, DraftRegistration)) else serialize_preprints(node, contributor) - } - ) - - contributor.contributor_added_email_records[node._id]['last_sent'] = get_timestamp() - contributor.save() + notification_type = email_template or NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT + if notification_type == 'default': + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT + if notification_type == 'draft_registration': + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + + if node and getattr(node, 'has_linked_published_preprints', None): + notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF + logo = settings.OSF_PREPRINTS_LOGO + + provider = node.provider + NotificationType.objects.get( + name=notification_type + ).emit( + user=contributor, + event_context={ + 'user': contributor.id, + 'node': node.id, + 'referrer_name': auth.user.fullname if auth else '', + 'is_initiator': getattr(auth, 'user', False) == contributor.id, + 'all_global_subscriptions_none': False, + 'branded_service': getattr(provider, 'id', None), + 'can_change_preferences': False, + 'logo': logo, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + 'published_preprints': [] if isinstance(node, (Preprint, DraftRegistration)) else serialize_preprints(node, user=None), + } + ) @contributor_added.connect def add_recently_added_contributor(node, contributor, auth=None, *args, **kwargs): @@ -732,7 +780,6 @@ def claim_user_registered(auth, node, **kwargs): if should_claim: node.replace_contributor(old=unreg_user, new=current_user) node.save() - status.push_status_message( 'You are now a contributor to this project.', kind='success', From ccaf2c46e4f688cf72f673359199a77ebbbba545 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 18 Jul 2025 12:27:31 -0400 Subject: [PATCH 086/176] update crossref messages to new notification system --- api/crossref/views.py | 16 ++++++++-------- 
notifications.yaml | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/api/crossref/views.py b/api/crossref/views.py index 48edb378559..d93d5b43ef2 100644 --- a/api/crossref/views.py +++ b/api/crossref/views.py @@ -6,8 +6,7 @@ from rest_framework.views import APIView from api.crossref.permissions import RequestComesFromMailgun -from framework.auth.views import mails -from osf.models import Preprint +from osf.models import Preprint, NotificationType from website import settings from website.preprints.tasks import mint_doi_on_crossref_fail @@ -77,13 +76,14 @@ def post(self, request): if dois_processed != record_count or status != 'completed': if unexpected_errors: - batch_id = crossref_email_content.find('batch_id').text email_error_text = request.POST['body-plain'] - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.CROSSREF_ERROR, - batch_id=batch_id, - email_content=email_error_text, + batch_id = crossref_email_content.find('batch_id').text + NotificationType.objects.get(name=NotificationType.Type.DESK_OSF_SUPPORT_EMAIL).emit( + user=type('staff', (), {'username': settings.OSF_SUPPORT_EMAIL}), + event_context={ + 'batch_id': batch_id, + 'email_content': request.POST['body-plain'], + }, ) logger.error(f'Error submitting metadata for batch_id {batch_id} with CrossRef, email sent to help desk: {email_error_text}') diff --git a/notifications.yaml b/notifications.yaml index da31b544be1..29739fe7fa0 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -285,5 +285,5 @@ notification_types: - name: desk_osf_support_email __docs__: ... object_content_type_model_name: desk - template: 'website/templates/emails/new_pending_submissions.html.mako' + template: 'website/templates/emails/crossref_error.html.mako' notification_freq_default: instantly From d0f671ffe1fd5d954ae1b092ab1da9fb5bdeca50 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 18 Jul 2025 12:31:43 -0400 Subject: [PATCH 087/176] update messages for primary_email and exports to new notification system --- notifications.yaml | 2 +- osf/models/notification.py | 7 ++- tests/test_adding_contributor_views.py | 82 +++++++++++++------------- website/profile/views.py | 31 +++++----- website/settings/defaults.py | 2 +- 5 files changed, 64 insertions(+), 60 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index 29739fe7fa0..34ae734c46e 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -285,5 +285,5 @@ notification_types: - name: desk_osf_support_email __docs__: ... 
object_content_type_model_name: desk - template: 'website/templates/emails/crossref_error.html.mako' + template: 'website/templates/emails/crossref_doi_error.html.mako' notification_freq_default: instantly diff --git a/osf/models/notification.py b/osf/models/notification.py index 557712b81a5..04a7b2026ae 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -1,6 +1,8 @@ import logging from django.db import models +from django.utils import timezone + from website import settings from api.base import settings as api_settings from osf import email @@ -50,9 +52,8 @@ def send(self, protocol_type='email', recipient=None): self.mark_sent() def mark_sent(self) -> None: - raise NotImplementedError('mark_sent must be implemented by subclasses.') - # self.sent = timezone.now() - # self.save(update_fields=['sent']) + self.sent = timezone.now() + self.save(update_fields=['sent']) def mark_seen(self) -> None: raise NotImplementedError('mark_seen must be implemented by subclasses.') diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index b6015e194bf..e8459920de7 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -49,7 +49,7 @@ send_claim_registered_email, ) from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag -from conftest import start_mock_send_grid +from conftest import start_mock_notification_send @pytest.mark.enable_implicit_clean @mock.patch('website.mails.settings.USE_EMAIL', True) @@ -64,7 +64,7 @@ def setUp(self): # Authenticate all requests contributor_added.connect(notify_added_contributor) - self.mock_send_grid = start_mock_send_grid(self) + self.mock_notification_send = start_mock_notification_send(self) def test_serialize_unregistered_without_record(self): name, email = fake.name(), fake_email() @@ -241,7 +241,7 @@ def test_add_contributors_post_only_sends_one_email_to_registered_user(self): self.app.post(url, json=payload, auth=self.creator.auth) # send_mail should only have been called once - assert self.mock_send_grid.call_count == 1 + assert self.mock_notification_send.call_count == 1 def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self): # Project has a component with a sub-component @@ -268,7 +268,7 @@ def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_nod self.app.post(url, json=payload, auth=self.creator.auth) # send_mail is called for both the project and the sub-component - assert self.mock_send_grid.call_count == 2 + assert self.mock_notification_send.call_count == 2 @mock.patch('website.project.views.contributor.send_claim_email') def test_email_sent_when_unreg_user_is_added(self, send_mail): @@ -299,8 +299,8 @@ def test_email_sent_when_reg_user_is_added(self): project = ProjectFactory(creator=self.auth.user) project.add_contributors(contributors, auth=self.auth) project.save() - assert self.mock_send_grid.called - + assert self.mock_notification_send.called + contributor.refresh_from_db() assert contributor.contributor_added_email_records[project._id]['last_sent'] == approx(int(time.time()), rel=1) def test_contributor_added_email_sent_to_unreg_user(self): @@ -308,17 +308,17 @@ def test_contributor_added_email_sent_to_unreg_user(self): project = ProjectFactory() project.add_unregistered_contributor(fullname=unreg_user.fullname, email=unreg_user.email, auth=Auth(project.creator)) project.save() - assert self.mock_send_grid.called + assert 
self.mock_notification_send.called def test_forking_project_does_not_send_contributor_added_email(self): project = ProjectFactory() project.fork_node(auth=Auth(project.creator)) - assert not self.mock_send_grid.called + assert not self.mock_notification_send.called def test_templating_project_does_not_send_contributor_added_email(self): project = ProjectFactory() project.use_as_template(auth=Auth(project.creator)) - assert not self.mock_send_grid.called + assert not self.mock_notification_send.called @mock.patch('website.archiver.tasks.archive') def test_registering_project_does_not_send_contributor_added_email(self, mock_archive): @@ -331,18 +331,18 @@ def test_registering_project_does_not_send_contributor_added_email(self, mock_ar None, provider=provider ) - assert not self.mock_send_grid.called + assert not self.mock_notification_send.called def test_notify_contributor_email_does_not_send_before_throttle_expires(self): contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) notify_added_contributor(project, contributor, auth) - assert self.mock_send_grid.called + assert self.mock_notification_send.called # 2nd call does not send email because throttle period has not expired notify_added_contributor(project, contributor, auth) - assert self.mock_send_grid.call_count == 1 + assert self.mock_notification_send.call_count == 1 def test_notify_contributor_email_sends_after_throttle_expires(self): throttle = 0.5 @@ -351,37 +351,37 @@ def test_notify_contributor_email_sends_after_throttle_expires(self): project = ProjectFactory() auth = Auth(project.creator) notify_added_contributor(project, contributor, auth, throttle=throttle) - assert self.mock_send_grid.called + assert self.mock_notification_send.called time.sleep(1) # throttle period expires notify_added_contributor(project, contributor, auth, throttle=throttle) - assert self.mock_send_grid.call_count == 2 + assert self.mock_notification_send.call_count == 2 def test_add_contributor_to_fork_sends_email(self): contributor = UserFactory() fork = self.project.fork_node(auth=Auth(self.creator)) fork.add_contributor(contributor, auth=Auth(self.creator)) fork.save() - assert self.mock_send_grid.called - assert self.mock_send_grid.call_count == 1 + assert self.mock_notification_send.called + assert self.mock_notification_send.call_count == 1 def test_add_contributor_to_template_sends_email(self): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) template.add_contributor(contributor, auth=Auth(self.creator)) template.save() - assert self.mock_send_grid.called - assert self.mock_send_grid.call_count == 1 + assert self.mock_notification_send.called + assert self.mock_notification_send.call_count == 1 def test_creating_fork_does_not_email_creator(self): contributor = UserFactory() fork = self.project.fork_node(auth=Auth(self.creator)) - assert not self.mock_send_grid.called + assert not self.mock_notification_send.called def test_creating_template_does_not_email_creator(self): contributor = UserFactory() template = self.project.use_as_template(auth=Auth(self.creator)) - assert not self.mock_send_grid.called + assert not self.mock_notification_send.called def test_add_multiple_contributors_only_adds_one_log(self): n_logs_pre = self.project.logs.count() @@ -447,7 +447,7 @@ def setUp(self): self.project = ProjectFactory(creator=self.user) self.invite_url = f'/api/v1/project/{self.project._primary_key}/invite_contributor/' - self.mock_send_grid = start_mock_send_grid(self) + 
self.mock_notification_send = start_mock_notification_send(self) def test_invite_contributor_post_if_not_in_db(self): name, email = fake.name(), fake_email() @@ -527,7 +527,7 @@ def test_send_claim_email_to_given_email(self): project.save() send_claim_email(email=given_email, unclaimed_user=unreg_user, node=project) - self.mock_send_grid.assert_called() + self.mock_notification_send.assert_called() def test_send_claim_email_to_referrer(self): project = ProjectFactory() @@ -540,7 +540,7 @@ def test_send_claim_email_to_referrer(self): project.save() send_claim_email(email=real_email, unclaimed_user=unreg_user, node=project) - assert self.mock_send_grid.called + assert self.mock_notification_send.called def test_send_claim_email_before_throttle_expires(self): project = ProjectFactory() @@ -552,11 +552,11 @@ def test_send_claim_email_before_throttle_expires(self): ) project.save() send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - self.mock_send_grid.reset_mock() + self.mock_notification_send.reset_mock() # 2nd call raises error because throttle hasn't expired with pytest.raises(HTTPError): send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - assert not self.mock_send_grid.called + assert not self.mock_notification_send.called @pytest.mark.enable_implicit_clean @@ -593,7 +593,7 @@ def setUp(self): ) self.project.save() - self.mock_send_grid = start_mock_send_grid(self) + self.mock_notification_send = start_mock_notification_send(self) @mock.patch('website.project.views.contributor.send_claim_email') def test_claim_user_already_registered_redirects_to_claim_user_registered(self, claim_email): @@ -704,10 +704,10 @@ def test_claim_user_post_with_registered_user_id(self): res = self.app.post(url, json=payload) # mail was sent - assert self.mock_send_grid.call_count == 2 + assert self.mock_notification_send.call_count == 2 # ... 
to the correct address - referrer_call = self.mock_send_grid.call_args_list[0] - claimer_call = self.mock_send_grid.call_args_list[1] + referrer_call = self.mock_notification_send.call_args_list[0] + claimer_call = self.mock_notification_send.call_args_list[1] assert referrer_call[1]['to_addr'] == self.referrer.email assert claimer_call[1]['to_addr'] == reg_user.email @@ -726,10 +726,12 @@ def test_send_claim_registered_email(self): unclaimed_user=self.user, node=self.project ) - assert self.mock_send_grid.call_count == 2 - first_call_args = self.mock_send_grid.call_args_list[0][1] - assert first_call_args['to_addr'] == self.referrer.email - second_call_args = self.mock_send_grid.call_args_list[1][1] + assert self.mock_notification_send.call_count == 2 + first_call_args = self.mock_notification_send.call_args_list[0][1] + print(first_call_args) + second_call_args = self.mock_notification_send.call_args_list[1][1] + print(second_call_args) + assert second_call_args['to_addr'] == reg_user.email def test_send_claim_registered_email_before_throttle_expires(self): @@ -739,7 +741,7 @@ def test_send_claim_registered_email_before_throttle_expires(self): unclaimed_user=self.user, node=self.project, ) - self.mock_send_grid.reset_mock() + self.mock_notification_send.reset_mock() # second call raises error because it was called before throttle period with pytest.raises(HTTPError): send_claim_registered_email( @@ -747,7 +749,7 @@ def test_send_claim_registered_email_before_throttle_expires(self): unclaimed_user=self.user, node=self.project, ) - assert not self.mock_send_grid.called + assert not self.mock_notification_send.called @mock.patch('website.project.views.contributor.send_claim_registered_email') def test_claim_user_post_with_email_already_registered_sends_correct_email( @@ -935,17 +937,17 @@ def test_claim_user_post_returns_fullname(self): }, ) assert res.json['fullname'] == self.given_name - assert self.mock_send_grid.called + assert self.mock_notification_send.called def test_claim_user_post_if_email_is_different_from_given_email(self): email = fake_email() # email that is different from the one the referrer gave url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' self.app.post(url, json={'value': email, 'pk': self.user._primary_key} ) - assert self.mock_send_grid.called - assert self.mock_send_grid.call_count == 2 - call_to_invited = self.mock_send_grid.mock_calls[0] + assert self.mock_notification_send.called + assert self.mock_notification_send.call_count == 2 + call_to_invited = self.mock_notification_send.mock_calls[0] call_to_invited.assert_called_with(to_addr=email) - call_to_referrer = self.mock_send_grid.mock_calls[1] + call_to_referrer = self.mock_notification_send.mock_calls[1] call_to_referrer.assert_called_with(to_addr=self.given_email) def test_claim_url_with_bad_token_returns_400(self): diff --git a/website/profile/views.py b/website/profile/views.py index c4306b92125..bc8e91765d8 100644 --- a/website/profile/views.py +++ b/website/profile/views.py @@ -26,10 +26,9 @@ from framework.utils import throttle_period_expired from osf import features -from osf.models import ApiOAuth2Application, ApiOAuth2PersonalToken, OSFUser +from osf.models import ApiOAuth2Application, ApiOAuth2PersonalToken, OSFUser, NotificationType from osf.exceptions import BlockedEmailError, OSFError from osf.utils.requests import string_type_request_headers -from website import mails from website import mailchimp_utils from website import settings from website import 
language @@ -188,16 +187,16 @@ def update_user(auth): # make sure the new username has already been confirmed if username and username != user.username and user.emails.filter(address=username).exists(): - - mails.send_mail( - user.username, - mails.PRIMARY_EMAIL_CHANGED, + NotificationType.objects.get( + name=NotificationType.Type.USER_PRIMARY_EMAIL_CHANGED + ).emit( user=user, - new_address=username, - can_change_preferences=False, - osf_contact_email=settings.OSF_CONTACT_EMAIL + event_context={ + 'new_address': username, + 'can_change_preferences': False, + 'osf_contact_email': settings.OSF_CONTACT_EMAIL, + } ) - # Remove old primary email from subscribed mailing lists for list_name, subscription in user.mailchimp_mailing_lists.items(): if subscription: @@ -806,11 +805,13 @@ def request_export(auth): data={'message_long': 'Too many requests. Please wait a while before sending another account export request.', 'error_type': 'throttle_error'}) - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.REQUEST_EXPORT, - user=auth.user, - can_change_preferences=False, + NotificationType.objects.get( + name=NotificationType.Type.DESK_REQUEST_EXPORT + ).emit( + user=user, + event_context={ + 'can_change_preferences': False + } ) user.email_last_sent = timezone.now() user.save() diff --git a/website/settings/defaults.py b/website/settings/defaults.py index a20a50c3e52..badafc32862 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -142,7 +142,7 @@ def parent_dir(path): USE_EMAIL = True FROM_EMAIL = 'openscienceframework-noreply@osf.io' - +ENABLE_TEST_EMAIL = False # support email OSF_SUPPORT_EMAIL = 'support@osf.io' # contact email From ecb3696a7ff7345bceae511837e6396ba2df8472 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 18 Jul 2025 15:28:31 -0400 Subject: [PATCH 088/176] clean-up mailhog and populate notification types code --- api_tests/mailhog/test_mailhog.py | 18 ++++---- framework/auth/views.py | 22 +++++----- notifications.yaml | 5 +++ osf/email/__init__.py | 44 +++++++++++++++++-- .../commands/populate_notification_types.py | 2 +- osf/models/mixins.py | 4 +- osf/models/notification.py | 2 +- .../emails/initial_confirm.html.mako | 2 +- .../project_affiliation_changed.html.mako | 4 +- 9 files changed, 73 insertions(+), 30 deletions(-) diff --git a/api_tests/mailhog/test_mailhog.py b/api_tests/mailhog/test_mailhog.py index e7720e96afa..b911eea9b5c 100644 --- a/api_tests/mailhog/test_mailhog.py +++ b/api_tests/mailhog/test_mailhog.py @@ -19,14 +19,13 @@ OsfTestCase, ) from website.util import api_url_for -from conftest import start_mock_send_grid @pytest.mark.django_db @pytest.mark.usefixtures('mock_send_grid') class TestMailHog: - def test_mailhog_recived_mail(self, mock_send_grid): + def test_mailhog_received_mail(self, mock_send_grid): with override_switch(features.ENABLE_MAILHOG, active=True): mailhog_v1 = f'{settings.MAILHOG_API_HOST}/api/v1/messages' mailhog_v2 = f'{settings.MAILHOG_API_HOST}/api/v2/messages' @@ -43,6 +42,7 @@ def test_mailhog_recived_mail(self, mock_send_grid): @pytest.mark.django_db @mock.patch('website.mails.settings.USE_EMAIL', True) +@mock.patch('website.mails.settings.ENABLE_TEST_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthMailhog(OsfTestCase): @@ -51,15 +51,12 @@ def setUp(self): self.user = AuthUserFactory() self.auth = self.user.auth - self.mock_send_grid = start_mock_send_grid(self) - - def test_recived_confirmation(self): + def test_received_confirmation(self): url = 
api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' mailhog_v1 = f'{settings.MAILHOG_API_HOST}/api/v1/messages' mailhog_v2 = f'{settings.MAILHOG_API_HOST}/api/v2/messages' requests.delete(mailhog_v1) - with override_switch(features.ENABLE_MAILHOG, active=True): with capture_signals() as mock_signals: self.app.post( @@ -74,10 +71,13 @@ def test_recived_confirmation(self): res = requests.get(mailhog_v2).json() assert mock_signals.signals_sent() == {auth.signals.user_registered, auth.signals.unconfirmed_user_created} - assert self.mock_send_grid.called user = OSFUser.objects.get(username=email) + assert res['total'] == 1 + full_email = f"{res['items'][0]['To'][0]['Mailbox']}@{res['items'][0]['To'][0]['Domain']}" + assert full_email == user.username + decoded_body = res['items'][0]['Content']['Body'] + user_token = list(user.email_verifications.keys())[0] ideal_link_path = f'/confirm/{user._id}/{user_token}/' - - assert ideal_link_path in res['items'][0]['Content']['Body'] + assert ideal_link_path in decoded_body diff --git a/framework/auth/views.py b/framework/auth/views.py index 0938bb58510..8ef5d5d29b3 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -823,14 +823,14 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte :return: :raises: KeyError if user does not have a confirmation token for the given email. """ - # confirmation_url = user.get_confirmation_url( - # email, - # external=True, - # force=True, - # renew=renew, - # external_id_provider=external_id_provider, - # destination=destination - # ) + confirmation_url = user.get_confirmation_url( + email, + external=True, + force=True, + renew=renew, + external_id_provider=external_id_provider, + destination=destination + ) try: merge_target = OSFUser.objects.get(emails__address=email) @@ -858,11 +858,11 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte # Account creation confirmation: from OSF notificaton_type = NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL - NotificationType.objects.get( - name=notificaton_type.value, - ).emit( + NotificationType.objects.get(name=notificaton_type).emit( user=user, event_context={ + 'user_fullname': user.fullname, + 'confirmation_url': confirmation_url, 'can_change_preferences': False, 'osf_contact_email': settings.OSF_CONTACT_EMAIL, }, diff --git a/notifications.yaml b/notifications.yaml index 34ae734c46e..b6bc1b9e13e 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -103,6 +103,11 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/forward_invite.html.mako' notification_freq_default: instantly + - name: user_initial_confirm_email + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/initial_confirm.html.mako' + notification_freq_default: instantly - name: external_confirm_success __docs__: ... 
object_content_type_model_name: osfuser diff --git a/osf/email/__init__.py b/osf/email/__init__.py index 689519bdeb5..2d35db074c1 100644 --- a/osf/email/__init__.py +++ b/osf/email/__init__.py @@ -1,9 +1,15 @@ import logging import smtplib from email.mime.text import MIMEText + +import waffle from sendgrid import SendGridAPIClient from sendgrid.helpers.mail import Mail + +from osf import features from website import settings +from django.core.mail import EmailMessage, get_connection + def send_email_over_smtp(to_addr, notification_type, context): """Send an email notification using SMTP. This is typically not used in productions as other 3rd party mail services @@ -19,16 +25,25 @@ def send_email_over_smtp(to_addr, notification_type, context): if not settings.MAIL_USERNAME and settings.MAIL_PASSWORD: raise NotImplementedError('MAIL_USERNAME and MAIL_PASSWORD are required for STMP') + if waffle.switch_is_active(features.ENABLE_MAILHOG): + send_to_mailhog( + subject=notification_type.subject, + message=notification_type.template.format(**context), + to_email=to_addr, + from_email=settings.MAIL_USERNAME, + ) + return + msg = MIMEText( notification_type.template.format(**context), 'html', _charset='utf-8' ) - msg['Subject'] = notification_type.email_subject_line_template.format(context=context) + + if notification_type.subject: + msg['Subject'] = notification_type.subject.format(**context) with smtplib.SMTP(settings.MAIL_SERVER) as server: - server.ehlo() - server.starttls() server.ehlo() server.login(settings.MAIL_USERNAME, settings.MAIL_PASSWORD) server.sendmail( @@ -66,3 +81,26 @@ def send_email_with_send_grid(to_addr, notification_type, context): except Exception as exc: logging.error(f'Failed to send email notification to {to_addr}: {exc}') raise exc + +def send_to_mailhog(subject, message, from_email, to_email, attachment_name=None, attachment_content=None): + email = EmailMessage( + subject=subject, + body=message, + from_email=from_email, + to=[to_email], + connection=get_connection( + backend='django.core.mail.backends.smtp.EmailBackend', + host=settings.MAILHOG_HOST, + port=settings.MAILHOG_PORT, + username='', + password='', + use_tls=False, + use_ssl=False, + ) + ) + email.content_subtype = 'html' + + if attachment_name and attachment_content: + email.attach(attachment_name, attachment_content) + + email.send() diff --git a/osf/management/commands/populate_notification_types.py b/osf/management/commands/populate_notification_types.py index 8f20531f06a..26fc02f5dd6 100644 --- a/osf/management/commands/populate_notification_types.py +++ b/osf/management/commands/populate_notification_types.py @@ -54,13 +54,13 @@ def populate_notification_types(*args, **kwargs): with open(notification_type['template']) as stream: template = stream.read() - notification_types['template'] = template notification_types['notification_freq'] = notification_freq nt, _ = NotificationType.objects.update_or_create( name=notification_type['name'], defaults=notification_type, ) nt.object_content_type = content_type + nt.template = template nt.save() diff --git a/osf/models/mixins.py b/osf/models/mixins.py index b8e7e8e8778..5ad76499f3d 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -315,8 +315,8 @@ def add_affiliated_institution(self, inst, user, log=True, ignore_user_affiliati ).emit( user=user, event_context={ - 'user': user, - 'node': self, + 'user_fullname': user.fullname, + 'node_title': self.title, } ) if log: diff --git a/osf/models/notification.py b/osf/models/notification.py index 
04a7b2026ae..5d339150111 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -24,7 +24,7 @@ def send(self, protocol_type='email', recipient=None): if not protocol_type == 'email': raise NotImplementedError(f'Protocol type {protocol_type}. Email notifications are only implemented.') - recipient_address = getattr(recipient, 'username', None) or self.subscription.user + recipient_address = getattr(recipient, 'username', None) or self.subscription.user.username if protocol_type == 'email' and settings.DEV_MODE and settings.ENABLE_TEST_EMAIL: email.send_email_over_smtp( diff --git a/website/templates/emails/initial_confirm.html.mako b/website/templates/emails/initial_confirm.html.mako index 29222a03ef6..acc3ab42505 100644 --- a/website/templates/emails/initial_confirm.html.mako +++ b/website/templates/emails/initial_confirm.html.mako @@ -3,7 +3,7 @@ <%def name="content()"> - Hello ${user.fullname},
    + Hello ${user_fullname},

    Thank you for registering for an account on the Open Science Framework.

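A minimal sketch (not part of the patch) of the emit call the updated template above pairs with; the context keys mirror the framework/auth/views.py hunk earlier in this patch, and the helper name is illustrative only.

from osf.models import NotificationType
from website import settings


def emit_initial_confirm(user, confirmation_url):
    # Emit the account-confirmation notification with the flat keys that
    # initial_confirm.html.mako now reads (user_fullname, confirmation_url).
    NotificationType.objects.get(
        name=NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL,
    ).emit(
        user=user,
        event_context={
            'user_fullname': user.fullname,
            'confirmation_url': confirmation_url,
            'can_change_preferences': False,
            'osf_contact_email': settings.OSF_CONTACT_EMAIL,
        },
    )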
    diff --git a/website/templates/emails/project_affiliation_changed.html.mako b/website/templates/emails/project_affiliation_changed.html.mako index cb13ecb98f9..30c08ae7e69 100644 --- a/website/templates/emails/project_affiliation_changed.html.mako +++ b/website/templates/emails/project_affiliation_changed.html.mako @@ -3,10 +3,10 @@ <%def name="content()"> - Hello ${user.fullname},
    + Hello ${user_fullname},

    An Institutional admin has made changes to the affiliations of your project: - ${node.title}.
    + ${node_title}.

    Want more information? Visit OSF to learn about OSF, or COS for information about its supporting organization, From c2a7299d87a8171cceb74361a55029aead296773 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 21 Jul 2025 08:03:05 -0400 Subject: [PATCH 089/176] clean up contributor tests and save behavior for notifications --- api/nodes/serializers.py | 12 ++-- ...est_draft_registration_contributor_list.py | 44 ++++++------- api_tests/logs/views/test_log_params.py | 2 +- .../views/test_node_contributors_detail.py | 5 +- .../views/test_node_contributors_list.py | 50 ++++++++------- api_tests/nodes/views/test_node_list.py | 6 +- .../test_preprint_contributors_detail.py | 7 ++- .../views/test_preprint_contributors_list.py | 15 +++-- .../views/test_registration_detail.py | 1 - api_tests/users/views/test_user_claim.py | 1 - notifications.yaml | 16 +++++ osf/models/mixins.py | 62 ++++++++++++++----- osf_tests/conftest.py | 12 ---- osf_tests/test_comment.py | 8 ++- osf_tests/test_node.py | 6 +- tests/base.py | 4 -- tests/test_adding_contributor_views.py | 4 -- tests/test_preprints.py | 6 +- tests/test_project_contibutor_views.py | 5 +- tests/utils.py | 34 ++++++++++ website/project/views/contributor.py | 38 +++++++----- 21 files changed, 215 insertions(+), 123 deletions(-) diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index e4e5e01e983..4154dfbb5f8 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -1256,11 +1256,15 @@ def create(self, validated_data): try: contributor_dict = { - 'auth': auth, 'user_id': id, 'email': email, 'full_name': full_name, 'send_email': send_email, - 'bibliographic': bibliographic, 'index': index, 'save': True, + 'auth': auth, + 'user_id': id, + 'email': email, + 'full_name': full_name, + 'send_email': send_email, + 'bibliographic': bibliographic, + 'index': index, + 'permissions': permissions, } - - contributor_dict['permissions'] = permissions contributor_obj = node.add_contributor_registered_or_not(**contributor_dict) except ValidationError as e: raise exceptions.ValidationError(detail=e.messages[0]) diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py index 71fe7450b6d..71940714d48 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py @@ -25,6 +25,7 @@ ) from osf.utils import permissions from tests.base import capture_signals +from tests.utils import capture_notifications from website.project.signals import contributor_added @@ -142,7 +143,7 @@ def test_contributors_order_is_the_same_over_multiple_requests( project_public.add_unregistered_contributor( 'Robert Jackson', 'robert@gmail.com', - auth=Auth(user), save=True + auth=Auth(user) ) for i in range(0, 10): @@ -208,37 +209,36 @@ def create_serializer(self): return DraftRegistrationContributorsCreateSerializer -@pytest.mark.usefixtures('mock_send_grid') class TestDraftContributorCreateEmail(DraftRegistrationCRUDTestCase, TestNodeContributorCreateEmail): @pytest.fixture() def url_project_contribs(self, project_public): # Overrides TestNodeContributorCreateEmail return f'/{API_BASE}draft_registrations/{project_public._id}/contributors/' - def test_add_contributor_sends_email( - self, app, user, user_two, - url_project_contribs, mock_send_grid): + def test_add_contributor_sends_email(self, app, user, user_two, url_project_contribs): 
# Overrides TestNodeContributorCreateEmail - url = f'{url_project_contribs}?send_email=draft_registration' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': { - }, - 'relationships': { - 'users': { - 'data': { - 'type': 'users', - 'id': user_two._id + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_project_contribs}?send_email=draft_registration', + { + 'data': { + 'type': 'contributors', + 'attributes': { + }, + 'relationships': { + 'users': { + 'data': { + 'type': 'users', + 'id': user_two._id + } + } } } - } - } - } - - res = app.post_json_api(url, payload, auth=user.auth) + }, + auth=user.auth + ) assert res.status_code == 201 - assert mock_send_grid.call_count == 1 + assert len(notifications) == 1 # Overrides TestNodeContributorCreateEmail def test_add_contributor_signal_if_default( diff --git a/api_tests/logs/views/test_log_params.py b/api_tests/logs/views/test_log_params.py index 2df940838ce..39851684cbb 100644 --- a/api_tests/logs/views/test_log_params.py +++ b/api_tests/logs/views/test_log_params.py @@ -37,7 +37,7 @@ def test_unregistered_contributor_added_has_contributor_info_in_params( project.add_unregistered_contributor( 'Robert Jackson', 'robert@gmail.com', - auth=Auth(user_one), save=True + auth=Auth(user_one) ) relevant_log = project.logs.latest() url = f'/{API_BASE}logs/{relevant_log._id}/' diff --git a/api_tests/nodes/views/test_node_contributors_detail.py b/api_tests/nodes/views/test_node_contributors_detail.py index 57f7e41444f..f4319cfe6ba 100644 --- a/api_tests/nodes/views/test_node_contributors_detail.py +++ b/api_tests/nodes/views/test_node_contributors_detail.py @@ -124,8 +124,8 @@ def test_unregistered_contributor_detail_show_up_as_name_associated_with_project project_public.add_unregistered_contributor( 'Rheisen Dennis', 'reason@gmail.com', - auth=Auth(user), - save=True) + auth=Auth(user) + ) unregistered_contributor = project_public.contributors[1] url = self.make_resource_url(project_public._id, unregistered_contributor._id) @@ -138,7 +138,6 @@ def test_unregistered_contributor_detail_show_up_as_name_associated_with_project 'Nesiehr Sinned', 'reason@gmail.com', auth=Auth(user), - save=True ) url = self.make_resource_url(project_private._id, unregistered_contributor._id) res = app.get(url, auth=user.auth) diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index 81910a6ef55..fab0d4913f9 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -16,6 +16,7 @@ from osf.utils import permissions from rest_framework import exceptions from tests.base import capture_signals, fake +from tests.utils import capture_notifications from website.project.signals import contributor_added, contributor_removed from api_tests.utils import disconnected_from_listeners @@ -284,7 +285,9 @@ def test_unregistered_contributors_show_up_as_name_associated_with_project( ): project = ProjectFactory(creator=user, is_public=True) project.add_unregistered_contributor( - 'Robert Jackson', 'robert@gmail.com', auth=Auth(user), save=True + 'Robert Jackson', + 'robert@gmail.com', + auth=Auth(user) ) url = f'/{API_BASE}nodes/{project._id}/contributors/' res = app.get(url, auth=user.auth, expect_errors=True) @@ -301,7 +304,9 @@ def test_unregistered_contributors_show_up_as_name_associated_with_project( project_two = ProjectFactory(creator=user, is_public=True) project_two.add_unregistered_contributor( - 'Bob 
Jackson', 'robert@gmail.com', auth=Auth(user), save=True + 'Bob Jackson', + 'robert@gmail.com', + auth=Auth(user), ) url = f'/{API_BASE}nodes/{project_two._id}/contributors/' res = app.get(url, auth=user.auth, expect_errors=True) @@ -312,16 +317,15 @@ def test_unregistered_contributors_show_up_as_name_associated_with_project( res.json['data'][1]['embeds']['users']['data']['attributes']['full_name'] == 'Robert Jackson' ) - assert ( - res.json['data'][1]['attributes'].get('unregistered_contributor') - == 'Bob Jackson' - ) + assert res.json['data'][1]['attributes'].get('unregistered_contributor') == 'Bob Jackson' def test_contributors_order_is_the_same_over_multiple_requests( self, app, user, project_public, url_public ): project_public.add_unregistered_contributor( - 'Robert Jackson', 'robert@gmail.com', auth=Auth(user), save=True + 'Robert Jackson', + 'robert@gmail.com', + auth=Auth(user), ) for i in range(0, 10): @@ -956,7 +960,9 @@ def test_add_unregistered_contributor_already_contributor( ): name, email = fake.name(), fake_email() project_public.add_unregistered_contributor( - auth=Auth(user), fullname=name, email=email + auth=Auth(user), + fullname=name, + email=email ) payload = { 'data': { @@ -1226,20 +1232,22 @@ def test_add_contributor_no_email_if_false( def test_add_contributor_sends_email( self, mock_send_grid, app, user, user_two, url_project_contribs ): - url = f'{url_project_contribs}?send_email=default' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': {}, - 'relationships': { - 'users': {'data': {'type': 'users', 'id': user_two._id}} + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_project_contribs}?send_email=default', + { + 'data': { + 'type': 'contributors', + 'attributes': {}, + 'relationships': { + 'users': {'data': {'type': 'users', 'id': user_two._id}} + }, + } }, - } - } - - res = app.post_json_api(url, payload, auth=user.auth) - assert res.status_code == 201 - assert mock_send_grid.call_count == 1 + auth=user.auth + ) + assert res.status_code == 201 + assert len(notifications) == 1 @mock.patch('website.project.signals.contributor_added.send') def test_add_contributor_signal_if_default( diff --git a/api_tests/nodes/views/test_node_list.py b/api_tests/nodes/views/test_node_list.py index 15398613ea3..2e1990574ae 100644 --- a/api_tests/nodes/views/test_node_list.py +++ b/api_tests/nodes/views/test_node_list.py @@ -1603,9 +1603,11 @@ def test_create_component_inherit_contributors_with_blocked_email( self, app, user_one, title, category): parent_project = ProjectFactory(creator=user_one) parent_project.add_unregistered_contributor( - fullname='far', email='foo@bar.baz', + fullname='far', + email='foo@bar.baz', permissions=permissions.READ, - auth=Auth(user=user_one), save=True) + auth=Auth(user=user_one) + ) contributor = parent_project.contributors.filter(fullname='far').first() contributor.username = 'foo@example.com' contributor.save() diff --git a/api_tests/preprints/views/test_preprint_contributors_detail.py b/api_tests/preprints/views/test_preprint_contributors_detail.py index 16c05911d23..726656fda75 100644 --- a/api_tests/preprints/views/test_preprint_contributors_detail.py +++ b/api_tests/preprints/views/test_preprint_contributors_detail.py @@ -122,7 +122,7 @@ def test_unregistered_contributor_detail_show_up_as_name_associated_with_preprin 'Rheisen Dennis', 'reason@gmail.com', auth=Auth(user), - save=True) + ) unregistered_contributor = preprint_published.contributors[1] url = 
'/{}preprints/{}/contributors/{}/'.format( API_BASE, preprint_published._id, unregistered_contributor._id) @@ -134,7 +134,10 @@ def test_unregistered_contributor_detail_show_up_as_name_associated_with_preprin preprint_two = PreprintFactory(creator=user, is_public=True) preprint_two.add_unregistered_contributor( - 'Nesiehr Sinned', 'reason@gmail.com', auth=Auth(user), save=True) + 'Nesiehr Sinned', + 'reason@gmail.com', + auth=Auth(user), + ) url = '/{}preprints/{}/contributors/{}/'.format( API_BASE, preprint_two._id, unregistered_contributor._id) res = app.get(url, auth=user.auth, expect_errors=True) diff --git a/api_tests/preprints/views/test_preprint_contributors_list.py b/api_tests/preprints/views/test_preprint_contributors_list.py index 6676b542b60..e7a3d5d739f 100644 --- a/api_tests/preprints/views/test_preprint_contributors_list.py +++ b/api_tests/preprints/views/test_preprint_contributors_list.py @@ -307,7 +307,8 @@ def test_unregistered_contributors_show_up_as_name_associated_with_preprint( preprint.add_unregistered_contributor( 'Robert Jackson', 'robert@gmail.com', - auth=Auth(user), save=True) + auth=Auth(user) + ) url = f'/{API_BASE}preprints/{preprint._id}/contributors/' res = app.get(url, auth=user.auth, expect_errors=True) assert res.status_code == 200 @@ -318,7 +319,10 @@ def test_unregistered_contributors_show_up_as_name_associated_with_preprint( preprint_two = PreprintFactory(creator=user, is_published=True) preprint_two.add_unregistered_contributor( - 'Bob Jackson', 'robert@gmail.com', auth=Auth(user), save=True) + 'Bob Jackson', + 'robert@gmail.com', + auth=Auth(user) + ) url = f'/{API_BASE}preprints/{preprint_two._id}/contributors/' res = app.get(url, auth=user.auth, expect_errors=True) assert res.status_code == 200 @@ -333,7 +337,7 @@ def test_contributors_order_is_the_same_over_multiple_requests( preprint_published.add_unregistered_contributor( 'Robert Jackson', 'robert@gmail.com', - auth=Auth(user), save=True + auth=Auth(user) ) for i in range(0, 10): @@ -1058,7 +1062,10 @@ def test_add_unregistered_contributor_already_contributor( self, app, user, preprint_published, url_published): name, email = fake.name(), fake_email() preprint_published.add_unregistered_contributor( - auth=Auth(user), fullname=name, email=email) + auth=Auth(user), + fullname=name, + email=email + ) payload = { 'data': { 'type': 'contributors', diff --git a/api_tests/registrations/views/test_registration_detail.py b/api_tests/registrations/views/test_registration_detail.py index b9917bd70f9..39348e1f3c4 100644 --- a/api_tests/registrations/views/test_registration_detail.py +++ b/api_tests/registrations/views/test_registration_detail.py @@ -795,7 +795,6 @@ def test_withdraw_request_does_not_send_email_to_unregistered_admins( auth=Auth(user), permissions=permissions.ADMIN, existing_user=unreg, - save=True ) res = app.put_json_api(public_url, public_payload, auth=user.auth) diff --git a/api_tests/users/views/test_user_claim.py b/api_tests/users/views/test_user_claim.py index 0e265021c5c..d5f5967df57 100644 --- a/api_tests/users/views/test_user_claim.py +++ b/api_tests/users/views/test_user_claim.py @@ -37,7 +37,6 @@ def unreg_user(self, referrer, project): 'David Davidson', 'david@david.son', auth=Auth(referrer), - save=True ) @pytest.fixture() diff --git a/notifications.yaml b/notifications.yaml index b6bc1b9e13e..5c2fc55f770 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -48,6 +48,11 @@ notification_types: object_content_type_model_name: osfuser template: 
'website/templates/emails/contributor_added_draft_registration.html.mako' notification_freq_default: instantly + - name: user_contributor_added_osf_preprint + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/contributor_added_preprint_node_from_osf.html.mako' + notification_freq_default: instantly - name: user_external_login_link_success __docs__: ... object_content_type_model_name: osfuser @@ -216,6 +221,17 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/access_request_submitted.html.mako' notification_freq_default: instantly + - name: node_fork_failed + __docs__: This email is sent when a fork fails to be created, this could be due to addons or network outages or + technical errors. + object_content_type_model_name: abstractnode + template: 'website/templates/emails/fork_failed.html.mako' + notification_freq_default: instantly + - name: node_fork_completed + __docs__: This email is sent when a fork is successfully created, + object_content_type_model_name: abstractnode + template: 'website/templates/emails/fork_completed.html.mako' + notification_freq_default: instantly #### PREPRINT - name: pending_retraction_admin diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 5ad76499f3d..cb43c5c87d7 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -1505,8 +1505,16 @@ def add_contributors(self, contributors, auth=None, log=True, save=False): if save: self.save() - def add_unregistered_contributor(self, fullname, email, auth, send_email=None, - visible=True, permissions=None, save=False, existing_user=None): + def add_unregistered_contributor( + self, + fullname, + email, + auth, + send_email=None, + visible=True, + permissions=None, + existing_user=None + ): """Add a non-registered contributor to the project. :param str fullname: The full name of the person. 
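As the updated call sites in this patch show, add_unregistered_contributor no longer accepts a save flag and persists the contributor itself; a minimal usage sketch under that assumption (the wrapper name is hypothetical and the sample values are borrowed from the updated tests):

from framework.auth import Auth
from osf.utils import permissions


def invite_unregistered_contributor(project, referrer):
    # No save=True: the refactored method calls self.save() before returning
    # the (unregistered) contributor it created or reused.
    return project.add_unregistered_contributor(
        fullname='Jalen Hurts',
        email='gobirds@eagle.fly',
        auth=Auth(referrer),
        permissions=permissions.READ,  # optional, as are visible= and send_email=
    )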
@@ -1553,16 +1561,27 @@ def add_unregistered_contributor(self, fullname, email, auth, send_email=None, raise e self.add_contributor( - contributor, permissions=permissions, auth=auth, - visible=visible, send_email=send_email, log=True, save=False + contributor, + permissions=permissions, + auth=auth, + visible=visible, + send_email=send_email, + log=True, + save=False ) self._add_related_source_tags(contributor) self.save() return contributor - def add_contributor_registered_or_not(self, auth, user_id=None, - full_name=None, email=None, send_email=None, - permissions=None, bibliographic=True, index=None, save=False): + def add_contributor_registered_or_not(self, + auth, + user_id=None, + full_name=None, + email=None, + send_email=None, + permissions=None, + bibliographic=True, + index=None): OSFUser = apps.get_model('osf.OSFUser') send_email = send_email or self.contributor_email_template @@ -1575,8 +1594,14 @@ def add_contributor_registered_or_not(self, auth, user_id=None, raise ValidationValueError(f'{contributor.fullname} is already a contributor.') if contributor.is_registered: - contributor = self.add_contributor(contributor=contributor, auth=auth, visible=bibliographic, - permissions=permissions, send_email=send_email, save=True) + contributor = self.add_contributor( + contributor=contributor, + auth=auth, + visible=bibliographic, + permissions=permissions, + send_email=send_email, + save=True + ) else: if not full_name: raise ValueError( @@ -1584,9 +1609,13 @@ def add_contributor_registered_or_not(self, auth, user_id=None, .format(user_id, self._id) ) contributor = self.add_unregistered_contributor( - fullname=full_name, email=contributor.username, auth=auth, - send_email=send_email, permissions=permissions, - visible=bibliographic, existing_user=contributor, save=True + fullname=full_name, + email=contributor.username, + auth=auth, + send_email=send_email, + permissions=permissions, + visible=bibliographic, + existing_user=contributor, ) else: @@ -1599,9 +1628,12 @@ def add_contributor_registered_or_not(self, auth, user_id=None, send_email=send_email, permissions=permissions, save=True) else: contributor = self.add_unregistered_contributor( - fullname=full_name, email=email, auth=auth, - send_email=send_email, permissions=permissions, - visible=bibliographic, save=True + fullname=full_name, + email=email, + auth=auth, + send_email=send_email, + permissions=permissions, + visible=bibliographic ) auth.user.email_last_sent = timezone.now() diff --git a/osf_tests/conftest.py b/osf_tests/conftest.py index af71872cb41..a0fafde4231 100644 --- a/osf_tests/conftest.py +++ b/osf_tests/conftest.py @@ -4,8 +4,6 @@ from framework.django.handlers import handlers as django_handlers from framework.flask import rm_handlers from website.app import init_app -from website.project.signals import contributor_added -from website.project.views.contributor import notify_added_contributor # NOTE: autouse so that ADDONS_REQUESTED gets set on website.settings @@ -37,13 +35,3 @@ def request_context(app): context.push() yield context context.pop() - -DISCONNECTED_SIGNALS = { - # disconnect notify_add_contributor so that add_contributor does not send "fake" emails in tests - contributor_added: [notify_added_contributor] -} -@pytest.fixture(autouse=True) -def disconnected_signals(): - for signal in DISCONNECTED_SIGNALS: - for receiver in DISCONNECTED_SIGNALS[signal]: - signal.disconnect(receiver) diff --git a/osf_tests/test_comment.py b/osf_tests/test_comment.py index 7f247d403d5..bb11d34591c 100644 --- 
a/osf_tests/test_comment.py +++ b/osf_tests/test_comment.py @@ -66,8 +66,12 @@ def comment(user, project): def unreg_contributor(project): unreg_user = UnregUserFactory() unreg_user.save() - project.add_unregistered_contributor(unreg_user.fullname, unreg_user.email, Auth(project.creator), - permissions=permissions.READ, save=True) + project.add_unregistered_contributor( + unreg_user.fullname, + unreg_user.email, + Auth(project.creator), + permissions=permissions.READ + ) return unreg_user diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index c5e25b4b30e..f00f822704a 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -1212,7 +1212,7 @@ class TestNodeAddContributorRegisteredOrNot: def test_add_contributor_user_id(self, user, node): registered_user = UserFactory() - contributor_obj = node.add_contributor_registered_or_not(auth=Auth(user), user_id=registered_user._id, save=True) + contributor_obj = node.add_contributor_registered_or_not(auth=Auth(user), user_id=registered_user._id) contributor = contributor_obj.user assert contributor in node.contributors assert contributor.is_registered is True @@ -1229,12 +1229,12 @@ def test_add_contributor_registered_or_not_unreg_user_without_unclaimed_records( def test_add_contributor_user_id_already_contributor(self, user, node): with pytest.raises(ValidationError) as excinfo: - node.add_contributor_registered_or_not(auth=Auth(user), user_id=user._id, save=True) + node.add_contributor_registered_or_not(auth=Auth(user), user_id=user._id) assert 'is already a contributor' in str(excinfo.value) def test_add_contributor_invalid_user_id(self, user, node): with pytest.raises(ValueError) as excinfo: - node.add_contributor_registered_or_not(auth=Auth(user), user_id='abcde', save=True) + node.add_contributor_registered_or_not(auth=Auth(user), user_id='abcde') assert 'was not found' in str(excinfo.value) def test_add_contributor_fullname_email(self, user, node): diff --git a/tests/base.py b/tests/base.py index 2c36dd801eb..448dc517a39 100644 --- a/tests/base.py +++ b/tests/base.py @@ -100,8 +100,6 @@ class AppTestCase(unittest.TestCase): PUSH_CONTEXT = True DISCONNECTED_SIGNALS = { - # disconnect notify_add_contributor so that add_contributor does not send "fake" emails in tests - contributor_added: [notify_added_contributor] } def setUp(self): @@ -280,8 +278,6 @@ class NotificationTestCase(OsfTestCase): """ DISCONNECTED_SIGNALS = { # disconnect signals so that add_contributor does not send "fake" emails in tests - contributor_added: [notify_added_contributor, subscribe_contributor], - project_created: [subscribe_creator] } def setUp(self): diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index e8459920de7..003a8f886ad 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -432,10 +432,6 @@ def test_add_contribs_to_multiple_nodes(self): child.reload() assert child.contributors.count() == n_contributors_pre + len(payload['users']) - def tearDown(self): - super().tearDown() - contributor_added.disconnect(notify_added_contributor) - @mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) diff --git a/tests/test_preprints.py b/tests/test_preprints.py index 13d44d362b5..728fb1fe1c8 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -570,19 +570,19 @@ class TestPreprintAddContributorRegisteredOrNot: def test_add_contributor_user_id(self, user, preprint): registered_user = 
UserFactory() - contributor_obj = preprint.add_contributor_registered_or_not(auth=Auth(user), user_id=registered_user._id, save=True) + contributor_obj = preprint.add_contributor_registered_or_not(auth=Auth(user), user_id=registered_user._id) contributor = contributor_obj.user assert contributor in preprint.contributors assert contributor.is_registered is True def test_add_contributor_user_id_already_contributor(self, user, preprint): with pytest.raises(ValidationError) as excinfo: - preprint.add_contributor_registered_or_not(auth=Auth(user), user_id=user._id, save=True) + preprint.add_contributor_registered_or_not(auth=Auth(user), user_id=user._id) assert 'is already a contributor' in excinfo.value.message def test_add_contributor_invalid_user_id(self, user, preprint): with pytest.raises(ValueError) as excinfo: - preprint.add_contributor_registered_or_not(auth=Auth(user), user_id='abcde', save=True) + preprint.add_contributor_registered_or_not(auth=Auth(user), user_id='abcde') assert 'was not found' in str(excinfo.value) def test_add_contributor_fullname_email(self, user, preprint): diff --git a/tests/test_project_contibutor_views.py b/tests/test_project_contibutor_views.py index 4b33c890784..4f393007b2b 100644 --- a/tests/test_project_contibutor_views.py +++ b/tests/test_project_contibutor_views.py @@ -326,9 +326,9 @@ def test_contributor_manage_reorder(self): ) # Add a non-registered user unregistered_user = project.add_unregistered_contributor( - fullname=fake.name(), email=fake_email(), + fullname=fake.name(), + email=fake_email(), auth=self.consolidate_auth1, - save=True, ) url = project.api_url + 'contributors/manage/' @@ -554,7 +554,6 @@ def test_get_contributors_abbrev(self): fullname='Jalen Hurts', email='gobirds@eagle.fly', auth=self.consolidate_auth1, - save=True, ) res = self.app.get( diff --git a/tests/utils.py b/tests/utils.py index 6d5f934d8ba..6e3ec4fdfcd 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -3,6 +3,7 @@ import functools from unittest import mock +from django.apps import apps from django.http import HttpRequest from django.utils import timezone @@ -251,3 +252,36 @@ def wrapper(): def run_celery_tasks(): yield celery_teardown_request() + + +@contextlib.contextmanager +def capture_notifications(): + """ + Context manager to capture NotificationType emits without interfering with ORM calls. + Yields a list of captured emits: + [{'type': , 'args': ..., 'kwargs': ...}, ...] 
+ """ + NotificationType = apps.get_model('osf', 'NotificationType') + real_get = NotificationType.objects.get # Save the real .get() + + captured = [] + + def side_effect(*args, **kwargs): + notifier = real_get(*args, **kwargs) # Call the real .get() + original_emit = notifier.emit + + def wrapped_emit(*emit_args, **emit_kwargs): + captured.append({ + 'type': notifier.name, + 'args': emit_args, + 'kwargs': emit_kwargs + }) + return original_emit(*emit_args, **emit_kwargs) + + notifier.emit = wrapped_emit + return notifier + + with mock.patch('osf.models.notification_type.NotificationType.objects.get', side_effect=side_effect): + yield captured + + diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 6ef27729ebb..f6dfa27aca4 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -617,6 +617,7 @@ def check_email_throttle(node, contributor, throttle=None): return False # No previous sent notification, not throttled +@contributor_added.connect def notify_added_contributor(node, contributor, auth=None, email_template=None, *args, **kwargs): """Send a notification to a contributor who was just added to a node. @@ -629,37 +630,42 @@ def notify_added_contributor(node, contributor, auth=None, email_template=None, node (AbstractNode): The node to which the contributor was added. contributor (OSFUser): The user being added. auth (Auth, optional): Authorization context. - email_template (str, optional): Template identifier (default: 'default'). + email_template (str, optional): Template identifier. throttle (int, optional): Throttle period in seconds. """ - logo = settings.OSF_LOGO if check_email_throttle_claim_email(node, contributor): return if email_template == 'false': return + # Default values notification_type = email_template or NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT - if notification_type == 'default': - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT - if notification_type == 'draft_registration': - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + logo = settings.OSF_LOGO - if node and getattr(node, 'has_linked_published_preprints', None): - notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF - logo = settings.OSF_PREPRINTS_LOGO + # Use match for notification type/logic + match (getattr(node, 'has_linked_published_preprints', None), notification_type): + case (True, _): + notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF + logo = settings.OSF_PREPRINTS_LOGO + case (_, 'default'): + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT + case (_, 'preprint'): + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT + case (_, 'draft_registration'): + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + case _: + # use whatever was passed or default above + raise NotImplementedError(f'email_template: {email_template} not implemented.') - provider = node.provider - NotificationType.objects.get( - name=notification_type - ).emit( + NotificationType.objects.get(name=notification_type).emit( user=contributor, event_context={ 'user': contributor.id, 'node': node.id, - 'referrer_name': auth.user.fullname if auth else '', - 'is_initiator': getattr(auth, 'user', False) == contributor.id, + 'referrer_name': getattr(getattr(auth, 'user', None), 'fullname', '') if auth else '', + 
'is_initiator': getattr(getattr(auth, 'user', None), 'id', None) == contributor.id if auth else False, 'all_global_subscriptions_none': False, - 'branded_service': getattr(provider, 'id', None), + 'branded_service': getattr(getattr(node, 'provider', None), 'id', None), 'can_change_preferences': False, 'logo': logo, 'osf_contact_email': settings.OSF_CONTACT_EMAIL, From 7f3a6681d8430bff02866cd69ab37ce2d6cc83bd Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Mon, 21 Jul 2025 16:59:37 +0300 Subject: [PATCH 090/176] [ENG-8936] API: Allow /v2/users/me/preprints list view to filter by tags (#11232) --- api/base/filters.py | 3 +++ api/preprints/serializers.py | 1 + 2 files changed, 4 insertions(+) diff --git a/api/base/filters.py b/api/base/filters.py index 61f0a2dd36c..7a5e6ed2450 100644 --- a/api/base/filters.py +++ b/api/base/filters.py @@ -605,6 +605,9 @@ def postprocess_query_param(self, key, field_name, operation): if field_name == 'subjects': self.postprocess_subject_query_param(operation) + if field_name == 'tags': + super().postprocess_query_param(key, field_name, operation) + def preprints_queryset(self, base_queryset, auth_user, allow_contribs=True, public_only=False, latest_only=False): preprints = Preprint.objects.can_view( base_queryset=base_queryset, diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py index e9dc8f0cbdf..73e28d217d2 100644 --- a/api/preprints/serializers.py +++ b/api/preprints/serializers.py @@ -102,6 +102,7 @@ class PreprintSerializer(TaxonomizableSerializerMixin, MetricsSerializerMixin, J 'subjects', 'reviews_state', 'node_is_public', + 'tags', ]) available_metrics = frozenset([ 'downloads', From d0cf3c76c43740f50b00f981305824d7ae2cb295 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 21 Jul 2025 09:31:40 -0400 Subject: [PATCH 091/176] clean-up syntax for tests --- addons/base/views.py | 1 + .../views/test_node_contributors_list.py | 53 +++++--- api_tests/nodes/views/test_node_forks_list.py | 19 ++- notifications.yaml | 15 ++- osf/models/collection_submission.py | 2 +- osf/models/mixins.py | 4 +- osf/models/notification_type.py | 2 +- osf/utils/machines.py | 6 +- ...cation_subscriptions_from_registrations.py | 39 ------ tests/base.py | 24 ---- tests/test_events.py | 14 +- website/project/views/contributor.py | 121 +++++++++--------- .../emails/access_request_submitted.html.mako | 4 +- .../project_affiliation_changed.html.mako | 2 +- 14 files changed, 128 insertions(+), 178 deletions(-) delete mode 100644 scripts/remove_notification_subscriptions_from_registrations.py diff --git a/addons/base/views.py b/addons/base/views.py index e6d19d02758..c621658287c 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -598,6 +598,7 @@ def create_waterbutler_log(payload, **kwargs): # Action failed but our function succeeded # Bail out to avoid file_signals return {'status': 'success'} + else: node.create_waterbutler_log(auth, action, payload) diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index fab0d4913f9..d2ab7dd1084 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -6,6 +6,7 @@ from api.base.settings.defaults import API_BASE from api.nodes.serializers import NodeContributorsCreateSerializer from framework.auth.core import Auth +from osf.models import NotificationType from osf_tests.factories import ( fake_email, AuthUserFactory, @@ -1291,32 +1292,42 @@ def 
test_add_contributor_signal_preprint_email_disallowed( def test_add_unregistered_contributor_sends_email( self, mock_send_grid, app, user, url_project_contribs ): - url = f'{url_project_contribs}?send_email=default' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': {'full_name': 'Kanye West', 'email': 'kanye@west.com'}, - } - } - res = app.post_json_api(url, payload, auth=user.auth) - assert res.status_code == 201 - assert mock_send_grid.call_count == 1 + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_project_contribs}?send_email=default', + { + 'data': { + 'type': 'contributors', + 'attributes': {'full_name': 'Kanye West', 'email': 'kanye@west.com'}, + } + }, + auth=user.auth + ) + assert res.status_code == 201 + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT @mock.patch('website.project.signals.unreg_contributor_added.send') def test_add_unregistered_contributor_signal_if_default( self, mock_send, app, user, url_project_contribs ): - url = f'{url_project_contribs}?send_email=default' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': {'full_name': 'Kanye West', 'email': 'kanye@west.com'}, - } - } - res = app.post_json_api(url, payload, auth=user.auth) - args, kwargs = mock_send.call_args - assert res.status_code == 201 - assert 'default' == kwargs['email_template'] + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_project_contribs}?send_email=default', + { + 'data': { + 'type': 'contributors', + 'attributes': { + 'full_name': 'Kanye West', + 'email': 'kanye@west.com' + } + } + }, + auth=user.auth + ) + assert res.status_code == 201 + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_add_unregistered_contributor_signal_preprint_email_disallowed( self, app, user, url_project_contribs diff --git a/api_tests/nodes/views/test_node_forks_list.py b/api_tests/nodes/views/test_node_forks_list.py index 8fc9f9eb35b..aa0c5570320 100644 --- a/api_tests/nodes/views/test_node_forks_list.py +++ b/api_tests/nodes/views/test_node_forks_list.py @@ -3,6 +3,7 @@ from api.base.settings.defaults import API_BASE from framework.auth.core import Auth +from osf.models import NotificationType from osf_tests.factories import ( NodeFactory, ProjectFactory, @@ -14,6 +15,7 @@ from osf.utils import permissions from api.nodes.serializers import NodeForksSerializer +from tests.utils import capture_notifications @pytest.fixture() @@ -421,15 +423,18 @@ def test_send_email_success( self, app, user, public_project_url, fork_data_with_title, public_project, mock_send_grid): - res = app.post_json_api( - public_project_url, - fork_data_with_title, - auth=user.auth) + with capture_notifications() as notifications: + res = app.post_json_api( + public_project_url, + fork_data_with_title, + auth=user.auth + ) assert res.status_code == 201 assert res.json['data']['id'] == public_project.forks.first()._id - call_args = mock_send_grid.call_args[1] - assert call_args['to_addr'] == user.email - assert call_args['subject'] == 'Your fork has completed' + assert len(notifications) == 1 + assert notifications[0]['kwargs']['user'] == user + assert notifications[0]['kwargs']['event_context']['guid'] == public_project.forks.first()._id + assert notifications[0]['type'] == NotificationType.Type.NODE_FORK_COMPLETED def test_send_email_failed( self, app, user, public_project_url, diff --git 
a/notifications.yaml b/notifications.yaml index 5c2fc55f770..e262c2f8268 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -23,11 +23,6 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/password_reset.html.mako' notification_freq_default: instantly - - name: user_contributor_added_default - __docs__: ... - object_content_type_model_name: osfuser - template: 'website/templates/emails/contributor_added_default.html.mako' - notification_freq_default: instantly - name: user_contributor_added_draft_registration __docs__: ... object_content_type_model_name: osfuser @@ -125,12 +120,12 @@ notification_types: notification_freq_default: instantly #### PROVIDER - - name: new_pending_submissions + - name: provider_new_pending_submissions __docs__: ... object_content_type_model_name: abstractprovider template: 'website/templates/emails/new_pending_submissions.html.mako' notification_freq_default: instantly - - name: new_pending_withdraw_requests + - name: provider_new_pending_withdraw_requests __docs__: ... object_content_type_model_name: abstractprovider template: 'website/templates/emails/new_pending_submissions.html.mako' @@ -140,6 +135,7 @@ notification_types: object_content_type_model_name: abstractprovider template: 'website/templates/emails/contributor_added_preprints.html.mako' notification_freq_default: instantly + #### NODE - name: node_file_updated __docs__: ... @@ -186,6 +182,11 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/node_request_institutional_access_request.html.mako' notification_freq_default: instantly + - name: node_contributor_added_default + __docs__: This email notifies the user that they have been added as a contributor to a node. + object_content_type_model_name: abstractnode + template: 'website/templates/emails/contributor_added_default.html.mako' + notification_freq_default: instantly - name: node_contributor_added_access_request __docs__: ... 
object_content_type_model_name: abstractnode diff --git a/osf/models/collection_submission.py b/osf/models/collection_submission.py index 42976319369..41b05862a02 100644 --- a/osf/models/collection_submission.py +++ b/osf/models/collection_submission.py @@ -123,7 +123,7 @@ def _notify_contributors_pending(self, event_data): def _notify_moderators_pending(self, event_data): user = event_data.kwargs.get('user', None) NotificationType.objects.get( - name=NotificationType.Type.NEW_PENDING_SUBMISSIONS, + name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, ).emit( user=user, subscribed_object=self.guid.referent, diff --git a/osf/models/mixins.py b/osf/models/mixins.py index cb43c5c87d7..e22fd7b97f5 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -357,8 +357,8 @@ def remove_affiliated_institution(self, inst, user, save=False, log=True, notify ).emit( user=user, event_context={ - 'user': user, - 'node': self, + 'user_fullname': user.fullname, + 'node_title': self.title, } ) diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index bcb6f9f25d9..c71e0b45200 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -48,7 +48,6 @@ class Type(str, Enum): USER_FORGOT_PASSWORD_INSTITUTION = 'user_forgot_password_institution' USER_REQUEST_EXPORT = 'user_request_export' USER_CONTRIBUTOR_ADDED_OSF_PREPRINT = 'user_contributor_added_osf_preprint' - USER_CONTRIBUTOR_ADDED_DEFAULT = 'user_contributor_added_default' USER_DUPLICATE_ACCOUNTS_OSF4I = 'user_duplicate_accounts_osf4i' USER_EXTERNAL_LOGIN_LINK_SUCCESS = 'user_external_login_link_success' USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL = 'user_registration_bulk_upload_failure_all' @@ -90,6 +89,7 @@ class Type(str, Enum): NODE_FORK_FAILED = 'node_fork_failed' NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST = 'node_request_institutional_access_request' NODE_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'node_contributor_added_access_request' + NODE_CONTRIBUTOR_ADDED_DEFAULT = 'node_contributor_added_default' NODE_PENDING_EMBARGO_ADMIN = 'node_pending_embargo_admin' NODE_PENDING_EMBARGO_NON_ADMIN = 'node_pending_embargo_non_admin' NODE_PENDING_RETRACTION_NON_ADMIN = 'node_pending_retraction_non_admin' diff --git a/osf/utils/machines.py b/osf/utils/machines.py index d29abdbb0b4..2e45827ad0a 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -236,6 +236,7 @@ def notify_submit(self, ev): """ Notify admins that someone is requesting access """ context = self.get_context() + print('context', context) context['contributors_url'] = f'{self.machineable.target.absolute_url}contributors/' context['project_settings_url'] = f'{self.machineable.target.absolute_url}settings/' from osf.models.notification_type import NotificationType @@ -285,8 +286,9 @@ def notify_edit_comment(self, ev): def get_context(self): return { - 'node': self.machineable.target, - 'requester': self.machineable.creator + 'node_title': self.machineable.target.title, + 'node_absolute_url': self.machineable.target.absolute_url, + 'requester_absolute_url': self.machineable.creator.absolute_url, } diff --git a/scripts/remove_notification_subscriptions_from_registrations.py b/scripts/remove_notification_subscriptions_from_registrations.py deleted file mode 100644 index 94b20a19a93..00000000000 --- a/scripts/remove_notification_subscriptions_from_registrations.py +++ /dev/null @@ -1,39 +0,0 @@ -""" Script for removing NotificationSubscriptions from registrations. - Registrations shouldn't have them! 
-""" -import logging -import sys - -import django -django.setup() - -from website.app import init_app -from django.apps import apps - -logger = logging.getLogger(__name__) - - -def remove_notification_subscriptions_from_registrations(dry_run=True): - Registration = apps.get_model('osf.Registration') - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - - notifications_to_delete = NotificationSubscriptionLegacy.objects.filter(node__type='osf.registration') - registrations_affected = Registration.objects.filter( - id__in=notifications_to_delete.values_list( - 'node_id', flat=True - ) - ) - logger.info(f'{notifications_to_delete.count()} NotificationSubscriptions will be deleted.') - logger.info('{} Registrations will be affected: {}'.format( - registrations_affected.count(), - list(registrations_affected.values_list('guids___id', flat=True))) - ) - - if not dry_run: - notifications_to_delete.delete() - logger.info('Registration Notification Subscriptions removed.') - -if __name__ == '__main__': - dry_run = '--dry' in sys.argv - init_app(routes=False) - remove_notification_subscriptions_from_registrations(dry_run=dry_run) diff --git a/tests/base.py b/tests/base.py index 448dc517a39..e1024f8e266 100644 --- a/tests/base.py +++ b/tests/base.py @@ -99,8 +99,6 @@ class AppTestCase(unittest.TestCase): """ PUSH_CONTEXT = True - DISCONNECTED_SIGNALS = { - } def setUp(self): super().setUp() @@ -120,9 +118,6 @@ def setUp(self): self.context.push() with self.context: celery_before_request() - for signal in self.DISCONNECTED_SIGNALS: - for receiver in self.DISCONNECTED_SIGNALS[signal]: - signal.disconnect(receiver) def tearDown(self): super().tearDown() @@ -130,9 +125,6 @@ def tearDown(self): return with mock.patch('website.mailchimp_utils.get_mailchimp_api'): self.context.pop() - for signal in self.DISCONNECTED_SIGNALS: - for receiver in self.DISCONNECTED_SIGNALS[signal]: - signal.connect(receiver) class ApiAppTestCase(unittest.TestCase): @@ -271,22 +263,6 @@ class AdminTestCase(DbTestCase, DjangoTestCase, SearchTestCase): pass -class NotificationTestCase(OsfTestCase): - """An `OsfTestCase` to use when testing specific subscription behavior. - Use when you'd like to manually create all Node subscriptions and subscriptions - for added contributors yourself, and not rely on automatically added ones. 
- """ - DISCONNECTED_SIGNALS = { - # disconnect signals so that add_contributor does not send "fake" emails in tests - } - - def setUp(self): - super().setUp() - - def tearDown(self): - super().tearDown() - - class ApiWikiTestCase(ApiTestCase): def setUp(self): diff --git a/tests/test_events.py b/tests/test_events.py index c9e30273b49..e06559ebbb4 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -12,7 +12,7 @@ from framework.auth import Auth from osf_tests import factories from osf.utils.permissions import WRITE -from tests.base import OsfTestCase, NotificationTestCase +from tests.base import OsfTestCase email_transactional = 'email_transactional' email_digest = 'email_digest' @@ -151,7 +151,7 @@ def test_file_updated(self, mock_notify): assert mock_notify.called -class TestFileAdded(NotificationTestCase): +class TestFileAdded(OsfTestCase): def setUp(self): super().setUp() self.user = factories.UserFactory() @@ -178,7 +178,7 @@ def test_file_added(self, mock_notify): assert mock_notify.called -class TestFileRemoved(NotificationTestCase): +class TestFileRemoved(OsfTestCase): def setUp(self): super().setUp() self.user = factories.UserFactory() @@ -213,7 +213,7 @@ def test_file_removed(self, mock_notify): assert mock_notify.called -class TestFolderCreated(NotificationTestCase): +class TestFolderCreated(OsfTestCase): def setUp(self): super().setUp() self.user = factories.UserFactory() @@ -286,7 +286,7 @@ def test_rename_folder_text(self): assert self.event.text_message == 'renamed folder "/One/Two/Three" to "/One/Two/Four".' -class TestFileMoved(NotificationTestCase): +class TestFileMoved(OsfTestCase): def setUp(self): super().setUp() self.user_1 = factories.AuthUserFactory() @@ -379,7 +379,7 @@ def test_remove_user_sent_once(self, mock_store): assert 1 == mock_store.call_count -class TestFileCopied(NotificationTestCase): +class TestFileCopied(OsfTestCase): # Test the copying of files def setUp(self): super().setUp() @@ -460,7 +460,7 @@ def test_user_performing_action_no_email(self, mock_store): assert 0 == mock_store.call_count -class TestCategorizeUsers(NotificationTestCase): +class TestCategorizeUsers(OsfTestCase): def setUp(self): super().setUp() self.user_1 = factories.AuthUserFactory() diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index f6dfa27aca4..d44d1b949b3 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -388,7 +388,6 @@ def project_remove_contributor(auth, **kwargs): return redirect_url -# TODO: consider moving this into utils def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 3600): """ A registered user claiming the unclaimed user account as an contributor to a project. @@ -470,67 +469,64 @@ def check_email_throttle_claim_email(node, contributor): else: contributor.contributor_added_email_records[node._id] = {} -# TODO: consider moving this into utils -def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 3600, email_template='default'): +def send_claim_email( + email, unclaimed_user, node, notify=True, throttle=24 * 3600, email_template='default' +): """ - Unregistered user claiming a user account as an contributor to a project. Send an email for claiming the account. - Either sends to the given email or the referrer's email, depending on the email address provided. - - :param str email: The address given in the claim user form - :param User unclaimed_user: The User record to claim. 
- :param Node node: The node where the user claimed their account. - :param bool notify: If True and an email is sent to the referrer, an email - will also be sent to the invited user about their pending verification. - :param int throttle: Time period (in seconds) after the referrer is - emailed during which the referrer will not be emailed again. - :param str email_template: the email template to use - :return - :raise http_status.HTTP_400_BAD_REQUEST + Send a claim email to an unregistered contributor or the referrer, depending on the scenario. + Args: + email (str): Email address provided for claim. + unclaimed_user (User): The user record to claim. + node (Node): The node where the user claimed their account. + notify (bool): Whether to notify the invited user about their pending verification. + throttle (int): Throttle period (in seconds) to prevent repeated emails. + email_template (str): The email template identifier. + Returns: + str: The address the notification was sent to. + Raises: + HTTPError: If the throttle period has not expired. """ claimer_email = email.lower().strip() unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key) referrer = OSFUser.load(unclaimed_record['referrer_id']) - # Option 1: - # When adding the contributor, the referrer provides both name and email. - # The given email is the same provided by user, just send to that email. logo = None + + # Option 1: Referrer provided name and email (send to claimer) if unclaimed_record.get('email') == claimer_email: - # check email template for branded preprints - if email_template == 'preprint': - if node.provider.is_default: - notification_type = NotificationType.Type.USER_INVITE_OSF_PREPRINT - logo = settings.OSF_PREPRINTS_LOGO - else: - notification_type = NotificationType.Type.PROVIDER_USER_INVITE_PREPRINT - logo = node.provider._id - elif email_template == 'draft_registration': - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION - else: - notification_type = NotificationType.Type.USER_INVITE_DEFAULT + # Select notification type and logo using match + match email_template: + case 'preprint': + if getattr(node.provider, 'is_default', False): + notification_type = NotificationType.Type.USER_INVITE_OSF_PREPRINT + logo = settings.OSF_PREPRINTS_LOGO + else: + notification_type = NotificationType.Type.PROVIDER_USER_INVITE_PREPRINT + logo = getattr(node.provider, '_id', None) + case 'draft_registration': + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + case _: + notification_type = NotificationType.Type.USER_INVITE_DEFAULT - to_addr = claimer_email unclaimed_record['claimer_email'] = claimer_email unclaimed_user.save() - # Option 2: - # TODO: [new improvement ticket] this option is disabled from preprint but still available on the project page - # When adding the contributor, the referred only provides the name. - # The account is later claimed by some one who provides the email. - # Send email to the referrer and ask her/him to forward the email to the user. 
+ + # Option 2: Referrer only provided name (send to referrer) else: - # check throttle timestamp = unclaimed_record.get('last_sent') if not throttle_period_expired(timestamp, throttle): - raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data=dict( - message_long='User account can only be claimed with an existing user once every 24 hours' - )) - # roll the valid token for each email, thus user cannot change email and approve a different email address + raise HTTPError( + http_status.HTTP_400_BAD_REQUEST, + data={'message_long': 'User account can only be claimed with an existing user once every 24 hours'} + ) verification_key = generate_verification_key(verification_type='claim') - unclaimed_record['last_sent'] = get_timestamp() - unclaimed_record['token'] = verification_key['token'] - unclaimed_record['expires'] = verification_key['expires'] - unclaimed_record['claimer_email'] = claimer_email + unclaimed_record.update({ + 'last_sent': get_timestamp(), + 'token': verification_key['token'], + 'expires': verification_key['expires'], + 'claimer_email': claimer_email, + }) unclaimed_user.save() if notify: @@ -548,8 +544,8 @@ def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 360 'osf_contact_email': settings.OSF_CONTACT_EMAIL, } ) + notification_type = NotificationType.Type.USER_FORWARD_INVITE - to_addr = referrer.username NotificationType.objects.get(name=notification_type).emit( user=referrer, @@ -564,8 +560,6 @@ def send_claim_email(email, unclaimed_user, node, notify=True, throttle=24 * 360 } ) - return to_addr - def check_email_throttle(node, contributor, throttle=None): """ @@ -631,7 +625,6 @@ def notify_added_contributor(node, contributor, auth=None, email_template=None, contributor (OSFUser): The user being added. auth (Auth, optional): Authorization context. email_template (str, optional): Template identifier. - throttle (int, optional): Throttle period in seconds. 
""" if check_email_throttle_claim_email(node, contributor): return @@ -639,23 +632,23 @@ def notify_added_contributor(node, contributor, auth=None, email_template=None, return # Default values - notification_type = email_template or NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT + notification_type = email_template or NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT logo = settings.OSF_LOGO # Use match for notification type/logic - match (getattr(node, 'has_linked_published_preprints', None), notification_type): - case (True, _): - notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF - logo = settings.OSF_PREPRINTS_LOGO - case (_, 'default'): - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DEFAULT - case (_, 'preprint'): - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT - case (_, 'draft_registration'): - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION - case _: - # use whatever was passed or default above - raise NotImplementedError(f'email_template: {email_template} not implemented.') + if notification_type == 'default': + notification_type = NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + elif notification_type == 'preprint': + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT + elif notification_type == 'draft_registration': + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + elif notification_type == 'access_request': + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST + elif getattr(node, 'has_linked_published_preprints', None): + notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF + logo = settings.OSF_PREPRINTS_LOGO + else: + raise NotImplementedError(f'email_template: {email_template} not implemented.') NotificationType.objects.get(name=notification_type).emit( user=contributor, diff --git a/website/templates/emails/access_request_submitted.html.mako b/website/templates/emails/access_request_submitted.html.mako index 0839a4e2b41..b3a5e831a02 100644 --- a/website/templates/emails/access_request_submitted.html.mako +++ b/website/templates/emails/access_request_submitted.html.mako @@ -6,9 +6,9 @@ <%! from website import settings %> - Hello ${admin.fullname},
    + Hello ${admin_fullname},

    - ${requester.fullname} has requested access to your ${node.project_or_component} "${node.title}."
    + ${requester_fullname} has requested access to your ${node_project_or_component} "${node_title}."

    To review the request, click here to allow or deny access and configure permissions.

diff --git a/website/templates/emails/project_affiliation_changed.html.mako b/website/templates/emails/project_affiliation_changed.html.mako
index 30c08ae7e69..78f00ff2317 100644
--- a/website/templates/emails/project_affiliation_changed.html.mako
+++ b/website/templates/emails/project_affiliation_changed.html.mako
@@ -6,7 +6,7 @@
    Hello ${user_fullname},<br>

    An Institutional admin has made changes to the affiliations of your project: - ${node_title}.
    + ${node_title}.

    Want more information? Visit OSF to learn about OSF, or COS for information about its supporting organization, From 8c56b037f0448a718e3bb52cd113d0a1b35b2b02 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 21 Jul 2025 12:47:18 -0400 Subject: [PATCH 092/176] add Schema response to notificationTypes --- api/institutions/authentication.py | 29 ++++++++++++++++------------- notifications.yaml | 20 ++++++++++++++++++++ osf/models/notification_type.py | 10 ++++------ osf/models/schema_response.py | 26 ++++++++++++++++---------- 4 files changed, 56 insertions(+), 29 deletions(-) diff --git a/api/institutions/authentication.py b/api/institutions/authentication.py index c28905fae9c..9e02a7fc7ec 100644 --- a/api/institutions/authentication.py +++ b/api/institutions/authentication.py @@ -23,7 +23,6 @@ from osf.models import Institution, NotificationType from osf.models.institution import SsoFilterCriteriaAction -from website.mails import send_mail, DUPLICATE_ACCOUNTS_OSF4I, ADD_SSO_EMAIL_OSF4I from website.settings import OSF_SUPPORT_EMAIL, DOMAIN from website.util.metrics import institution_source_tag @@ -350,13 +349,15 @@ def authenticate(self, request): if email_to_add: assert not is_created and email_to_add == sso_email user.emails.create(address=email_to_add) - send_mail( - to_addr=user.username, - mail=ADD_SSO_EMAIL_OSF4I, + NotificationType.objects.get( + name=NotificationType.Type.USER_WELCOME_OSF4I, + ).emit( user=user, - email_to_add=email_to_add, - domain=DOMAIN, - osf_support_email=OSF_SUPPORT_EMAIL, + event_context={ + 'email_to_add': email_to_add, + 'domain': DOMAIN, + 'osf_support_email': OSF_SUPPORT_EMAIL, + }, ) # Inform the user that a potential duplicate account is found @@ -367,13 +368,15 @@ def authenticate(self, request): duplicate_user.remove_sso_identity_from_affiliation(institution) if secondary_institution: duplicate_user.remove_sso_identity_from_affiliation(secondary_institution) - send_mail( - to_addr=user.username, - mail=DUPLICATE_ACCOUNTS_OSF4I, + NotificationType.objects.get( + name=NotificationType.Type.USER_DUPLICATE_ACCOUNTS_OSF4I, + ).emit( user=user, - duplicate_user=duplicate_user, - domain=DOMAIN, - osf_support_email=OSF_SUPPORT_EMAIL, + event_context={ + 'duplicate_user': duplicate_user, + 'domain': DOMAIN, + 'osf_support_email': OSF_SUPPORT_EMAIL, + }, ) # Affiliate the user to the primary institution if not previously affiliated diff --git a/notifications.yaml b/notifications.yaml index e262c2f8268..e181b5c8ac9 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -233,6 +233,26 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/fork_completed.html.mako' notification_freq_default: instantly + - name: node_schema_response_initiated + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/updates_initiated.html.mako' + notification_freq_default: instantly + - name: node_schema_response_submitted + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/updates_pending_approval.html.mako' + notification_freq_default: instantly + - name: node_schema_response_approved + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/updates_approved.html.mako' + notification_freq_default: instantly + - name: node_schema_response_rejected + __docs__: ... 
+ object_content_type_model_name: abstractnode + template: 'website/templates/emails/updates_rejected.html.mako' + notification_freq_default: instantly #### PREPRINT - name: pending_retraction_admin diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index c71e0b45200..d9fc80b4925 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -105,6 +105,10 @@ class Type(str, Enum): NODE_ADDON_FILE_RENAMED = 'node_addon_file_renamed' NODE_ADDON_FILE_MOVED = 'node_addon_file_moved' NODE_ADDON_FILE_REMOVED = 'node_addon_file_removed' + NODE_SCHEMA_RESPONSE_REJECTED = 'node_schema_response_rejected' + NODE_SCHEMA_RESPONSE_APPROVED = 'node_schema_response_approved' + NODE_SCHEMA_RESPONSE_SUBMITTED = 'node_schema_response_submitted' + NODE_SCHEMA_RESPONSE_INITIATED = 'node_schema_response_initiated' # Provider notifications PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' @@ -135,12 +139,6 @@ class Type(str, Enum): COLLECTION_SUBMISSION_REJECTED = 'collection_submission_rejected' COLLECTION_SUBMISSION_CANCEL = 'collection_submission_cancel' - # Schema Response notifications - SCHEMA_RESPONSE_REJECTED = 'schema_response_rejected' - SCHEMA_RESPONSE_APPROVED = 'schema_response_approved' - SCHEMA_RESPONSE_SUBMITTED = 'schema_response_submitted' - SCHEMA_RESPONSE_INITIATED = 'schema_response_initiated' - REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = 'registration_bulk_upload_failure_duplicates' @property diff --git a/osf/models/schema_response.py b/osf/models/schema_response.py index 4fa5289f2d4..8d305254804 100644 --- a/osf/models/schema_response.py +++ b/osf/models/schema_response.py @@ -9,6 +9,7 @@ from framework.exceptions import PermissionsError from osf.exceptions import PreviousSchemaResponseError, SchemaResponseStateError, SchemaResponseUpdateError +from . 
import NotificationType from .base import BaseModel, ObjectIDMixin from .metaschema import RegistrationSchemaBlock from .schema_response_block import SchemaResponseBlock @@ -17,16 +18,15 @@ from osf.utils.machines import ApprovalsMachine from osf.utils.workflows import ApprovalStates, SchemaResponseTriggers -from website.mails import mails from website.reviews.signals import reviews_email_submit_moderators_notifications from website.settings import DOMAIN EMAIL_TEMPLATES_PER_EVENT = { - 'create': mails.SCHEMA_RESPONSE_INITIATED, - 'submit': mails.SCHEMA_RESPONSE_SUBMITTED, - 'accept': mails.SCHEMA_RESPONSE_APPROVED, - 'reject': mails.SCHEMA_RESPONSE_REJECTED, + 'create': NotificationType.Type.NODE_SCHEMA_RESPONSE_INITIATED, + 'submit': NotificationType.Type.NODE_SCHEMA_RESPONSE_SUBMITTED, + 'accept': NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED, + 'reject': NotificationType.Type.NODE_SCHEMA_RESPONSE_REJECTED, } class SchemaResponse(ObjectIDMixin, BaseModel): @@ -500,11 +500,17 @@ def _notify_users(self, event, event_initiator): } for contributor, _ in self.parent.get_active_contributors_recursive(unique_users=True): - email_context['user'] = contributor - email_context['can_write'] = self.parent.has_permission(contributor, 'write') - email_context['is_approver'] = contributor in self.pending_approvers.all(), - email_context['is_initiator'] = contributor == event_initiator - mails.send_mail(to_addr=contributor.username, mail=template, **email_context) + email_context.update( + { + 'can_write': self.parent.has_permission(contributor, 'write'), + 'is_approver': contributor in self.pending_approvers.all(), + 'is_initiator': contributor == event_initiator, + } + ) + NotificationType.objects.get(name=template).emit( + user=contributor, + event_context=email_context + ) def _is_updated_response(response_block, new_response): From 4db559adc7f50c8fbb1cf1010893aa77f315fc72 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 21 Jul 2025 13:05:35 -0400 Subject: [PATCH 093/176] add NotificationTypes for desk messages --- notifications.yaml | 20 ++++++++++++++ .../commands/deactivate_requested_accounts.py | 26 ++++++++++--------- .../send_storage_exceeded_announcement.py | 17 ++++++------ 3 files changed, 43 insertions(+), 20 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index e181b5c8ac9..4ddb8758490 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -108,6 +108,21 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/initial_confirm.html.mako' notification_freq_default: instantly + - name: user_export_data_request + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/initial_confirm.html.mako' + notification_freq_default: instantly + - name: user_request_deactivation + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/initial_confirm.html.mako' + notification_freq_default: instantly + - name: user_storage_cap_exceeded_announcement + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/storage_cap_exceeded_announcement.html.mako' + notification_freq_default: instantly - name: external_confirm_success __docs__: ... object_content_type_model_name: osfuser @@ -329,3 +344,8 @@ notification_types: object_content_type_model_name: desk template: 'website/templates/emails/crossref_doi_error.html.mako' notification_freq_default: instantly + - name: desk_request_deactivation + __docs__: ... 
+    object_content_type_model_name: desk
+    template: 'website/templates/emails/support_request.html.mako'
+    notification_freq_default: instantly
diff --git a/osf/management/commands/deactivate_requested_accounts.py b/osf/management/commands/deactivate_requested_accounts.py
index 512fb34eeef..88919f6ffac 100644
--- a/osf/management/commands/deactivate_requested_accounts.py
+++ b/osf/management/commands/deactivate_requested_accounts.py
@@ -1,13 +1,11 @@
 import logging

-from website import mails
 from django.utils import timezone

 from framework.celery_tasks import app as celery_app
 from website.app import setup_django
 setup_django()
-from osf.models import OSFUser
-from website.settings import OSF_SUPPORT_EMAIL, OSF_CONTACT_EMAIL
+from osf.models import OSFUser, NotificationType

 from django.core.management.base import BaseCommand

 logger = logging.getLogger(__name__)
@@ -21,22 +19,26 @@ def deactivate_requested_accounts(dry_run=True):
         if user.has_resources:
             logger.info(f'OSF support is being emailed about deactivating the account of user {user._id}.')
             if not dry_run:
-                mails.send_mail(
-                    to_addr=OSF_SUPPORT_EMAIL,
-                    mail=mails.REQUEST_DEACTIVATION,
+                NotificationType.objects.get(
+                    name=NotificationType.Type.DESK_REQUEST_DEACTIVATION,
+                ).emit(
                     user=user,
-                    can_change_preferences=False,
+                    event_context={
+                        'can_change_preferences': False,
+                    }
                 )
         else:
             logger.info(f'Disabling user {user._id}.')
             if not dry_run:
                 user.deactivate_account()
-                mails.send_mail(
-                    to_addr=user.username,
-                    mail=mails.REQUEST_DEACTIVATION_COMPLETE,
+                user.is_registered = False
+                NotificationType.objects.get(
+                    name=NotificationType.Type.USER_REQUEST_DEACTIVATION_COMPLETE
+                ).emit(
                     user=user,
-                    contact_email=OSF_CONTACT_EMAIL,
-                    can_change_preferences=False,
+                    event_context={
+                        'can_change_preferences': False,
+                    }
                 )
                 user.contacted_deactivation = True

diff --git a/osf/management/commands/send_storage_exceeded_announcement.py b/osf/management/commands/send_storage_exceeded_announcement.py
index 4cee3ec6573..8c4a687f3ce 100644
--- a/osf/management/commands/send_storage_exceeded_announcement.py
+++ b/osf/management/commands/send_storage_exceeded_announcement.py
@@ -2,10 +2,9 @@
 import json

 from tqdm import tqdm
-from website import mails
 from django.core.management.base import BaseCommand

-from osf.models import Node, OSFUser
+from osf.models import Node, OSFUser, NotificationType

 logger = logging.getLogger(__name__)
 logging.basicConfig(level=logging.INFO)
@@ -40,13 +39,15 @@ def main(json_file, dry=False):
         if public_nodes or private_nodes:
             if not dry:
                 try:
-                    mails.send_mail(
-                        to_addr=user.username,
-                        mail=mails.STORAGE_CAP_EXCEEDED_ANNOUNCEMENT,
+                    NotificationType.objects.get(
+                        name=NotificationType.Type.USER_STORAGE_CAP_EXCEEDED_ANNOUNCEMENT
+                    ).emit(
                         user=user,
-                        public_nodes=public_nodes,
-                        private_nodes=private_nodes,
-                        can_change_preferences=False,
+                        event_context={
+                            'public_nodes': public_nodes,
+                            'private_nodes': private_nodes,
+                            'can_change_preferences': False,
+                        }
                     )
                 except Exception:
                     errors.append(user._id)

From fd6e167b3fb6b9df8337b81e61167c009d57bff7 Mon Sep 17 00:00:00 2001
From: John Tordoff
Date: Mon, 21 Jul 2025 13:32:48 -0400
Subject: [PATCH 094/176] fix up draft registration tests

---
 .../views/test_draft_registration_list.py | 25 ++++++++-----------
 1 file changed, 11 insertions(+), 14 deletions(-)

diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py
index d19c6d994d5..85842f1e0a6 100644
---
a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -6,7 +6,7 @@ from api.base.settings.defaults import API_BASE from osf.migrations import ensure_invisible_and_inactive_schema -from osf.models import DraftRegistration, NodeLicense, RegistrationProvider, RegistrationSchema +from osf.models import DraftRegistration, NodeLicense, RegistrationProvider, RegistrationSchema, NotificationType from osf_tests.factories import ( RegistrationFactory, CollectionFactory, @@ -16,6 +16,7 @@ DraftRegistrationFactory, ) from osf.utils.permissions import READ, WRITE, ADMIN +from tests.utils import capture_notifications from website import settings @@ -431,19 +432,15 @@ def test_admin_can_create_draft( def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload, mock_send_grid): # Intercepting the send_mail call from website.project.views.contributor.notify_added_contributor - app.post_json_api( - f'{url_draft_registrations}?embed=branched_from&embed=initiator', - payload, - auth=user.auth - ) - assert mock_send_grid.called - - # Python 3.6 does not support mock.call_args.args/kwargs - # Instead, mock.call_args[0] is positional args, mock.call_args[1] is kwargs - # (note, this is compatible with later versions) - mock_send_kwargs = mock_send_grid.call_args[1] - assert mock_send_kwargs['subject'] == 'You have a new registration draft.' - assert mock_send_kwargs['to_addr'] == user.email + with capture_notifications() as notifications: + app.post_json_api( + f'{url_draft_registrations}?embed=branched_from&embed=initiator', + payload, + auth=user.auth + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + assert notifications[0]['kwargs']['user'] == user def test_create_draft_with_provider( self, app, user, url_draft_registrations, non_default_provider, payload_with_non_default_provider From 1602dc96b3488a3d8f36070d3b18d7c260357157 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 21 Jul 2025 14:05:05 -0400 Subject: [PATCH 095/176] simplify institutional access request notification code --- api/requests/serializers.py | 2 +- notifications.yaml | 2 +- osf/models/notification_type.py | 2 +- osf/utils/machines.py | 2 +- website/project/views/contributor.py | 2 ++ 5 files changed, 6 insertions(+), 4 deletions(-) diff --git a/api/requests/serializers.py b/api/requests/serializers.py index a8b5830031f..e5a07753741 100644 --- a/api/requests/serializers.py +++ b/api/requests/serializers.py @@ -188,7 +188,7 @@ def make_node_institutional_access_request(self, node, validated_data) -> NodeRe comment = validated_data.get('comment', '').strip() or language.EMPTY_REQUEST_INSTITUTIONAL_ACCESS_REQUEST_TEXT NotificationType.objects.get( - name=NotificationType.Type.NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST, + name=NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST, ).emit( user=recipient, message_frequency='instantly', diff --git a/notifications.yaml b/notifications.yaml index 4ddb8758490..6f391f8ec1f 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -192,7 +192,7 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' notification_freq_default: instantly - - name: node_request_institutional_access_request + - name: node_institutional_access_request __docs__: ... 
object_content_type_model_name: abstractnode template: 'website/templates/emails/node_request_institutional_access_request.html.mako' diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index d9fc80b4925..66d438583aa 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -87,7 +87,7 @@ class Type(str, Enum): NODE_REQUEST_ACCESS_DENIED = 'node_request_access_denied' NODE_FORK_COMPLETED = 'node_fork_completed' NODE_FORK_FAILED = 'node_fork_failed' - NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST = 'node_request_institutional_access_request' + NODE_INSTITUTIONAL_ACCESS_REQUEST = 'node_institutional_access_request' NODE_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'node_contributor_added_access_request' NODE_CONTRIBUTOR_ADDED_DEFAULT = 'node_contributor_added_default' NODE_PENDING_EMBARGO_ADMIN = 'node_pending_embargo_admin' diff --git a/osf/utils/machines.py b/osf/utils/machines.py index 2e45827ad0a..44e11a9b3a3 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -220,7 +220,7 @@ def save_changes(self, ev): auth=Auth(ev.kwargs['user']), permissions=contributor_permissions, visible=visible, - send_email=f'{self.machineable.request_type}_request', + send_email=self.machineable.request_type, make_curator=make_curator, ) except IntegrityError as e: diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index d44d1b949b3..34335722a45 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -644,6 +644,8 @@ def notify_added_contributor(node, contributor, auth=None, email_template=None, notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION elif notification_type == 'access_request': notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST + elif notification_type == 'institutional_request': + notification_type = NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST elif getattr(node, 'has_linked_published_preprints', None): notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF logo = settings.OSF_PREPRINTS_LOGO From 707f05c45efb9019d9e38416dd870a8f5b4103ba Mon Sep 17 00:00:00 2001 From: antkryt Date: Mon, 21 Jul 2025 21:44:55 +0300 Subject: [PATCH 096/176] fix categories for sendgrid emails (#11236) --- framework/email/tasks.py | 10 ++++++++-- tests/framework_tests/test_email.py | 25 +++++++++++++++++++------ 2 files changed, 27 insertions(+), 8 deletions(-) diff --git a/framework/email/tasks.py b/framework/email/tasks.py index cf43395222e..7bee3ac2f90 100644 --- a/framework/email/tasks.py +++ b/framework/email/tasks.py @@ -171,7 +171,12 @@ def _send_with_sendgrid( # Personalization to handle To, CC, and BCC sendgrid client concept personalization = Personalization() - personalization.add_to(To(to_addr)) + to = To(to_addr) + if to.email is None: + sentry.log_message(f"Receiver email is not valid: {to_addr}. 
{subject} email won't be sent.") + return False + + personalization.add_to(to) if cc_addr: if isinstance(cc_addr, str): @@ -191,7 +196,8 @@ def _send_with_sendgrid( mail.add_personalization(personalization) if categories: - mail.add_category([Category(x) for x in categories]) + for category in categories: + mail.add_category(Category(category)) if attachment_name and attachment_content: attachment = Attachment( diff --git a/tests/framework_tests/test_email.py b/tests/framework_tests/test_email.py index c19596b7ed8..5e2216fc7bc 100644 --- a/tests/framework_tests/test_email.py +++ b/tests/framework_tests/test_email.py @@ -66,13 +66,14 @@ def test_send_with_sendgrid_success(self, mock_mail: MagicMock): mock_mail.return_value.add_personalization.assert_called_once() # Capture the categories added via add_category - mock_mail.return_value.add_category.assert_called_once() - added_categories = mock_mail.return_value.add_category.call_args.args[0] + # mock_mail.return_value.add_category.assert_called_once() + assert mock_mail.return_value.add_category.call_count == 2 + added_categories = mock_mail.return_value.add_category.call_args_list assert len(added_categories) == 2 - assert isinstance(added_categories[0], Category) - assert isinstance(added_categories[1], Category) - assert added_categories[0].get() == category1 - assert added_categories[1].get() == category2 + assert isinstance(added_categories[0].args[0], Category) + assert isinstance(added_categories[1].args[0], Category) + assert added_categories[0].args[0].get() == category1 + assert added_categories[1].args[0].get() == category2 mock_client.send.assert_called_once_with(mock_mail.return_value) @@ -103,6 +104,18 @@ def test_send_with_sendgrid_failure_returns_false(self, mock_mail, sentry_mock): mock_client.send.assert_called_once_with(mock_mail.return_value) + mock_client.send.return_value = mock.Mock(status_code=200, body='correct') + to_addr = 'not-an-email' + ret = _send_with_sendgrid( + from_addr=from_addr, + to_addr=to_addr, + subject=subject, + message=message, + client=mock_client + ) + assert not ret + sentry_mock.assert_called() + if __name__ == '__main__': unittest.main() From b7c2e98c306287e5c4688f036a4f4bf948c2c14a Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 21 Jul 2025 14:25:32 -0400 Subject: [PATCH 097/176] use NotificationType model for withdrawal requests --- .../views/test_node_contributors_list.py | 2 +- api_tests/nodes/views/test_node_forks_list.py | 2 +- .../commands/populate_notification_types.py | 2 +- osf/utils/machines.py | 24 +++++++++++-------- 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index d2ab7dd1084..c4c7d63c7f5 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -6,7 +6,7 @@ from api.base.settings.defaults import API_BASE from api.nodes.serializers import NodeContributorsCreateSerializer from framework.auth.core import Auth -from osf.models import NotificationType +from osf.models.notification_type import NotificationType from osf_tests.factories import ( fake_email, AuthUserFactory, diff --git a/api_tests/nodes/views/test_node_forks_list.py b/api_tests/nodes/views/test_node_forks_list.py index aa0c5570320..632c178bb2e 100644 --- a/api_tests/nodes/views/test_node_forks_list.py +++ b/api_tests/nodes/views/test_node_forks_list.py @@ -3,7 +3,7 @@ from api.base.settings.defaults import API_BASE 
from framework.auth.core import Auth -from osf.models import NotificationType +from osf.models.notification_type import NotificationType from osf_tests.factories import ( NodeFactory, ProjectFactory, diff --git a/osf/management/commands/populate_notification_types.py b/osf/management/commands/populate_notification_types.py index 26fc02f5dd6..3d2d1157563 100644 --- a/osf/management/commands/populate_notification_types.py +++ b/osf/management/commands/populate_notification_types.py @@ -4,7 +4,7 @@ import logging from django.contrib.contenttypes.models import ContentType -from osf.models import NotificationType +from osf.models.notification_type import NotificationType from django.core.management.base import BaseCommand from django.db import transaction diff --git a/osf/utils/machines.py b/osf/utils/machines.py index 44e11a9b3a3..ee875c9114a 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -6,6 +6,7 @@ from framework.auth import Auth from osf.exceptions import InvalidTransitionError +from osf.models.notification_type import NotificationType from osf.models.preprintlog import PreprintLog from osf.models.action import ReviewAction, NodeRequestAction, PreprintRequestAction from osf.utils import permissions @@ -21,7 +22,6 @@ COLLECTION_SUBMISSION_TRANSITIONS, NodeRequestTypes ) -from website.mails import mails from website.reviews import signals as reviews_signals from website.settings import DOMAIN, OSF_SUPPORT_EMAIL, OSF_CONTACT_EMAIL @@ -181,11 +181,14 @@ def notify_withdraw(self, ev): context['contributor'] = contributor if context.get('requester', None): context['is_requester'] = context['requester'].username == contributor.username - mails.send_mail( - contributor.username, - mails.WITHDRAWAL_REQUEST_GRANTED, - document_type=self.machineable.provider.preprint_word, - **context + NotificationType.objects.get( + name=NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_APPROVED + ).emit( + user=contributor, + event_context={ + **{'document_type': self.machineable.provider.preprint_word}, + **context + } ) def get_context(self): @@ -321,10 +324,11 @@ def notify_submit(self, ev): def notify_accept_reject(self, ev): if ev.event.name == DefaultTriggers.REJECT.value: context = self.get_context() - mails.send_mail( - self.machineable.creator.username, - mails.WITHDRAWAL_REQUEST_DECLINED, - **context + NotificationType.objects.get( + name=NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_DECLINED + ).emit( + user=self.machineable.creator, + event_context=context ) else: pass From 8d6582f1c4c481404ab632bf04192b8f3614a84c Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 21 Jul 2025 15:54:43 -0400 Subject: [PATCH 098/176] fix preprint contributor notification tests and withdraw request notifications --- ...est_draft_registration_contributor_list.py | 63 ++++---- .../views/test_preprint_contributors_list.py | 145 +++++++++--------- notifications.yaml | 76 ++------- .../commands/populate_notification_types.py | 2 - osf/models/notification_type.py | 2 +- osf/utils/machines.py | 25 +-- osf/utils/notifications.py | 24 +-- .../emails/access_request_rejected.html.mako | 2 +- .../emails/new_pending_submissions.html.mako | 2 +- .../withdrawal_request_declined.html.mako | 2 +- .../withdrawal_request_granted.html.mako | 8 +- 11 files changed, 155 insertions(+), 196 deletions(-) diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py index 71940714d48..bf4d211a8d7 100644 --- 
a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py @@ -1,5 +1,4 @@ import pytest -from unittest import mock import random from framework.auth.core import Auth @@ -17,6 +16,7 @@ TestNodeContributorFiltering, ) from api_tests.nodes.views.utils import NodeCRUDTestCase +from osf.models.notification_type import NotificationType from osf_tests.factories import ( DraftRegistrationFactory, AuthUserFactory, @@ -239,6 +239,7 @@ def test_add_contributor_sends_email(self, app, user, user_two, url_project_cont ) assert res.status_code == 201 assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION # Overrides TestNodeContributorCreateEmail def test_add_contributor_signal_if_default( @@ -266,38 +267,42 @@ def test_add_contributor_signal_if_default( # Overrides TestNodeContributorCreateEmail def test_add_unregistered_contributor_sends_email( self, mock_send_grid, app, user, url_project_contribs): - url = f'{url_project_contribs}?send_email=draft_registration' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': { - 'full_name': 'Kanye West', - 'email': 'kanye@west.com' - } - } - } - res = app.post_json_api(url, payload, auth=user.auth) + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_project_contribs}?send_email=draft_registration', + { + 'data': { + 'type': 'contributors', + 'attributes': { + 'full_name': 'Brian Dawkins', + 'email': 'b@dawk.com' + } + } + }, + auth=user.auth + ) assert res.status_code == 201 - assert mock_send_grid.call_count == 1 + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION # Overrides TestNodeContributorCreateEmail - @mock.patch('website.project.signals.unreg_contributor_added.send') - def test_add_unregistered_contributor_signal_if_default( - self, mock_send, app, user, url_project_contribs): - url = f'{url_project_contribs}?send_email=draft_registration' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': { - 'full_name': 'Kanye West', - 'email': 'kanye@west.com' - } - } - } - res = app.post_json_api(url, payload, auth=user.auth) - args, kwargs = mock_send.call_args + def test_add_unregistered_contributor_signal_if_default(self, app, user, url_project_contribs): + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_project_contribs}?send_email=draft_registration', + { + 'data': { + 'type': 'contributors', + 'attributes': { + 'full_name': 'Jalen Hurts', + 'email': 'numberone@eagles.com' + } + } + }, auth=user.auth + ) assert res.status_code == 201 - assert 'draft_registration' == kwargs['email_template'] + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION # Overrides TestNodeContributorCreateEmail def test_add_unregistered_contributor_without_email_no_email( diff --git a/api_tests/preprints/views/test_preprint_contributors_list.py b/api_tests/preprints/views/test_preprint_contributors_list.py index e7a3d5d739f..ce96d8d308c 100644 --- a/api_tests/preprints/views/test_preprint_contributors_list.py +++ b/api_tests/preprints/views/test_preprint_contributors_list.py @@ -7,7 +7,7 @@ from api.base.settings.defaults import API_BASE from api.nodes.serializers import NodeContributorsCreateSerializer from framework.auth.core import Auth -from 
osf.models import PreprintLog +from osf.models import PreprintLog, NotificationType from osf_tests.factories import ( fake_email, AuthUserFactory, @@ -20,7 +20,7 @@ from osf.utils.workflows import DefaultStates from rest_framework import exceptions from tests.base import capture_signals, fake -from tests.utils import assert_latest_log, assert_equals +from tests.utils import assert_latest_log, assert_equals, capture_notifications from website.project.signals import contributor_added, contributor_removed from api_tests.utils import disconnected_from_listeners @@ -1402,67 +1402,70 @@ def test_add_contributor_needs_preprint_filter_to_send_email( assert res.json['errors'][0]['detail'] == 'default is not a valid email preference.' assert mock_send_grid.call_count == 0 - @mock.patch('website.project.signals.contributor_added.send') def test_add_contributor_signal_if_preprint( - self, mock_send, app, user, user_two, url_preprint_contribs): - url = f'{url_preprint_contribs}?send_email=preprint' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': { - }, - 'relationships': { - 'users': { - 'data': { - 'type': 'users', - 'id': user_two._id + self, app, user, user_two, url_preprint_contribs): + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_preprint_contribs}?send_email=preprint', + { + 'data': { + 'type': 'contributors', + 'attributes': { + }, + 'relationships': { + 'users': { + 'data': { + 'type': 'users', + 'id': user_two._id + } + } } } - } - } - } - res = app.post_json_api(url, payload, auth=user.auth) - args, kwargs = mock_send.call_args + }, + auth=user.auth + ) assert res.status_code == 201 - assert mock_send.call_count == 1 - assert 'preprint' == kwargs['email_template'] + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT def test_add_unregistered_contributor_sends_email( - self, mock_send_grid, app, user, url_preprint_contribs): - url = f'{url_preprint_contribs}?send_email=preprint' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': { - 'full_name': 'Kanye West', - 'email': 'kanye@west.com' - } - } - } - - mock_send_grid.reset_mock() - res = app.post_json_api(url, payload, auth=user.auth) + self, app, user, url_preprint_contribs): + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_preprint_contribs}?send_email=preprint', + { + 'data': { + 'type': 'contributors', + 'attributes': { + 'full_name': 'Kanye West', + 'email': 'kanye@west.com' + } + } + }, + auth=user.auth + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT assert res.status_code == 201 - assert mock_send_grid.call_count == 1 - @mock.patch('website.project.signals.unreg_contributor_added.send') - def test_add_unregistered_contributor_signal_if_preprint( - self, mock_send, app, user, url_preprint_contribs): - url = f'{url_preprint_contribs}?send_email=preprint' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': { - 'full_name': 'Kanye West', - 'email': 'kanye@west.com' - } - } - } - res = app.post_json_api(url, payload, auth=user.auth) - args, kwargs = mock_send.call_args + def test_add_unregistered_contributor_signal_if_preprint(self, app, user, url_preprint_contribs): + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_preprint_contribs}?send_email=preprint', + { + 'data': { + 'type': 'contributors', + 'attributes': { + 
'full_name': 'Jason Kelece', + 'email': 'best@kelece.com' + } + } + }, + auth=user.auth + ) assert res.status_code == 201 - assert 'preprint' == kwargs['email_template'] - assert mock_send.call_count == 1 + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT def test_add_contributor_invalid_send_email_param( self, mock_send_grid, app, user, url_preprint_contribs): @@ -1524,24 +1527,24 @@ def test_publishing_preprint_sends_emails_to_contributors( assert contributor_added in mock_signal.signals_sent() assert mock_update.called - @mock.patch('website.project.signals.unreg_contributor_added.send') - def test_contributor_added_signal_not_specified( - self, mock_send, app, user, url_preprint_contribs): - - payload = { - 'data': { - 'type': 'contributors', - 'attributes': { - 'full_name': 'Kanye West', - 'email': 'kanye@west.com' - } - } - } - res = app.post_json_api(url_preprint_contribs, payload, auth=user.auth) - args, kwargs = mock_send.call_args + def test_contributor_added_signal_not_specified(self, app, user, url_preprint_contribs): + with capture_notifications() as notifications: + res = app.post_json_api( + url_preprint_contribs, + { + 'data': { + 'type': 'contributors', + 'attributes': { + 'full_name': 'Kanye West', + 'email': 'kanye@west.com' + } + } + }, + auth=user.auth + ) assert res.status_code == 201 - assert 'preprint' == kwargs['email_template'] - assert mock_send.call_count == 1 + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT def test_contributor_added_not_sent_if_unpublished( self, mock_send_grid, app, user, preprint_unpublished): diff --git a/notifications.yaml b/notifications.yaml index 6f391f8ec1f..41b945617a0 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -12,340 +12,282 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/pending_registered.html.mako' - notification_freq_default: instantly - name: user_pending_verification __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/pending_invite.html.mako' - notification_freq_default: instantly - name: user_password_reset __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/password_reset.html.mako' - notification_freq_default: instantly - name: user_contributor_added_draft_registration __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_draft_registration.html.mako' - notification_freq_default: instantly - name: user_contributor_added_preprint_node_from_osf __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_preprint_node_from_osf.html.mako' - notification_freq_default: instantly - name: user_contributor_added_access_request __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_access_request.html.mako' - notification_freq_default: instantly - name: user_contributor_added_draft_registration __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_draft_registration.html.mako' - notification_freq_default: instantly - name: user_contributor_added_osf_preprint __docs__: ... 
object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_preprint_node_from_osf.html.mako' - notification_freq_default: instantly - name: user_external_login_link_success __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/external_confirm_success.html.mako' - notification_freq_default: instantly - name: user_confirm_email __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/confirm.html.mako' - notification_freq_default: instantly - name: forgot_password __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forgot_password.html.mako' - notification_freq_default: instantly - name: user_welcome_osf4i __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/welcome_osf4i.html.mako' - notification_freq_default: instantly - name: user_invite_preprints_osf __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_preprints_osf.html.mako' - notification_freq_default: instantly - name: user_invite_preprints __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_preprints.html.mako' - notification_freq_default: instantly - name: invite_draft_registration __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_draft_registration.html.mako' - notification_freq_default: instantly - name: user_invite_default __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_default.html.mako' - notification_freq_default: instantly - name: user_pending_invite __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/pending_invite.html.mako' - notification_freq_default: instantly - name: user_forward_invite_registered __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forward_invite.html.mako' - notification_freq_default: instantly - name: user_forward_invite __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forward_invite.html.mako' - notification_freq_default: instantly - name: user_initial_confirm_email __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/initial_confirm.html.mako' - notification_freq_default: instantly - name: user_export_data_request __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/initial_confirm.html.mako' - notification_freq_default: instantly - name: user_request_deactivation __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/initial_confirm.html.mako' - notification_freq_default: instantly - name: user_storage_cap_exceeded_announcement __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/storage_cap_exceeded_announcement.html.mako' - notification_freq_default: instantly - name: external_confirm_success __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/external_confirm_success.html.mako' - notification_freq_default: instantly - name: forgot_password_institution __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forgot_password_institution.html.mako' - notification_freq_default: instantly #### PROVIDER - name: provider_new_pending_submissions __docs__: ... 
object_content_type_model_name: abstractprovider template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: provider_new_pending_withdraw_requests __docs__: ... object_content_type_model_name: abstractprovider template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: provider_contributor_added_preprint __docs__: ... object_content_type_model_name: abstractprovider template: 'website/templates/emails/contributor_added_preprints.html.mako' - notification_freq_default: instantly #### NODE - name: node_file_updated __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' - notification_freq_default: instantly - name: node_file_added __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' - notification_freq_default: instantly - name: node_file_removed __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' - notification_freq_default: instantly - name: node_addon_file_renamed __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' - notification_freq_default: instantly - name: node_addon_file_copied __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' - notification_freq_default: instantly - name: node_addon_file_moved __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' - notification_freq_default: instantly - name: node_addon_file_removed __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' - notification_freq_default: instantly - name: node_wiki_updated __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' - notification_freq_default: instantly - name: node_institutional_access_request __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/node_request_institutional_access_request.html.mako' - notification_freq_default: instantly - name: node_contributor_added_default __docs__: This email notifies the user that they have been added as a contributor to a node. object_content_type_model_name: abstractnode template: 'website/templates/emails/contributor_added_default.html.mako' - notification_freq_default: instantly - name: node_contributor_added_access_request __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/contributor_added_access_request.html.mako' - notification_freq_default: instantly - name: node_pending_registration_admin __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_registration_admin.html.mako' - notification_freq_default: instantly - name: node_embargo_admin __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_embargo_admin.html.mako' - notification_freq_default: instantly - name: node_embargo_nonadmin __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_embargo_non_admin.html.mako' - notification_freq_default: instantly - name: node_affiliation_changed __docs__: ... 
object_content_type_model_name: abstractnode template: 'website/templates/emails/project_affiliation_changed.html.mako' - notification_freq_default: instantly - name: node_request_access_denied __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/access_request_rejected.html.mako' - notification_freq_default: instantly - name: node_access_request_submitted __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/access_request_submitted.html.mako' - notification_freq_default: instantly - name: node_fork_failed __docs__: This email is sent when a fork fails to be created, this could be due to addons or network outages or technical errors. object_content_type_model_name: abstractnode template: 'website/templates/emails/fork_failed.html.mako' - notification_freq_default: instantly - name: node_fork_completed __docs__: This email is sent when a fork is successfully created, object_content_type_model_name: abstractnode template: 'website/templates/emails/fork_completed.html.mako' - notification_freq_default: instantly - name: node_schema_response_initiated __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/updates_initiated.html.mako' - notification_freq_default: instantly - name: node_schema_response_submitted __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/updates_pending_approval.html.mako' - notification_freq_default: instantly - name: node_schema_response_approved __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/updates_approved.html.mako' - notification_freq_default: instantly - name: node_schema_response_rejected __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/updates_rejected.html.mako' - notification_freq_default: instantly #### PREPRINT - name: pending_retraction_admin __docs__: ... object_content_type_model_name: preprint - template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly + template: 'website/templates/emails/pending_retraction_admin.html.mako' + - name: preprint_request_withdrawal_approved + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/withdrawal_request_granted.html.mako' + - name: preprint_request_withdrawal_declined + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/withdrawal_request_declined.html.mako' #### SUPPORT - name: crossref_error __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly #### Collection Submissions - name: collection_submission_removed_moderator __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: collection_submission_removed_private __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: collection_submission_removed_admin __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: collection_submission_submitted __docs__: ... 
object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: collection_submission_cancel __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: collection_submission_accepted __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: collection_submission_rejected __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly #### DESK - name: desk_archive_job_exceeded __docs__: Archive job failed due to size exceeded. Sent to support desk. object_content_type_model_name: desk template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: desk_archive_job_copy_error __docs__: Archive job failed due to copy error. Sent to support desk. object_content_type_model_name: desk template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: desk_archive_job_file_not_found __docs__: Archive job failed because files were not found. Sent to support desk. object_content_type_model_name: desk template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: desk_archive_job_uncaught_error __docs__: Archive job failed due to an uncaught error. Sent to support desk. object_content_type_model_name: desk template: 'website/templates/emails/new_pending_submissions.html.mako' - notification_freq_default: instantly - name: desk_osf_support_email __docs__: ... object_content_type_model_name: desk template: 'website/templates/emails/crossref_doi_error.html.mako' - notification_freq_default: instantly - name: desk_request_deactivation __docs__: ... 
object_content_type_model_name: desk template: 'website/templates/emails/support_request.html.mako' - notification_freq_default: instantly diff --git a/osf/management/commands/populate_notification_types.py b/osf/management/commands/populate_notification_types.py index 3d2d1157563..a65b3f081ff 100644 --- a/osf/management/commands/populate_notification_types.py +++ b/osf/management/commands/populate_notification_types.py @@ -23,7 +23,6 @@ def populate_notification_types(*args, **kwargs): for notification_type in notification_types['notification_types']: notification_type.pop('__docs__') object_content_type_model_name = notification_type.pop('object_content_type_model_name') - notification_freq = notification_type.pop('notification_freq_default') if object_content_type_model_name == 'desk': content_type = None @@ -54,7 +53,6 @@ def populate_notification_types(*args, **kwargs): with open(notification_type['template']) as stream: template = stream.read() - notification_types['notification_freq'] = notification_freq nt, _ = NotificationType.objects.update_or_create( name=notification_type['name'], defaults=notification_type, diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 66d438583aa..14bfa97eac2 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -93,7 +93,6 @@ class Type(str, Enum): NODE_PENDING_EMBARGO_ADMIN = 'node_pending_embargo_admin' NODE_PENDING_EMBARGO_NON_ADMIN = 'node_pending_embargo_non_admin' NODE_PENDING_RETRACTION_NON_ADMIN = 'node_pending_retraction_non_admin' - NODE_PENDING_RETRACTION_ADMIN = 'node_pending_retraction_admin' NODE_PENDING_REGISTRATION_NON_ADMIN = 'node_pending_registration_non_admin' NODE_PENDING_REGISTRATION_ADMIN = 'node_pending_registration_admin' NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' @@ -129,6 +128,7 @@ class Type(str, Enum): PREPRINT_REQUEST_WITHDRAWAL_APPROVED = 'preprint_request_withdrawal_approved' PREPRINT_REQUEST_WITHDRAWAL_DECLINED = 'preprint_request_withdrawal_declined' PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'preprint_contributor_added_preprint_node_from_osf' + PREPRINT_PENDING_RETRACTION_ADMIN = 'preprint_pending_retraction_admin' # Collections Submission notifications COLLECTION_SUBMISSION_REMOVED_ADMIN = 'collection_submission_removed_admin' diff --git a/osf/utils/machines.py b/osf/utils/machines.py index ee875c9114a..c2a869eee64 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -168,19 +168,23 @@ def notify_withdraw(self, ev): context = self.get_context() context['ever_public'] = self.machineable.ever_public try: - preprint_request_action = PreprintRequestAction.objects.get(target__target__id=self.machineable.id, - from_state='pending', - to_state='accepted', - trigger='accept') - context['requester'] = preprint_request_action.target.creator + preprint_request_action = PreprintRequestAction.objects.get( + target__target__id=self.machineable.id, + from_state='pending', + to_state='accepted', + trigger='accept' + ) + requester = preprint_request_action.target.creator except PreprintRequestAction.DoesNotExist: # If there is no preprint request action, it means the withdrawal is directly initiated by admin/moderator context['force_withdrawal'] = True + context['requester_fullname'] = requester.fullname for contributor in self.machineable.contributors.all(): - context['contributor'] = contributor - if context.get('requester', None): - context['is_requester'] = context['requester'].username == 
contributor.username + context['contributor_fullname'] = contributor.fullname + if context.get('requester_fullname', None): + context['is_requester'] = requester == contributor + NotificationType.objects.get( name=NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_APPROVED ).emit( @@ -194,7 +198,8 @@ def notify_withdraw(self, ev): def get_context(self): return { 'domain': DOMAIN, - 'reviewable': self.machineable, + 'reviewable_title': self.machineable.title, + 'reviewable_absolute_url': self.machineable.absolute_url, 'workflow': self.machineable.provider.reviews_workflow, 'provider_url': self.machineable.provider.domain or f'{DOMAIN}preprints/{self.machineable.provider._id}', 'provider_contact_email': self.machineable.provider.email_contact or OSF_CONTACT_EMAIL, @@ -348,7 +353,7 @@ def notify_resubmit(self, ev): def get_context(self): return { 'reviewable': self.machineable.target, - 'requester': self.machineable.creator, + 'requester_fullname': self.machineable.creator.fullname, 'is_request_email': True, 'document_type': self.machineable.target.provider.preprint_word } diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index 92ea38fcf70..7a0f6a251b6 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -1,4 +1,6 @@ from django.utils import timezone + +from osf.models.notification_type import NotificationType from website.mails import mails from website.reviews import signals as reviews_signals from website.settings import DOMAIN, OSF_SUPPORT_EMAIL, OSF_CONTACT_EMAIL @@ -64,7 +66,7 @@ def notify_accept_reject(resource, user, action, states, *args, **kwargs): context['notify_comment'] = not resource.provider.reviews_comments_private and action.comment context['comment'] = action.comment - context['requester'] = action.creator + context['requester_fullname'] = action.creator.fullname context['is_rejected'] = action.to_state == states.REJECTED.db_name context['was_pending'] = action.from_state == states.PENDING.db_name reviews_signals.reviews_email.send( @@ -89,11 +91,11 @@ def notify_edit_comment(resource, user, action, *args, **kwargs): def notify_reject_withdraw_request(resource, action, *args, **kwargs): context = get_email_template_context(resource) - context['requester'] = action.creator + context['requester_fullname'] = action.creator.fullname for contributor in resource.contributors.all(): context['contributor'] = contributor - context['requester'] = action.creator + context['requester_fullname'] = action.creator.fullname context['is_requester'] = action.creator == contributor mails.send_mail( @@ -116,15 +118,19 @@ def notify_withdraw_registration(resource, action, *args, **kwargs): context = get_email_template_context(resource) context['force_withdrawal'] = action.trigger == RegistrationModerationTriggers.FORCE_WITHDRAW.db_name - context['requester'] = resource.retraction.initiated_by + context['requester_fullname'] = resource.retraction.initiated_by.fullname context['comment'] = action.comment context['notify_comment'] = not resource.provider.reviews_comments_private and action.comment for contributor in resource.contributors.all(): context['contributor'] = contributor - context['is_requester'] = context['requester'] == contributor - mails.send_mail( - contributor.username, - mails.WITHDRAWAL_REQUEST_GRANTED, - **context + context['is_requester'] = resource.retraction.initiated_by == contributor + NotificationType.objects.get( + name=NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_APPROVED + ).emit( + user=contributor, + event_context={ + 
'is_requester': contributor, + + }, ) diff --git a/website/templates/emails/access_request_rejected.html.mako b/website/templates/emails/access_request_rejected.html.mako index d4d3bbb2f5c..924f7f56bd3 100644 --- a/website/templates/emails/access_request_rejected.html.mako +++ b/website/templates/emails/access_request_rejected.html.mako @@ -6,7 +6,7 @@ <%! from website import settings %> - Hello ${requester.fullname},
    + Hello ${requester_fullname},

    This email is to inform you that your request for access to the project at ${node.absolute_url} has been declined.

    diff --git a/website/templates/emails/new_pending_submissions.html.mako b/website/templates/emails/new_pending_submissions.html.mako index 12208b272fe..067148e2437 100644 --- a/website/templates/emails/new_pending_submissions.html.mako +++ b/website/templates/emails/new_pending_submissions.html.mako @@ -5,7 +5,7 @@ At ${localized_timestamp}: % if is_request_email: - ${requester.fullname} + ${requester_fullname} % else: ${', '.join(reviewable.contributors.values_list('fullname', flat=True))} % endif diff --git a/website/templates/emails/withdrawal_request_declined.html.mako b/website/templates/emails/withdrawal_request_declined.html.mako index 79cbf7b197e..4c1a693136e 100644 --- a/website/templates/emails/withdrawal_request_declined.html.mako +++ b/website/templates/emails/withdrawal_request_declined.html.mako @@ -16,7 +16,7 @@ ${comment} % endif % else: - Dear ${requester.fullname}, + Dear ${requester_fullname},

    Your request to withdraw your ${document_type} "${reviewable.title}" from ${reviewable.provider.name} has been declined by the service moderators. Login and visit your ${document_type} to view their feedback. The ${document_type} is still publicly available on ${reviewable.provider.name}. % endif diff --git a/website/templates/emails/withdrawal_request_granted.html.mako b/website/templates/emails/withdrawal_request_granted.html.mako index 78d49617d0b..15c4cb4b1a7 100644 --- a/website/templates/emails/withdrawal_request_granted.html.mako +++ b/website/templates/emails/withdrawal_request_granted.html.mako @@ -29,9 +29,9 @@ The ${document_type} has been removed from ${reviewable.provider.name}.
    % else: - ${requester.fullname} has withdrawn your ${document_type} "${reviewable.title}" from ${reviewable.provider.name}. + ${requester_fullname} has withdrawn your ${document_type} "${reviewable.title}" from ${reviewable.provider.name}. % if reviewable.withdrawal_justification: - ${requester.fullname} provided the following justification: "${reviewable.withdrawal_justification}" + ${requester_fullname} provided the following justification: "${reviewable.withdrawal_justification}" % endif
    The ${document_type} has been removed from ${reviewable.provider.name}. @@ -53,11 +53,11 @@ % endif
    % else: - ${requester.fullname} has withdrawn your ${document_type} "${reviewable.title}" from ${reviewable.provider.name}. + ${requester_fullname} has withdrawn your ${document_type} "${reviewable.title}" from ${reviewable.provider.name}.
    The ${document_type} has been removed from ${reviewable.provider.name}, but its metadata is still available: title of the withdrawn ${document_type}, its contributor list, abstract, tags, and DOI. % if reviewable.withdrawal_justification: - ${requester.fullname} provided the following justification: "${reviewable.withdrawal_justification}". + ${requester_fullname} provided the following justification: "${reviewable.withdrawal_justification}".
    % endif
    From dcaf9bd9254049b4c15cf4f503e5e1af9612bf29 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 21 Jul 2025 16:51:08 -0400 Subject: [PATCH 099/176] Add NotificationTypes for duplicate accounts --- api/institutions/authentication.py | 7 ++++++- api/users/views.py | 21 +++++++++++-------- framework/auth/views.py | 3 +-- notifications.yaml | 12 +++++++++-- .../duplicate_accounts_sso_osf4i.html.mako | 8 +++---- .../emails/external_confirm_success.html.mako | 2 +- 6 files changed, 34 insertions(+), 19 deletions(-) diff --git a/api/institutions/authentication.py b/api/institutions/authentication.py index 9e02a7fc7ec..8ae64ddc447 100644 --- a/api/institutions/authentication.py +++ b/api/institutions/authentication.py @@ -373,7 +373,12 @@ def authenticate(self, request): ).emit( user=user, event_context={ - 'duplicate_user': duplicate_user, + 'user_fullname': user.fullname, + 'user_username': user.username, + 'user__id': user._id, + 'duplicate_user_fullname': duplicate_user.fullname, + 'duplicate_user_username': duplicate_user.username, + 'duplicate_user__id': duplicate_user._id, 'domain': DOMAIN, 'osf_support_email': OSF_SUPPORT_EMAIL, }, diff --git a/api/users/views.py b/api/users/views.py index 39b7c847469..590216ade10 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -786,7 +786,7 @@ def post(self, request, *args, **kwargs): # Don't go anywhere return JsonResponse( { - 'external_id_provider': external_id_provider, + 'external_id_provider': external_id_provider.name, 'auth_user_fullname': fullname, }, status=status.HTTP_200_OK, @@ -1071,7 +1071,7 @@ def _process_external_identity(self, user, external_identity, service_url): message_frequency='instantly', event_context={ 'can_change_preferences': False, - 'external_id_provider': provider, + 'external_id_provider': provider.name, }, ) enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id)) @@ -1387,13 +1387,16 @@ def post(self, request, *args, **kwargs): if external_status == 'CREATE': service_url += '&{}'.format(urlencode({'new': 'true'})) elif external_status == 'LINK': - notification_type = NotificationType.objects.filter(name='external_confirm_success') - if not notification_type.exists(): - raise NotificationType.DoesNotExist( - 'NotificationType with name external_confirm_success does not exist.', - ) - notification_type = notification_type.first() - notification_type.emit(user=user, message_frequency='instantly', event_context={'can_change_preferences': False, 'external_id_provider': provider}) + NotificationType.objects.filter( + name=NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK, + ).emit( + user=user, + message_frequency='instantly', + event_context={ + 'can_change_preferences': False, + 'external_id_provider': provider.name, + }, + ) enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id)) diff --git a/framework/auth/views.py b/framework/auth/views.py index 8ef5d5d29b3..e00df8679cd 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -665,9 +665,8 @@ def external_login_confirm_email_get(auth, uid, token): name=NotificationType.Type.USER_EXTERNAL_LOGIN_LINK_SUCCESS, ).emit( user=user, - subscribed_object=user, # or whatever the correct related object is event_context={ - 'external_id_provider': getattr(provider, 'id', None), + 'external_id_provider': provider.name, 'can_change_preferences': False, 'osf_contact_email': settings.OSF_CONTACT_EMAIL, }, diff --git a/notifications.yaml b/notifications.yaml index 41b945617a0..284bb6b6992 100644 --- 
a/notifications.yaml +++ b/notifications.yaml @@ -100,11 +100,19 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/storage_cap_exceeded_announcement.html.mako' - - name: external_confirm_success + - name: user_duplicate_accounts_sso_osf4i + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/duplicate_accounts_sso_osf4i.html.mako' + - name: user_external_confirm_success_lik __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/external_confirm_success.html.mako' - - name: forgot_password_institution + - name: user_duplicate_accounts_osf4i + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/duplicate_accounts_sso_osf4i.html.mako' + - name: user_forgot_password_institution __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forgot_password_institution.html.mako' diff --git a/website/templates/emails/duplicate_accounts_sso_osf4i.html.mako b/website/templates/emails/duplicate_accounts_sso_osf4i.html.mako index 8577bdb4aa0..64e60d46a91 100644 --- a/website/templates/emails/duplicate_accounts_sso_osf4i.html.mako +++ b/website/templates/emails/duplicate_accounts_sso_osf4i.html.mako @@ -3,13 +3,13 @@ <%def name="content()"> - Hello ${user.fullname},
    + Hello ${user_fullname},

    - Thank you for connecting to OSF through your institution. We have found two OSF accounts associated with your institutional identity: <${user.username}>(${user._id}) and <${duplicate_user.username}>(${duplicate_user._id}). We have made <${user.username}> the account primarily associated with your institution.
    + Thank you for connecting to OSF through your institution. We have found two OSF accounts associated with your institutional identity: <${user_username}>(${user__id}) and <${duplicate_user_username}>(${duplicate_user__id}). We have made <${user_username}> the account primarily associated with your institution.

    - If <${duplicate_user.username}> is also your account, we would encourage you to merge it into your primary account. Instructions for merging your accounts can be found at: Merge Your Accounts. This action will move all projects and components associated with <${duplicate_user.username}> into the <${user.username}> account.
    + If <${duplicate_user_username}> is also your account, we would encourage you to merge it into your primary account. Instructions for merging your accounts can be found at: Merge Your Accounts. This action will move all projects and components associated with <${duplicate_user_username}> into the <${user_username}> account.<br/>

    - If you want to keep <${duplicate_user.username}> separate from <${user.username}> you will need to log into that account with your email and OSF password instead of the institutional authentication.
    + If you want to keep <${duplicate_user_username}> separate from <${user_username}>, you will need to log into that account with your email and OSF password instead of the institutional authentication.<br/>

    If you have any issues, questions or need our help, contact ${osf_support_email} and we will be happy to assist.

    diff --git a/website/templates/emails/external_confirm_success.html.mako b/website/templates/emails/external_confirm_success.html.mako index 99365f6edd6..b487f64fb6d 100644 --- a/website/templates/emails/external_confirm_success.html.mako +++ b/website/templates/emails/external_confirm_success.html.mako @@ -3,7 +3,7 @@ <%def name="content()"> - Hello ${user.fullname},
    + Hello ${user_fullname},

    Congratulations! You have successfully linked your ${external_id_provider} account to the Open Science Framework (OSF).

    From 918f6cf639649cbe1d916f4a88280c2ac6d514f6 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 22 Jul 2025 11:35:21 -0400 Subject: [PATCH 100/176] fix notification tests for node and preprint requests --- .../test_node_relationship_institutions.py | 191 +++++++++++------- .../requests/views/test_node_request_list.py | 27 ++- .../views/test_request_actions_create.py | 33 +-- notifications.yaml | 5 +- osf/models/preprint.py | 15 +- osf/models/sanctions.py | 28 ++- osf/models/schema_response.py | 3 +- osf/utils/machines.py | 4 +- osf/utils/notifications.py | 18 +- website/project/views/contributor.py | 2 + website/reviews/listeners.py | 25 ++- .../emails/pending_embargo_admin.html.mako | 4 +- .../pending_embargo_non_admin.html.mako | 2 +- ...ending_embargo_termination_admin.html.mako | 4 +- ...ng_embargo_termination_non_admin.html.mako | 2 +- .../pending_registration_admin.html.mako | 4 +- .../pending_registration_non_admin.html.mako | 2 +- .../emails/pending_retraction_admin.html.mako | 4 +- .../pending_retraction_non_admin.html.mako | 2 +- ...eviews_resubmission_confirmation.html.mako | 4 +- .../reviews_submission_confirmation.html.mako | 28 ++- .../reviews_submission_status.html.mako | 12 +- .../emails/reviews_update_comment.html.mako | 4 +- .../withdrawal_request_declined.html.mako | 4 +- .../withdrawal_request_granted.html.mako | 14 +- 25 files changed, 259 insertions(+), 182 deletions(-) diff --git a/api_tests/nodes/views/test_node_relationship_institutions.py b/api_tests/nodes/views/test_node_relationship_institutions.py index 3bf25dc5adf..c19c4e79d4b 100644 --- a/api_tests/nodes/views/test_node_relationship_institutions.py +++ b/api_tests/nodes/views/test_node_relationship_institutions.py @@ -1,12 +1,14 @@ import pytest from api.base.settings.defaults import API_BASE +from osf.models import NotificationType from osf_tests.factories import ( InstitutionFactory, AuthUserFactory, NodeFactory, ) from osf.utils import permissions +from tests.utils import capture_notifications @pytest.mark.django_db @@ -202,43 +204,52 @@ def test_user_with_institution_and_permissions( assert institution_one in node.affiliated_institutions.all() assert institution_two in node.affiliated_institutions.all() - def test_user_with_institution_and_permissions_through_patch(self, app, user, institution_one, institution_two, - node, node_institutions_url, mock_send_grid): - - mock_send_grid.reset_mock() - res = app.patch_json_api( - node_institutions_url, - self.create_payload([institution_one, institution_two]), - auth=user.auth - ) + def test_user_with_institution_and_permissions_through_patch( + self, + app, + user, + institution_one, + institution_two, + node, + node_institutions_url + ): + with capture_notifications() as notifications: + res = app.patch_json_api( + node_institutions_url, + self.create_payload([institution_one, institution_two]), + auth=user.auth + ) assert res.status_code == 200 - assert mock_send_grid.call_count == 2 - - first_call_args = mock_send_grid.call_args_list[0][1] - assert first_call_args['to_addr'] == user.email - assert first_call_args['subject'] == 'Project Affiliation Changed' - - second_call_args = mock_send_grid.call_args_list[1][1] - assert second_call_args['to_addr'] == user.email - assert second_call_args['subject'] == 'Project Affiliation Changed' - - def test_remove_institutions_with_affiliated_user(self, app, user, institution_one, node, node_institutions_url, mock_send_grid): + assert len(notifications) == 2 + + assert notifications[0]['kwargs']['user'] == user 
+ assert notifications[0]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED + assert notifications[1]['kwargs']['user'] == user + assert notifications[1]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED + + def test_remove_institutions_with_affiliated_user( + self, + app, + user, + institution_one, + node, + node_institutions_url + ): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() - - mock_send_grid.reset_mock() - res = app.put_json_api( - node_institutions_url, - { - 'data': [] - }, - auth=user.auth - ) - - first_call_args = mock_send_grid.call_args_list[0][1] - assert first_call_args['to_addr'] == user.email - assert first_call_args['subject'] == 'Project Affiliation Changed' + with capture_notifications() as notifications: + res = app.put_json_api( + node_institutions_url, + { + 'data': [] + }, + auth=user.auth + ) + + assert len(notifications) == 1 + assert notifications[0]['kwargs']['user'] == user + assert notifications[0]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED assert res.status_code == 200 assert node.affiliated_institutions.count() == 0 @@ -276,69 +287,87 @@ def test_put_not_admin_but_affiliated(self, app, institution_one, node, node_ins assert institution_one in node.affiliated_institutions.all() def test_add_through_patch_one_inst_to_node_with_inst( - self, app, user, institution_one, institution_two, node, node_institutions_url, mock_send_grid): + self, + app, + user, + institution_one, + institution_two, + node, + node_institutions_url + ): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - mock_send_grid.reset_mock() - res = app.patch_json_api( - node_institutions_url, - self.create_payload([institution_one, institution_two]), - auth=user.auth - ) - assert mock_send_grid.call_count == 1 - first_call_args = mock_send_grid.call_args_list[0][1] - assert first_call_args['to_addr'] == user.email - assert first_call_args['subject'] == 'Project Affiliation Changed' + with capture_notifications() as notifications: + res = app.patch_json_api( + node_institutions_url, + self.create_payload([institution_one, institution_two]), + auth=user.auth + ) + assert len(notifications) == 1 + assert notifications[0]['kwargs']['user'] == user + assert notifications[0]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED assert res.status_code == 200 assert institution_one in node.affiliated_institutions.all() assert institution_two in node.affiliated_institutions.all() def test_add_through_patch_one_inst_while_removing_other( - self, app, user, institution_one, institution_two, node, node_institutions_url, mock_send_grid): + self, + app, + user, + institution_one, + institution_two, + node, + node_institutions_url, + ): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - mock_send_grid.reset_mock() - res = app.patch_json_api( - node_institutions_url, - self.create_payload([institution_two]), - auth=user.auth - ) - assert mock_send_grid.call_count == 2 - - first_call_args = mock_send_grid.call_args_list[0][1] - assert first_call_args['to_addr'] == user.email - assert first_call_args['subject'] == 'Project Affiliation Changed' + with capture_notifications() as notifications: + res = app.patch_json_api( + 
node_institutions_url, + self.create_payload([institution_two]), + auth=user.auth + ) + assert len(notifications) == 2 + assert notifications[0]['kwargs']['user'] == user + assert notifications[0]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED - second_call_args = mock_send_grid.call_args_list[1][1] - assert second_call_args['to_addr'] == user.email - assert second_call_args['subject'] == 'Project Affiliation Changed' + assert notifications[1]['kwargs']['user'] == user + assert notifications[1]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED assert res.status_code == 200 assert institution_one not in node.affiliated_institutions.all() assert institution_two in node.affiliated_institutions.all() def test_add_one_inst_with_post_to_node_with_inst( - self, app, user, institution_one, institution_two, node, node_institutions_url, mock_send_grid): + self, + app, + user, + institution_one, + institution_two, + node, + node_institutions_url, + ): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() assert institution_two not in node.affiliated_institutions.all() - res = app.post_json_api( - node_institutions_url, - self.create_payload([institution_two]), - auth=user.auth - ) - call_args = mock_send_grid.call_args[1] - assert call_args['to_addr'] == user.email - assert call_args['subject'] == 'Project Affiliation Changed' + with capture_notifications() as notifications: + res = app.post_json_api( + node_institutions_url, + self.create_payload([institution_two]), + auth=user.auth + ) + assert len(notifications) == 1 + assert notifications[0]['kwargs']['user'] == user + assert notifications[0]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED assert res.status_code == 201 assert institution_one in node.affiliated_institutions.all() @@ -352,19 +381,27 @@ def test_delete_nothing(self, app, user, node_institutions_url): ) assert res.status_code == 204 - def test_delete_existing_inst(self, app, user, institution_one, node, node_institutions_url, mock_send_grid): + def test_delete_existing_inst( + self, + app, + user, + institution_one, + node, + node_institutions_url, + ): node.affiliated_institutions.add(institution_one) node.save() - res = app.delete_json_api( - node_institutions_url, - self.create_payload([institution_one]), - auth=user.auth - ) + with capture_notifications() as notifications: + res = app.delete_json_api( + node_institutions_url, + self.create_payload([institution_one]), + auth=user.auth + ) - call_args = mock_send_grid.call_args[1] - assert call_args['to_addr'] == user.email - assert call_args['subject'] == 'Project Affiliation Changed' + assert len(notifications) == 1 + assert notifications[0]['kwargs']['user'] == user + assert notifications[0]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED assert res.status_code == 204 assert institution_one not in node.affiliated_institutions.all() diff --git a/api_tests/requests/views/test_node_request_list.py b/api_tests/requests/views/test_node_request_list.py index 41ee66747d4..4e16d5ce1c2 100644 --- a/api_tests/requests/views/test_node_request_list.py +++ b/api_tests/requests/views/test_node_request_list.py @@ -2,9 +2,11 @@ from api.base.settings.defaults import API_BASE from api_tests.requests.mixins import NodeRequestTestMixin +from osf.models import NotificationType from osf_tests.factories import NodeFactory, NodeRequestFactory, InstitutionFactory from osf.utils.workflows import DefaultStates, NodeRequestTypes +from tests.utils 
import capture_notifications @pytest.mark.django_db @@ -80,25 +82,32 @@ def test_requests_disabled_list(self, app, url, create_payload, project, admin): res = app.get(url, create_payload, auth=admin.auth, expect_errors=True) assert res.status_code == 403 - def test_email_sent_to_all_admins_on_submit(self, mock_send_grid, app, project, noncontrib, url, create_payload, second_admin): + def test_email_sent_to_all_admins_on_submit(self, app, project, noncontrib, url, create_payload, second_admin): project.is_public = True project.save() - mock_send_grid.reset_mock() - res = app.post_json_api(url, create_payload, auth=noncontrib.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, create_payload, auth=noncontrib.auth) + + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_SUBMITTED + assert notifications[1]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_SUBMITTED assert res.status_code == 201 - assert mock_send_grid.call_count == 2 - def test_email_not_sent_to_parent_admins_on_submit(self, mock_send_grid, app, project, noncontrib, url, create_payload, second_admin): + def test_email_not_sent_to_parent_admins_on_submit(self, app, project, noncontrib, url, create_payload, second_admin): component = NodeFactory(parent=project, creator=second_admin) component.is_public = True project.save() - url = f'/{API_BASE}nodes/{component._id}/requests/' - mock_send_grid.reset_mock() - res = app.post_json_api(url, create_payload, auth=noncontrib.auth) + with capture_notifications() as notifications: + res = app.post_json_api( + f'/{API_BASE}nodes/{component._id}/requests/', + create_payload, + auth=noncontrib.auth + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_SUBMITTED assert res.status_code == 201 assert component.parent_admin_contributors.count() == 1 assert component.contributors.count() == 1 - assert mock_send_grid.call_count == 1 def test_request_followed_by_added_as_contrib(elf, app, project, noncontrib, admin, url, create_payload): res = app.post_json_api(url, create_payload, auth=noncontrib.auth) diff --git a/api_tests/requests/views/test_request_actions_create.py b/api_tests/requests/views/test_request_actions_create.py index 30e579d3ab3..7396e1ec739 100644 --- a/api_tests/requests/views/test_request_actions_create.py +++ b/api_tests/requests/views/test_request_actions_create.py @@ -2,8 +2,11 @@ from api.base.settings.defaults import API_BASE from api_tests.requests.mixins import NodeRequestTestMixin, PreprintRequestTestMixin +from osf.models import NotificationType from osf.utils import permissions +from tests.utils import capture_notifications + @pytest.mark.django_db @pytest.mark.enable_enqueue_task @@ -190,29 +193,32 @@ def test_rejects_fail_with_requests_disabled(self, app, admin, url, node_request assert initial_state == node_request.machine_state assert node_request.creator not in node_request.target.contributors - def test_email_sent_on_approve(self, mock_send_grid, app, admin, url, node_request): - mock_send_grid.reset_mock() + def test_email_sent_on_approve(self, app, admin, url, node_request): initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors payload = self.create_payload(node_request._id, trigger='accept') - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, 
auth=admin.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST assert res.status_code == 201 node_request.reload() assert initial_state != node_request.machine_state assert node_request.creator in node_request.target.contributors - assert mock_send_grid.call_count == 1 - def test_email_sent_on_reject(self, mock_send_grid, app, admin, url, node_request): - mock_send_grid.reset_mock() + def test_email_sent_on_reject(self, app, admin, url, node_request): initial_state = node_request.machine_state assert node_request.creator not in node_request.target.contributors payload = self.create_payload(node_request._id, trigger='reject') - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_DENIED + assert res.status_code == 201 node_request.reload() assert initial_state != node_request.machine_state assert node_request.creator not in node_request.target.contributors - assert mock_send_grid.call_count == 1 def test_email_not_sent_on_reject(self, mock_send_grid, app, requester, url, node_request): mock_send_grid.reset_mock() @@ -385,20 +391,21 @@ def test_write_contrib_and_noncontrib_cannot_edit_comment(self, app, write_contr assert initial_state == request.machine_state assert initial_comment == request.comment - def test_email_sent_on_approve(self, mock_send_grid, app, moderator, url, pre_request, post_request): - mock_send_grid.reset_mock() + def test_email_sent_on_approve(self, app, moderator, url, pre_request, post_request): for request in [pre_request, post_request]: initial_state = request.machine_state assert not request.target.is_retracted payload = self.create_payload(request._id, trigger='accept') - res = app.post_json_api(url, payload, auth=moderator.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=moderator.auth) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_APPROVED + assert notifications[1]['type'] == NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_APPROVED assert res.status_code == 201 request.reload() request.target.reload() assert initial_state != request.machine_state assert request.target.is_retracted - # There are two preprints withdrawn and each preprint have 2 contributors. So 4 emails are sent in total. - assert mock_send_grid.call_count == 4 @pytest.mark.skip('TODO: IN-331 -- add emails') def test_email_sent_on_reject(self, mock_send_grid, app, moderator, url, pre_request, post_request): diff --git a/notifications.yaml b/notifications.yaml index 284bb6b6992..6bd704f69cc 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -130,6 +130,10 @@ notification_types: __docs__: ... object_content_type_model_name: abstractprovider template: 'website/templates/emails/contributor_added_preprints.html.mako' + - name: provider_reviews_submission_confirmation + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/reviews_submission_confirmation.html.mako' #### NODE - name: node_file_updated @@ -239,7 +243,6 @@ notification_types: __docs__: ... object_content_type_model_name: preprint template: 'website/templates/emails/withdrawal_request_declined.html.mako' - #### SUPPORT - name: crossref_error __docs__: ... 
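The test updates in this commit all follow the same replacement pattern: drop the mock_send_grid fixture, wrap the request in capture_notifications(), and assert on the captured NotificationType instead of SendGrid call counts. A condensed sketch of that pattern, assuming the capture_notifications helper imported from tests.utils in the hunks above; the fixture and helper names here are illustrative rather than taken from the suite.

    import pytest
    from osf.models import NotificationType
    from tests.utils import capture_notifications

    @pytest.mark.django_db
    def test_accept_emits_contributor_added_notification(app, admin, url, node_request, make_accept_payload):
        # Illustrative: make_accept_payload stands in for the suite's create_payload helpers.
        payload = make_accept_payload(node_request._id, trigger='accept')
        with capture_notifications() as notifications:
            res = app.post_json_api(url, payload, auth=admin.auth)
        assert res.status_code == 201
        # Each captured entry records the emitted NotificationType and the emit() kwargs,
        # which replaces the old assertions on mock_send_grid.call_count and call_args.
        assert len(notifications) == 1
        assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST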
diff --git a/osf/models/preprint.py b/osf/models/preprint.py index 6765aa0a275..2dd469c5fa9 100644 --- a/osf/models/preprint.py +++ b/osf/models/preprint.py @@ -20,6 +20,7 @@ from framework.auth import Auth from framework.exceptions import PermissionsError, UnpublishedPendingPreprintVersionExists from framework.auth import oauth_scopes +from . import NotificationType from .subject import Subject from .tag import Tag @@ -41,7 +42,7 @@ from website.util import api_v2_url, api_url_for, web_url_for from website.util.metrics import provider_source_tag from website.citations.utils import datetime_to_csl -from website import settings, mails +from website import settings from website.preprints.tasks import update_or_enqueue_on_preprint_updated from .base import BaseModel, Guid, GuidVersionsThrough, GuidMixinQuerySet, VersionedGuidMixin, check_manually_assigned_guid @@ -1040,7 +1041,9 @@ def _send_preprint_confirmation(self, auth): context = { 'domain': settings.DOMAIN, - 'reviewable': self, + 'reviewable_title': self.title, + 'reviewable_absolute_url': self.absolute_url, + 'reviewable_provider_name': self.provider.name, 'workflow': self.provider.reviews_workflow, 'provider_url': '{domain}preprints/{provider_id}'.format( domain=self.provider.domain or settings.DOMAIN, @@ -1054,11 +1057,11 @@ def _send_preprint_confirmation(self, auth): 'document_type': self.provider.preprint_word } - mails.send_mail( - recipient.username, - mails.REVIEWS_SUBMISSION_CONFIRMATION, + NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION + ).emit( user=recipient, - **context + event_context=context, ) # FOLLOWING BEHAVIOR NOT SPECIFIC TO PREPRINTS diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index f436b80c768..a4fcfe17396 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -571,7 +571,8 @@ def _email_template_context(self, 'embargo_end_date': str(self.end_date), 'approval_time_span': approval_time_span, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration()._id, + 'reviewable_title': self._get_registration().title, + 'reviewable_absolute_url': self._get_registration().absolute_url, }) else: context.update({ @@ -580,7 +581,8 @@ def _email_template_context(self, 'embargo_end_date': str(self.end_date), 'approval_time_span': approval_time_span, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration()._id, + 'reviewable_title': self._get_registration().title, + 'reviewable_absolute_url': self._get_registration().absolute_url, }) return context @@ -719,7 +721,8 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): return { 'is_initiator': self.initiated_by == user, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration()._id, + 'reviewable_title': self._get_registration().title, + 'reviewable_absolute_url': self._get_registration().absolute_url, 'initiated_by': self.initiated_by.fullname, 'project_name': self.registrations.filter().values_list('title', flat=True).get(), 'registration_link': registration_link, @@ -732,7 +735,8 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'initiated_by': self.initiated_by.fullname, 'registration_link': registration_link, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration()._id, + 'reviewable_title': self._get_registration().title, + 'reviewable_absolute_url': self._get_registration().absolute_url, 'approval_time_span': approval_time_span, } @@ -855,7 +859,8 @@ def 
_email_template_context(self, user, node, is_authorizer=False, urls=None): 'is_initiator': self.initiated_by == user, 'initiated_by': self.initiated_by.fullname, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration()._id, + 'reviewable_title': self._get_registration().title, + 'reviewable_absolute_url': self._get_registration().absolute_url, 'registration_link': registration_link, 'approval_link': approval_link, 'disapproval_link': disapproval_link, @@ -867,7 +872,8 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'initiated_by': self.initiated_by.fullname, 'registration_link': registration_link, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration()._id, + 'reviewable_title': self._get_registration().title, + 'reviewable_absolute_url': self._get_registration().absolute_url, 'approval_time_span': approval_time_span, }) return context @@ -1005,16 +1011,17 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): urls = urls or self.stashed_urls.get(user._id, {}) registration_link = urls.get('view', self._view_url(user._id, node)) approval_time_span = osf_settings.EMBARGO_TERMINATION_PENDING_TIME.days * 24 + registration = self._get_registration() + if is_authorizer: approval_link = urls.get('approve', '') disapproval_link = urls.get('reject', '') - registration = self._get_registration() - context.update({ 'is_initiator': self.initiated_by == user, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration()._id, + 'reviewable_title': registration.title, + 'reviewable_absolute_url': registration.absolute_url, 'initiated_by': self.initiated_by.fullname, 'approval_link': approval_link, 'project_name': registration.title, @@ -1030,7 +1037,8 @@ def _email_template_context(self, user, node, is_authorizer=False, urls=None): 'registration_link': registration_link, 'embargo_end_date': self.end_date, 'is_moderated': self.is_moderated, - 'reviewable': self._get_registration()._id, + 'reviewable_title': registration.title, + 'reviewable_absolute_url': registration.absolute_url, 'approval_time_span': approval_time_span, }) return context diff --git a/osf/models/schema_response.py b/osf/models/schema_response.py index 8d305254804..ec20bc50e6e 100644 --- a/osf/models/schema_response.py +++ b/osf/models/schema_response.py @@ -482,7 +482,8 @@ def _notify_users(self, event, event_initiator): email_context['revision_id'] = self._id email_context['referrer'] = self.initiator reviews_email_submit_moderators_notifications.send( - timestamp=timezone.now(), context=email_context + timestamp=timezone.now(), + context=email_context ) template = EMAIL_TEMPLATES_PER_EVENT.get(event) diff --git a/osf/utils/machines.py b/osf/utils/machines.py index c2a869eee64..d713f5264c0 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -352,7 +352,9 @@ def notify_resubmit(self, ev): def get_context(self): return { - 'reviewable': self.machineable.target, + 'reviewable_title': self.machineable.target.title, + 'reviewable_absolute_url': self.machineable.target.absolute_url, + 'reviewable_provicer_name': self.machineable.target.provider.name, 'requester_fullname': self.machineable.creator.fullname, 'is_request_email': True, 'document_type': self.machineable.target.provider.preprint_word diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index 7a0f6a251b6..af90b6cb3fb 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -13,16 +13,20 @@ def 
get_email_template_context(resource): base_context = { 'domain': DOMAIN, - 'reviewable': resource, + 'reviewable_title': resource.title, + 'reviewable_absolute_url': resource.absolute_url, + 'reviewable_provider_name': resource.provider.name, 'workflow': resource.provider.reviews_workflow, 'provider_url': resource.provider.domain or f'{DOMAIN}{url_segment}/{resource.provider._id}', + 'provider_type': resource.provider.type, + 'provider_name': resource.provider.name, 'provider_contact_email': resource.provider.email_contact or OSF_CONTACT_EMAIL, 'provider_support_email': resource.provider.email_support or OSF_SUPPORT_EMAIL, 'document_type': document_type } if document_type == 'registration': - base_context['draft_registration'] = resource.draft_registration.get() + base_context['draft_registration_absolute_url'] = resource.draft_registration.get().absolute_url if document_type == 'registration' and resource.provider.brand: brand = resource.provider.brand base_context['logo_url'] = brand.hero_logo_image @@ -37,11 +41,13 @@ def notify_submit(resource, user, *args, **kwargs): recipients = list(resource.contributors) reviews_signals.reviews_email_submit.send( context=context, - recipients=recipients + recipients=recipients, + resource=resource, ) reviews_signals.reviews_email_submit_moderators_notifications.send( timestamp=timezone.now(), - context=context + context=context, + resource=resource, ) @@ -54,10 +60,12 @@ def notify_resubmit(resource, user, *args, **kwargs): recipients=recipients, context=context, template=mails.REVIEWS_RESUBMISSION_CONFIRMATION, + resource=resource, ) reviews_signals.reviews_email_submit_moderators_notifications.send( timestamp=timezone.now(), - context=context + context=context, + resource=resource, ) diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 34335722a45..766ffb088e5 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -642,6 +642,8 @@ def notify_added_contributor(node, contributor, auth=None, email_template=None, notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT elif notification_type == 'draft_registration': notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + elif notification_type == 'access': + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST elif notification_type == 'access_request': notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST elif notification_type == 'institutional_request': diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index d6f3471dac7..8c03174657f 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -1,7 +1,7 @@ from django.utils import timezone +from osf.models import NotificationType from website.notifications import utils -from website.mails import mails from website.reviews import signals as reviews_signals from website.settings import OSF_PREPRINTS_LOGO, OSF_REGISTRIES_LOGO, DOMAIN @@ -29,19 +29,19 @@ def reviews_notification(self, creator, template, context, action): @reviews_signals.reviews_email_submit.connect -def reviews_submit_notification(self, recipients, context, template=None): +def reviews_submit_notification(self, recipients, context, resource, template=None): """ Handle email notifications for a new submission or a resubmission """ if not template: - template = mails.REVIEWS_SUBMISSION_CONFIRMATION + template = 
NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION # Avoid AppRegistryNotReady error from website.notifications.emails import get_user_subscriptions event_type = utils.find_subscription_type('global_reviews') - provider = context['reviewable'].provider + provider = resource.provider if provider._id == 'osf': if provider.type == 'osf.preprintprovider': context['logo'] = OSF_PREPRINTS_LOGO @@ -50,23 +50,23 @@ def reviews_submit_notification(self, recipients, context, template=None): else: raise NotImplementedError() else: - context['logo'] = context['reviewable'].provider._id + context['logo'] = resource.provider._id for recipient in recipients: user_subscriptions = get_user_subscriptions(recipient, event_type) context['no_future_emails'] = user_subscriptions['none'] - context['is_creator'] = recipient == context['reviewable'].creator - context['provider_name'] = context['reviewable'].provider.name - mails.send_mail( - recipient.username, - template, + context['is_creator'] = recipient == resource.creator + context['provider_name'] = resource.provider.name + NotificationType.objects.get( + name=template + ).emit( user=recipient, - **context + event_context=context ) @reviews_signals.reviews_email_submit_moderators_notifications.connect -def reviews_submit_notification_moderators(self, timestamp, context): +def reviews_submit_notification_moderators(self, timestamp, resource, context): """ Handle email notifications to notify moderators of new submissions or resubmission. """ @@ -75,7 +75,6 @@ def reviews_submit_notification_moderators(self, timestamp, context): from website.profile.utils import get_profile_image_url from website.notifications.emails import store_emails - resource = context['reviewable'] provider = resource.provider # Set submission url diff --git a/website/templates/emails/pending_embargo_admin.html.mako b/website/templates/emails/pending_embargo_admin.html.mako index fb0ab8cf72e..740994680a3 100644 --- a/website/templates/emails/pending_embargo_admin.html.mako +++ b/website/templates/emails/pending_embargo_admin.html.mako @@ -8,10 +8,10 @@

    % if is_initiator:
      You have requested final approvals to submit your registration
-      titled ${reviewable.title}.
+      titled ${reviewable_title}.
    % else:
      ${initiated_by} has requested final approvals to submit your registration
-      titled ${reviewable.title}.
+      titled ${reviewable_title}.
    % endif
    <br/>

    diff --git a/website/templates/emails/pending_embargo_non_admin.html.mako b/website/templates/emails/pending_embargo_non_admin.html.mako index 2d707eb8035..32b1b64e47e 100644 --- a/website/templates/emails/pending_embargo_non_admin.html.mako +++ b/website/templates/emails/pending_embargo_non_admin.html.mako @@ -7,7 +7,7 @@ Hello ${user.fullname},

    ${initiated_by} has requested final approvals to submit your registration
-      titled ${reviewable.title}
+      titled ${reviewable_title}
    <br/>

    % if is_moderated: diff --git a/website/templates/emails/pending_embargo_termination_admin.html.mako b/website/templates/emails/pending_embargo_termination_admin.html.mako index cfe2642d521..b8660112f4e 100644 --- a/website/templates/emails/pending_embargo_termination_admin.html.mako +++ b/website/templates/emails/pending_embargo_termination_admin.html.mako @@ -8,10 +8,10 @@

    % if is_initiator:
      You have requested final approvals to end the embargo for your registration
-      titled ${reviewable.title}
+      titled ${reviewable_title}
    % else:
      ${initiated_by} has requested final approvals to end the embargo for your registration
-      titled ${reviewable.title}
+      titled ${reviewable_title}
    % endif
    <br/>

    diff --git a/website/templates/emails/pending_embargo_termination_non_admin.html.mako b/website/templates/emails/pending_embargo_termination_non_admin.html.mako index f345adaae6a..3682a0c44c4 100644 --- a/website/templates/emails/pending_embargo_termination_non_admin.html.mako +++ b/website/templates/emails/pending_embargo_termination_non_admin.html.mako @@ -7,7 +7,7 @@ Hello ${user.fullname},

    ${initiated_by} has requested final approvals to end the embargo for your registration
-      titled ${reviewable.title}
+      titled ${reviewable_title}
    <br/>

    If all admins contributors appove, the registration will be made public as part of the diff --git a/website/templates/emails/pending_registration_admin.html.mako b/website/templates/emails/pending_registration_admin.html.mako index bbc1e7821f9..c0c669b4755 100644 --- a/website/templates/emails/pending_registration_admin.html.mako +++ b/website/templates/emails/pending_registration_admin.html.mako @@ -8,10 +8,10 @@

    % if is_initiator:
      You have requested final approvals to submit your registration
-      titled ${reviewable.title}.
+      titled ${reviewable_title}.
    % else:
      ${initiated_by} has requested final approvals to submit your registration
-      titled ${reviewable.title}.
+      titled ${reviewable_title}.
    % endif
    <br/>

    diff --git a/website/templates/emails/pending_registration_non_admin.html.mako b/website/templates/emails/pending_registration_non_admin.html.mako index 30a45aa7b3c..a738960cc90 100644 --- a/website/templates/emails/pending_registration_non_admin.html.mako +++ b/website/templates/emails/pending_registration_non_admin.html.mako @@ -7,7 +7,7 @@ Hello ${user.fullname},

    ${initiated_by} has requested final approvals to submit your registration
-      titled ${reviewable.title}.
+      titled ${reviewable_title}.
    <br/>

    % if is_moderated: diff --git a/website/templates/emails/pending_retraction_admin.html.mako b/website/templates/emails/pending_retraction_admin.html.mako index 38bb71d1a77..e14fe1c1e23 100644 --- a/website/templates/emails/pending_retraction_admin.html.mako +++ b/website/templates/emails/pending_retraction_admin.html.mako @@ -8,10 +8,10 @@

    % if is_initiator:
      You have requested final approvals to withdraw your registration
-      titled ${reviewable.title}
+      titled ${reviewable_title}
    % else:
      ${initiated_by} has requested final approvals to withdraw your registration
-      titled ${reviewable.title}
+      titled ${reviewable_title}
    % endif
    <br/>

    % if reviewable.withdrawal_justification: diff --git a/website/templates/emails/pending_retraction_non_admin.html.mako b/website/templates/emails/pending_retraction_non_admin.html.mako index 606af2481ea..f3d5f0f8975 100644 --- a/website/templates/emails/pending_retraction_non_admin.html.mako +++ b/website/templates/emails/pending_retraction_non_admin.html.mako @@ -7,7 +7,7 @@ Hello ${user.fullname},

    ${initiated_by} has requested final approval to withdraw your registration
-      titled ${reviewable.title}
+      titled ${reviewable_title}
    <br/>

    % if reviewable.withdrawal_justification:

    diff --git a/website/templates/emails/reviews_resubmission_confirmation.html.mako b/website/templates/emails/reviews_resubmission_confirmation.html.mako index 23ce18781ba..3c4499f7dbd 100644 --- a/website/templates/emails/reviews_resubmission_confirmation.html.mako +++ b/website/templates/emails/reviews_resubmission_confirmation.html.mako @@ -5,7 +5,7 @@ Hello ${referrer.fullname},

    - The ${document_type} ${reviewable.title} has been successfully + The ${document_type} ${reviewable_title} has been successfully resubmitted to ${reviewable.provider.name}.

    @@ -20,7 +20,7 @@ for this ${document_type}.

    - If you have been erroneously associated with "${reviewable.title}", then you may visit the ${document_type}'s + If you have been erroneously associated with "${reviewable_title}", then you may visit the ${document_type}'s "Edit" page and remove yourself as a contributor.

    diff --git a/website/templates/emails/reviews_submission_confirmation.html.mako b/website/templates/emails/reviews_submission_confirmation.html.mako index bd541714347..5a2d05e5b34 100644 --- a/website/templates/emails/reviews_submission_confirmation.html.mako +++ b/website/templates/emails/reviews_submission_confirmation.html.mako @@ -3,9 +3,7 @@ <%def name="content()"> <% from website import settings %> <% - isOsfSubmission = reviewable.provider.name == 'Open Science Framework' - if isOsfSubmission: - reviewable.provider.name = 'OSF Preprints' + isOsfSubmission = reviewable_provider_name == 'Open Science Framework' %> @@ -13,16 +11,16 @@

    Hello ${user.fullname},

    - Your ${document_type} ${reviewable.title} has been successfully submitted to ${reviewable.provider.name}. + Your ${document_type} ${reviewable_title} has been successfully submitted to ${reviewable_provider_name}.

    - ${reviewable.provider.name} has chosen to moderate their submissions using a pre-moderation workflow, which means your submission is pending until accepted by a moderator. + ${reviewable_provider_name} has chosen to moderate their submissions using a pre-moderation workflow, which means your submission is pending until accepted by a moderator.

    You will receive a separate notification informing you of any status changes.

    - Learn more about ${reviewable.provider.name} or OSF. + Learn more about ${reviewable_provider_name} or OSF.

    Sincerely, - The ${reviewable.provider.name} and OSF teams. + The ${reviewable_provider_name} and OSF teams.

    % else:
    @@ -30,23 +28,23 @@ % if is_creator:

    Your ${document_type} - ${reviewable.title} - has been successfully submitted to ${reviewable.provider.name}. + ${reviewable_title} + has been successfully submitted to ${reviewable_provider_name}.

    % else:

    ${referrer.fullname} has added you as a contributor to the ${document_type} - ${reviewable.title} - on ${reviewable.provider.name}, which is hosted on the OSF. + ${reviewable_title} + on ${reviewable_provider_name}, which is hosted on the OSF.

    % endif

    % if workflow == 'pre-moderation': - ${reviewable.provider.name} has chosen to moderate their submissions using a pre-moderation workflow, + ${reviewable_provider_name} has chosen to moderate their submissions using a pre-moderation workflow, which means your submission is pending until accepted by a moderator. % elif workflow == 'post-moderation': - ${reviewable.provider.name} has chosen to moderate their submissions using a + ${reviewable_provider_name} has chosen to moderate their submissions using a post-moderation workflow, which means your submission is public and discoverable, while still pending acceptance by a moderator. % else: @@ -94,11 +92,11 @@

    % if not is_creator:

    - If you have been erroneously associated with "${reviewable.title}," then you may visit the ${document_type} + If you have been erroneously associated with "${reviewable_title}," then you may visit the ${document_type} and remove yourself as a contributor.

    % endif -

    Learn more about ${reviewable.provider.name} or OSF.

    +

    Learn more about ${reviewable_provider_name} or OSF.


    Sincerely,
    diff --git a/website/templates/emails/reviews_submission_status.html.mako b/website/templates/emails/reviews_submission_status.html.mako index b0af1a88b45..a4b6c039656 100644 --- a/website/templates/emails/reviews_submission_status.html.mako +++ b/website/templates/emails/reviews_submission_status.html.mako @@ -5,11 +5,11 @@

    % if document_type == 'registration': % if is_rejected: - Your submission ${reviewable.title}, submitted to ${reviewable.provider.name}, + Your submission ${reviewable_title}, submitted to ${reviewable.provider.name}, has not been accepted. Your registration was returned as a draft so you can make the appropriate edits for resubmission. - Click here to view your draft. + Click here to view your draft. % else: - Your submission ${reviewable.title}, submitted to ${reviewable.provider.name}, has been accepted by the moderator. + Your submission ${reviewable_title}, submitted to ${reviewable.provider.name}, has been accepted by the moderator. % endif

    % if notify_comment: @@ -18,7 +18,7 @@ % endif % else: % if workflow == 'pre-moderation': - Your submission ${reviewable.title}, submitted to ${reviewable.provider.name} has + Your submission ${reviewable_title}, submitted to ${reviewable.provider.name} has % if is_rejected: not been accepted. Contributors with admin permissions may edit the ${document_type} and resubmit, at which time it will return to a pending state and be reviewed by a moderator. @@ -26,7 +26,7 @@ been accepted by the moderator and is now discoverable to others. % endif % elif workflow == 'post-moderation': - Your submission ${reviewable.title}, submitted to ${reviewable.provider.name} has + Your submission ${reviewable_title}, submitted to ${reviewable.provider.name} has % if is_rejected: not been accepted and will be made private and not discoverable by others. Contributors with admin permissions may edit the ${document_type} and contact @@ -93,7 +93,7 @@ % endif % if not is_creator:

    - If you have been erroneously associated with "${reviewable.title}," then you + If you have been erroneously associated with "${reviewable_title}," then you may visit the project's "Contributors" page and remove yourself as a contributor.

    % endif diff --git a/website/templates/emails/reviews_update_comment.html.mako b/website/templates/emails/reviews_update_comment.html.mako index 88511a042d6..2dae48ced76 100644 --- a/website/templates/emails/reviews_update_comment.html.mako +++ b/website/templates/emails/reviews_update_comment.html.mako @@ -2,7 +2,7 @@

    Hello ${recipient.fullname},

    - Your ${document_type} "${reviewable.title}" has an updated comment by the moderator:
    + Your ${document_type} "${reviewable_title}" has an updated comment by the moderator:
    ${comment}

    @@ -12,7 +12,7 @@ email notification preferences, visit your user settings.

    - If you have been erroneously associated with "${reviewable.title}", then you may visit the project's + If you have been erroneously associated with "${reviewable_title}", then you may visit the project's "Contributors" page and remove yourself as a contributor.

    diff --git a/website/templates/emails/withdrawal_request_declined.html.mako b/website/templates/emails/withdrawal_request_declined.html.mako index 4c1a693136e..b24ddd861a1 100644 --- a/website/templates/emails/withdrawal_request_declined.html.mako +++ b/website/templates/emails/withdrawal_request_declined.html.mako @@ -9,7 +9,7 @@ % if document_type == 'registration': Dear ${contributor.fullname},

    - Your request to withdraw your registration "${reviewable.title}" from ${reviewable.provider.name} has been declined by the service moderators. The registration is still publicly available on ${reviewable.provider.name}. + Your request to withdraw your registration "${reviewable_title}" from ${reviewable.provider.name} has been declined by the service moderators. The registration is still publicly available on ${reviewable.provider.name}.

    % if notify_comment: The moderator has provided the following comment:
    @@ -18,7 +18,7 @@ % else: Dear ${requester_fullname},

    - Your request to withdraw your ${document_type} "${reviewable.title}" from ${reviewable.provider.name} has been declined by the service moderators. Login and visit your ${document_type} to view their feedback. The ${document_type} is still publicly available on ${reviewable.provider.name}. + Your request to withdraw your ${document_type} "${reviewable_title}" from ${reviewable.provider.name} has been declined by the service moderators. Login and visit your ${document_type} to view their feedback. The ${document_type} is still publicly available on ${reviewable.provider.name}. % endif

    Sincerely,
    diff --git a/website/templates/emails/withdrawal_request_granted.html.mako b/website/templates/emails/withdrawal_request_granted.html.mako index 15c4cb4b1a7..837253f1c67 100644 --- a/website/templates/emails/withdrawal_request_granted.html.mako +++ b/website/templates/emails/withdrawal_request_granted.html.mako @@ -10,9 +10,9 @@

    % if document_type == 'registration': % if force_withdrawal: - A moderator has withdrawn your ${document_type} "${reviewable.title}" from ${reviewable.provider.name}. + A moderator has withdrawn your ${document_type} "${reviewable_title}" from ${reviewable.provider.name}. % else: - Your request to withdraw your ${document_type} "${reviewable.title}" has been approved by ${reviewable.provider.name} moderators. + Your request to withdraw your ${document_type} "${reviewable_title}" has been approved by ${reviewable.provider.name} moderators. % endif % if notify_comment:

    @@ -24,12 +24,12 @@ % else: % if not ever_public: % if is_requester: - You have withdrawn your ${document_type} "${reviewable.title}" from ${reviewable.provider.name}. + You have withdrawn your ${document_type} "${reviewable_title}" from ${reviewable.provider.name}.
    The ${document_type} has been removed from ${reviewable.provider.name}.
    % else: - ${requester_fullname} has withdrawn your ${document_type} "${reviewable.title}" from ${reviewable.provider.name}. + ${requester_fullname} has withdrawn your ${document_type} "${reviewable_title}" from ${reviewable.provider.name}. % if reviewable.withdrawal_justification: ${requester_fullname} provided the following justification: "${reviewable.withdrawal_justification}" % endif @@ -39,12 +39,12 @@ % endif % else: % if is_requester: - Your request to withdraw your ${document_type} "${reviewable.title}" from ${reviewable.provider.name} has been approved by the service moderators. + Your request to withdraw your ${document_type} "${reviewable_title}" from ${reviewable.provider.name} has been approved by the service moderators.
    The ${document_type} has been removed from ${reviewable.provider.name}, but its metadata is still available: title of the withdrawn ${document_type}, its contributor list, abstract, tags, DOI, and reason for withdrawal (if provided).
    % elif force_withdrawal:
-    A moderator has withdrawn your ${document_type} "${reviewable.title}" from ${reviewable.provider.name}.
+    A moderator has withdrawn your ${document_type} "${reviewable_title}" from ${reviewable.provider.name}.
    The ${document_type} has been removed from ${reviewable.provider.name}, but its metadata is still available: title of the withdrawn ${document_type}, its contributor list, abstract, tags, and DOI.
    % if reviewable.withdrawal_justification:
@@ -53,7 +53,7 @@
    % endif
    % else:
-    ${requester_fullname} has withdrawn your ${document_type} "${reviewable.title}" from ${reviewable.provider.name}.
+    ${requester_fullname} has withdrawn your ${document_type} "${reviewable_title}" from ${reviewable.provider.name}.
    The ${document_type} has been removed from ${reviewable.provider.name}, but its metadata is still available: title of the withdrawn ${document_type}, its contributor list, abstract, tags, and DOI. % if reviewable.withdrawal_justification: From 7f6d5e96a77d5d57c4643579f54758a8480a7e21 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Tue, 22 Jul 2025 21:07:39 +0300 Subject: [PATCH 101/176] [ENG-8401] Fixed preprint downloading (#11238) * fixed preprint downloading * fixed nonetype --- addons/base/views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/addons/base/views.py b/addons/base/views.py index a6c90860b98..2c61fdda232 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -1007,7 +1007,8 @@ def persistent_file_download(auth, **kwargs): if not file: guid = Guid.load(id_or_guid) if guid: - file = guid.referent + referent = guid.referent + file = referent.primary_file if type(referent) is Preprint else referent else: raise HTTPError(http_status.HTTP_404_NOT_FOUND, data={ 'message_short': 'File Not Found', From 189854c80e0d87c41c66f8d85b8a3c9e4bbb740e Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Tue, 22 Jul 2025 21:10:04 +0300 Subject: [PATCH 102/176] [ENG-8216] Fixed children deletion on a node page in admin (#11237) * field children deletion in admin * improved performance and removed unused attributes --- admin/nodes/views.py | 12 +++++++----- admin/templates/nodes/children.html | 25 ++++++++----------------- 2 files changed, 15 insertions(+), 22 deletions(-) diff --git a/admin/nodes/views.py b/admin/nodes/views.py index 71c3f60e965..74321c8f908 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -14,7 +14,6 @@ View, FormView, ListView, - TemplateView, ) from django.shortcuts import redirect, reverse, get_object_or_404 from django.urls import reverse_lazy @@ -102,12 +101,16 @@ def get_context_data(self, **kwargs): node = self.get_object() detailed_duplicates = detect_duplicate_notifications(node_id=node.id) - + children = node.get_nodes(is_node_link=False) + # Annotate guid because django templates prohibit accessing attributes that start with underscores + children = AbstractNode.objects.filter( + id__in=[child.id for child in children] + ).prefetch_related('guids').annotate(guid=F('guids___id')) context.update({ 'SPAM_STATUS': SpamStatus, 'STORAGE_LIMITS': settings.StorageLimits, 'node': node, - 'children': node.get_nodes(is_node_link=False), + 'children': children, 'duplicates': detailed_duplicates }) @@ -194,10 +197,9 @@ def add_contributor_removed_log(self, node, user): ).save() -class NodeDeleteView(NodeMixin, TemplateView): +class NodeDeleteView(NodeMixin, View): """ Allows authorized users to mark nodes as deleted. """ - template_name = 'nodes/remove_node.html' permission_required = ('osf.view_node', 'osf.delete_node') raise_exception = True diff --git a/admin/templates/nodes/children.html b/admin/templates/nodes/children.html index 02f92a398ea..a24ba567c07 100644 --- a/admin/templates/nodes/children.html +++ b/admin/templates/nodes/children.html @@ -18,9 +18,7 @@ {% for child in children %} - - {{ child.id }} - + {{ child.guid }} {{ child.title }} {{ child.is_public }} @@ -29,26 +27,19 @@ {% if not child.is_registration %} {% if child.deleted %} -

    + {% csrf_token %}
    {% else %} - - Delete Node - - +
    + {% csrf_token %} + +
    {% endif %} {% endif %} From f756568b8c262590b988b2b8fdd3afeb5addef97 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 22 Jul 2025 13:53:12 -0400 Subject: [PATCH 103/176] update email templates to support JSON event context --- osf/models/schema_response.py | 4 ++-- osf/utils/notifications.py | 10 ++++++---- website/notifications/emails.py | 4 ++-- website/reviews/listeners.py | 15 +++++++------- .../collection_submission_accepted.html.mako | 2 +- .../templates/emails/confirm_erpc.html.mako | 2 +- .../emails/confirm_moderation.html.mako | 2 +- .../contributor_added_default.html.mako | 2 +- .../templates/emails/forward_invite.html.mako | 4 ++-- .../forward_invite_registered.html.mako | 4 ++-- .../templates/emails/invite_default.html.mako | 2 +- .../invite_draft_registration.html.mako | 2 +- .../emails/invite_preprints.html.mako | 2 +- .../emails/invite_preprints_osf.html.mako | 2 +- .../emails/moderator_added.html.mako | 2 +- .../templates/emails/pending_invite.html.mako | 2 +- .../emails/pending_registered.html.mako | 2 +- ...eviews_resubmission_confirmation.html.mako | 2 +- .../reviews_submission_confirmation.html.mako | 6 +++--- .../reviews_submission_status.html.mako | 20 +++++++++---------- ...torage_cap_exceeded_announcement.html.mako | 2 +- .../emails/support_request.html.mako | 6 +++--- 22 files changed, 51 insertions(+), 48 deletions(-) diff --git a/osf/models/schema_response.py b/osf/models/schema_response.py index ec20bc50e6e..84d0a8f46de 100644 --- a/osf/models/schema_response.py +++ b/osf/models/schema_response.py @@ -480,10 +480,10 @@ def _notify_users(self, event, event_initiator): if self.state is ApprovalStates.PENDING_MODERATION: email_context = notifications.get_email_template_context(resource=self.parent) email_context['revision_id'] = self._id - email_context['referrer'] = self.initiator reviews_email_submit_moderators_notifications.send( timestamp=timezone.now(), - context=email_context + context=email_context, + user=self.initiator ) template = EMAIL_TEMPLATES_PER_EVENT.get(event) diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index af90b6cb3fb..78a422e4451 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -37,8 +37,8 @@ def get_email_template_context(resource): def notify_submit(resource, user, *args, **kwargs): context = get_email_template_context(resource) - context['referrer'] = user recipients = list(resource.contributors) + context['referrer_fullname'] = user.fullname reviews_signals.reviews_email_submit.send( context=context, recipients=recipients, @@ -48,12 +48,13 @@ def notify_submit(resource, user, *args, **kwargs): timestamp=timezone.now(), context=context, resource=resource, + user=user ) def notify_resubmit(resource, user, *args, **kwargs): context = get_email_template_context(resource) - context['referrer'] = user + context['referrer_fullname'] = user.fullname context['resubmission'] = True recipients = list(resource.contributors) reviews_signals.reviews_email_submit.send( @@ -66,6 +67,7 @@ def notify_resubmit(resource, user, *args, **kwargs): timestamp=timezone.now(), context=context, resource=resource, + user=user ) @@ -115,10 +117,10 @@ def notify_reject_withdraw_request(resource, action, *args, **kwargs): def notify_moderator_registration_requests_withdrawal(resource, user, *args, **kwargs): context = get_email_template_context(resource) - context['referrer'] = user reviews_signals.reviews_withdraw_requests_notification_moderators.send( timestamp=timezone.now(), - context=context + 
context=context, + user=user ) diff --git a/website/notifications/emails.py b/website/notifications/emails.py index 56f513920af..d28352b2bdd 100644 --- a/website/notifications/emails.py +++ b/website/notifications/emails.py @@ -102,7 +102,7 @@ def store_emails(recipient_ids, notification_type, event, user, node, timestamp, template = f'{template or event}.html.mako' # user whose action triggered email sending - context['user'] = user + context['user_fullname'] = user.fullname node_lineage_ids = get_node_lineage(node) if node else [] for recipient_id in recipient_ids: @@ -112,7 +112,7 @@ def store_emails(recipient_ids, notification_type, event, user, node, timestamp, if recipient.is_disabled: continue context['localized_timestamp'] = localize_timestamp(timestamp, recipient) - context['recipient'] = recipient + context['recipient_fullname'] = recipient.fullname message = mails.render_message(template, **context) digest = NotificationDigest( timestamp=timestamp, diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index 8c03174657f..b00548b326b 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -66,7 +66,7 @@ def reviews_submit_notification(self, recipients, context, resource, template=No @reviews_signals.reviews_email_submit_moderators_notifications.connect -def reviews_submit_notification_moderators(self, timestamp, resource, context): +def reviews_submit_notification_moderators(self, timestamp, resource, context, user): """ Handle email notifications to notify moderators of new submissions or resubmission. """ @@ -88,7 +88,7 @@ def reviews_submit_notification_moderators(self, timestamp, resource, context): raise NotImplementedError(f'unsupported provider type {provider.type}') # Set url for profile image of the submitter - context['profile_image_url'] = get_profile_image_url(context['referrer']) + context['profile_image_url'] = get_profile_image_url(user) # Set message revision_id = context.get('revision_id') @@ -126,7 +126,7 @@ def reviews_submit_notification_moderators(self, timestamp, resource, context): recipient_ids, subscription_type, 'new_pending_submissions', - context['referrer'], + user, resource, timestamp, abstract_provider=provider, @@ -135,11 +135,12 @@ def reviews_submit_notification_moderators(self, timestamp, resource, context): # Handle email notifications to notify moderators of new submissions. @reviews_signals.reviews_withdraw_requests_notification_moderators.connect -def reviews_withdraw_requests_notification_moderators(self, timestamp, context): +def reviews_withdraw_requests_notification_moderators(self, timestamp, context, user): # imports moved here to avoid AppRegistryNotReady error from osf.models import NotificationSubscriptionLegacy from website.profile.utils import get_profile_image_url from website.notifications.emails import store_emails + context['referrer_fullname'] = user.fullname resource = context['reviewable'] provider = resource.provider @@ -153,7 +154,7 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context): # Set message context['message'] = f'has requested withdrawal of "{resource.title}".' 
# Set url for profile image of the submitter - context['profile_image_url'] = get_profile_image_url(context['referrer']) + context['profile_image_url'] = get_profile_image_url(user) # Set submission url context['reviews_submission_url'] = f'{DOMAIN}reviews/registries/{provider._id}/{resource._id}' @@ -165,7 +166,7 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context): email_transactional_ids, 'email_transactional', 'new_pending_withdraw_requests', - context['referrer'], + user, resource, timestamp, abstract_provider=provider, @@ -178,7 +179,7 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context): email_digest_ids, 'email_digest', 'new_pending_withdraw_requests', - context['referrer'], + user, resource, timestamp, abstract_provider=provider, diff --git a/website/templates/emails/collection_submission_accepted.html.mako b/website/templates/emails/collection_submission_accepted.html.mako index 08bd7524d0b..6c29f43dcf3 100644 --- a/website/templates/emails/collection_submission_accepted.html.mako +++ b/website/templates/emails/collection_submission_accepted.html.mako @@ -5,7 +5,7 @@ <%def name="content()"> - Hello ${user.fullname},
    + Hello ${user_fullname},

    % if is_admin: Your request to add ${node.title} to diff --git a/website/templates/emails/confirm_erpc.html.mako b/website/templates/emails/confirm_erpc.html.mako index dd88fde5038..359260cb7f6 100644 --- a/website/templates/emails/confirm_erpc.html.mako +++ b/website/templates/emails/confirm_erpc.html.mako @@ -3,7 +3,7 @@ <%def name="content()"> - Hello ${user.fullname},
    + Hello ${user_fullname},

    Welcome to the Open Science Framework and the Election Research Preacceptance Competition. To continue, please verify your email address by visiting this link:

    diff --git a/website/templates/emails/confirm_moderation.html.mako b/website/templates/emails/confirm_moderation.html.mako index 5e1c74bbe06..69759526d93 100644 --- a/website/templates/emails/confirm_moderation.html.mako +++ b/website/templates/emails/confirm_moderation.html.mako @@ -5,7 +5,7 @@ Hello ${user.fullname},

    - You have been added by ${referrer.fullname}, as ${'an administrator' if is_admin else 'a moderator'} to ${provider.name}, powered by OSF. To set a password for your account, visit:
    + You have been added by ${referrer_fullname}, as ${'an administrator' if is_admin else 'a moderator'} to ${provider.name}, powered by OSF. To set a password for your account, visit:

    ${claim_url}
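Because placeholders such as ${referrer_fullname} in the hunk above are now plain keys, these templates can be rendered from a bare dictionary. A small illustrative sketch using Mako directly (the inline snippet stands in for the full confirm_moderation.html.mako and is not code from this series):

    from mako.template import Template  # Mako is the engine behind these .mako templates

    snippet = Template('Hello,<br>You have been added by ${referrer_fullname}.')
    # Only string keys are supplied; nothing is dereferenced at render time.
    print(snippet.render(referrer_fullname='Grace Hopper'))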

    diff --git a/website/templates/emails/contributor_added_default.html.mako b/website/templates/emails/contributor_added_default.html.mako index 343169f164a..ae07ceb88ef 100644 --- a/website/templates/emails/contributor_added_default.html.mako +++ b/website/templates/emails/contributor_added_default.html.mako @@ -6,7 +6,7 @@ <%! from website import settings %> - Hello ${user.fullname},
    + Hello ${user_fullname},

    ${referrer_name + ' has added you' if referrer_name else 'You have been added'} as a contributor to the project "${node.title}" on the Open Science Framework: ${node.absolute_url}

    diff --git a/website/templates/emails/forward_invite.html.mako b/website/templates/emails/forward_invite.html.mako index d580a906354..b74f9414d84 100644 --- a/website/templates/emails/forward_invite.html.mako +++ b/website/templates/emails/forward_invite.html.mako @@ -6,7 +6,7 @@ <%! from website import settings %> - Hello ${referrer.fullname},
    + Hello ${referrer_fullname},

    You recently added ${fullname} to "${node.title}". ${fullname} wants to claim their account, but the email address they provided is different from the one you provided. To maintain security of your project, we are sending the account confirmation to you first.

    @@ -20,7 +20,7 @@
    Hello ${fullname},

    - You have been added by ${referrer.fullname} as a contributor to the project "${node.title}" on the Open Science Framework. To set a password for your account, visit:
    + You have been added by ${referrer_fullname} as a contributor to the project "${node.title}" on the Open Science Framework. To set a password for your account, visit:

    ${claim_url}

    diff --git a/website/templates/emails/forward_invite_registered.html.mako b/website/templates/emails/forward_invite_registered.html.mako index 0226b20635b..6131406e56b 100644 --- a/website/templates/emails/forward_invite_registered.html.mako +++ b/website/templates/emails/forward_invite_registered.html.mako @@ -3,7 +3,7 @@ <%def name="content()"> - Hello ${referrer.fullname},
    + Hello ${referrer_fullname},

    You recently added ${fullname} to "${node.title}". ${fullname} wants to claim their account, but the email address they provided is different from the one you provided. To maintain security of your project, we are sending the account confirmation to you first.

    @@ -17,7 +17,7 @@
    Hello ${fullname},

    - You have been added by ${referrer.fullname} as a contributor to the project "${node.title}" on the Open Science Framework. To claim yourself as a contributor to the project, visit this url:
    + You have been added by ${referrer_fullname} as a contributor to the project "${node.title}" on the Open Science Framework. To claim yourself as a contributor to the project, visit this url:

    ${claim_url}

    diff --git a/website/templates/emails/invite_default.html.mako b/website/templates/emails/invite_default.html.mako index 1f373782190..e10f1eaf3a2 100644 --- a/website/templates/emails/invite_default.html.mako +++ b/website/templates/emails/invite_default.html.mako @@ -8,7 +8,7 @@ %> Hello ${fullname},

    - You have been added by ${referrer.fullname} as a contributor to the project "${node.title}" on the Open Science Framework. To set a password for your account, visit:
    + You have been added by ${referrer_fullname} as a contributor to the project "${node.title}" on the Open Science Framework. To set a password for your account, visit:

    ${claim_url}

    diff --git a/website/templates/emails/invite_draft_registration.html.mako b/website/templates/emails/invite_draft_registration.html.mako index 017b0bfd4d9..6c26f95a167 100644 --- a/website/templates/emails/invite_draft_registration.html.mako +++ b/website/templates/emails/invite_draft_registration.html.mako @@ -8,7 +8,7 @@ %> Hello ${fullname},

    - ${referrer.fullname} has added you as a contributor on + ${referrer_fullname} has added you as a contributor on % if not node.title or node.title == 'Untitled': a new registration draft % else: diff --git a/website/templates/emails/invite_preprints.html.mako b/website/templates/emails/invite_preprints.html.mako index 5a417a3c9b5..f389a6da918 100644 --- a/website/templates/emails/invite_preprints.html.mako +++ b/website/templates/emails/invite_preprints.html.mako @@ -8,7 +8,7 @@ %> Hello ${fullname},

    - You have been added by ${referrer.fullname} as a contributor to the ${branded_service.preprint_word} "${node.title}" on ${branded_service.name}, powered by the Open Science Framework. To set a password for your account, visit:
    + You have been added by ${referrer_fullname} as a contributor to the ${branded_service.preprint_word} "${node.title}" on ${branded_service.name}, powered by the Open Science Framework. To set a password for your account, visit:

    ${claim_url}

    diff --git a/website/templates/emails/invite_preprints_osf.html.mako b/website/templates/emails/invite_preprints_osf.html.mako index 36bd9528d47..e1af2edb9a0 100644 --- a/website/templates/emails/invite_preprints_osf.html.mako +++ b/website/templates/emails/invite_preprints_osf.html.mako @@ -8,7 +8,7 @@ %> Hello ${fullname},

    - You have been added by ${referrer.fullname} as a contributor to the preprint "${node.title}" on the Open Science Framework. To set a password for your account, visit:
    + You have been added by ${referrer_fullname} as a contributor to the preprint "${node.title}" on the Open Science Framework. To set a password for your account, visit:

    ${claim_url}
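A rename sweep like this one is easy to leave incomplete, so a quick scan for placeholders that still dereference objects can help during review. The check below is an illustrative, stand-alone sketch, not tooling from this series, and it will also flag expressions such as ${node.title} that these patches intentionally keep:

    import re

    # Flags placeholders that reach into an object, e.g. ${user.fullname},
    # while ignoring flat keys such as ${user_fullname}.
    DOTTED_PLACEHOLDER = re.compile(r'\$\{\s*\w+\.\w+')

    def dotted_placeholders(template_source):
        return DOTTED_PLACEHOLDER.findall(template_source)

    print(dotted_placeholders('Hello ${user_fullname}, see ${node.title} for details.'))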

    diff --git a/website/templates/emails/moderator_added.html.mako b/website/templates/emails/moderator_added.html.mako index 892d216c094..7da47728f1e 100644 --- a/website/templates/emails/moderator_added.html.mako +++ b/website/templates/emails/moderator_added.html.mako @@ -5,7 +5,7 @@ Hello ${user.fullname},

    - You have been added by ${referrer.fullname} as ${'an administrator' if is_admin else 'a moderator'} to ${provider.name}, powered by OSF.
    + You have been added by ${referrer_fullname} as ${'an administrator' if is_admin else 'a moderator'} to ${provider.name}, powered by OSF.

    You will automatically be subscribed to notification emails for new submissions to ${provider.name}.

    diff --git a/website/templates/emails/pending_invite.html.mako b/website/templates/emails/pending_invite.html.mako index 7d4e72017e5..7c2dcd91758 100644 --- a/website/templates/emails/pending_invite.html.mako +++ b/website/templates/emails/pending_invite.html.mako @@ -7,7 +7,7 @@
    We received your request to claim an OSF account and become a contributor for "${node.title}".

    - To confirm your identity, ${referrer.fullname} has been sent an email to forward to you with your confirmation link.
    + To confirm your identity, ${referrer_fullname} has been sent an email to forward to you with your confirmation link.

    This link will allow you to complete your registration.
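Since the stated goal of this part of the series is a JSON event context, a cheap guard where the context is assembled is to assert that it actually serializes. An illustrative sketch, not code from this patch set:

    import json

    def assert_json_safe(event_context):
        # json.dumps raises TypeError on model instances, querysets, datetimes, etc.,
        # which is exactly what a flat notification context should not contain.
        json.dumps(event_context)
        return event_context

    assert_json_safe({'referrer_fullname': 'Ada Lovelace', 'claim_url': 'https://osf.io/claim/abc123/'})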

    diff --git a/website/templates/emails/pending_registered.html.mako b/website/templates/emails/pending_registered.html.mako index 36015a17d1a..4389500579b 100644 --- a/website/templates/emails/pending_registered.html.mako +++ b/website/templates/emails/pending_registered.html.mako @@ -7,7 +7,7 @@
    We received your request to become a contributor for "${node.title}".

    - To confirm your identity, ${referrer.fullname} has been sent an email to forward to you with your confirmation link.
    + To confirm your identity, ${referrer_fullname} has been sent an email to forward to you with your confirmation link.

    This link will allow you to contribute to "${node.title}".

    diff --git a/website/templates/emails/reviews_resubmission_confirmation.html.mako b/website/templates/emails/reviews_resubmission_confirmation.html.mako index 3c4499f7dbd..9fac13cc2d6 100644 --- a/website/templates/emails/reviews_resubmission_confirmation.html.mako +++ b/website/templates/emails/reviews_resubmission_confirmation.html.mako @@ -2,7 +2,7 @@ <%def name="content()">

    - Hello ${referrer.fullname}, + Hello ${referrer_fullname},

    The ${document_type} ${reviewable_title} has been successfully diff --git a/website/templates/emails/reviews_submission_confirmation.html.mako b/website/templates/emails/reviews_submission_confirmation.html.mako index 5a2d05e5b34..ad780a8506f 100644 --- a/website/templates/emails/reviews_submission_confirmation.html.mako +++ b/website/templates/emails/reviews_submission_confirmation.html.mako @@ -9,7 +9,7 @@ % if document_type == 'registration':

    - Hello ${user.fullname}, + Hello ${user_fullname},

    Your ${document_type} ${reviewable_title} has been successfully submitted to ${reviewable_provider_name}.

    @@ -24,7 +24,7 @@

    % else:
    -

    Hello ${user.fullname},

    +

    Hello ${user_fullname},

    % if is_creator:

    Your ${document_type} @@ -33,7 +33,7 @@

    % else:

    - ${referrer.fullname} has added you as a contributor to the + ${referrer_fullname} has added you as a contributor to the ${document_type} ${reviewable_title} on ${reviewable_provider_name}, which is hosted on the OSF. diff --git a/website/templates/emails/reviews_submission_status.html.mako b/website/templates/emails/reviews_submission_status.html.mako index a4b6c039656..bf3f0ca2bf7 100644 --- a/website/templates/emails/reviews_submission_status.html.mako +++ b/website/templates/emails/reviews_submission_status.html.mako @@ -1,15 +1,15 @@ ## -*- coding: utf-8 -*- <% from website import settings %>

    -

    Hello ${recipient.fullname},

    +

    Hello ${recipient_fullname},

    % if document_type == 'registration': % if is_rejected: - Your submission ${reviewable_title}, submitted to ${reviewable.provider.name}, + Your submission ${reviewable_title}, submitted to ${reviewable_provider_name}, has not been accepted. Your registration was returned as a draft so you can make the appropriate edits for resubmission. Click here to view your draft. % else: - Your submission ${reviewable_title}, submitted to ${reviewable.provider.name}, has been accepted by the moderator. + Your submission ${reviewable_title}, submitted to ${reviewable_provider_name}, has been accepted by the moderator. % endif

    % if notify_comment: @@ -18,7 +18,7 @@ % endif % else: % if workflow == 'pre-moderation': - Your submission ${reviewable_title}, submitted to ${reviewable.provider.name} has + Your submission ${reviewable_title}, submitted to ${reviewable_provider_name} has % if is_rejected: not been accepted. Contributors with admin permissions may edit the ${document_type} and resubmit, at which time it will return to a pending state and be reviewed by a moderator. @@ -26,7 +26,7 @@ been accepted by the moderator and is now discoverable to others. % endif % elif workflow == 'post-moderation': - Your submission ${reviewable_title}, submitted to ${reviewable.provider.name} has + Your submission ${reviewable_title}, submitted to ${reviewable_provider_name} has % if is_rejected: not been accepted and will be made private and not discoverable by others. Contributors with admin permissions may edit the ${document_type} and contact @@ -66,17 +66,17 @@ - + twitter - + facebook - + LinkedIn @@ -98,10 +98,10 @@

    % endif % endif -

    Learn more about ${reviewable.provider.name} or OSF.

    +

    Learn more about ${reviewable_provider_name} or OSF.


    Sincerely,
    - The ${reviewable.provider.name} and OSF teams + The ${reviewable_provider_name} and OSF teams

    diff --git a/website/templates/emails/storage_cap_exceeded_announcement.html.mako b/website/templates/emails/storage_cap_exceeded_announcement.html.mako index fe007e896da..5360012f11f 100644 --- a/website/templates/emails/storage_cap_exceeded_announcement.html.mako +++ b/website/templates/emails/storage_cap_exceeded_announcement.html.mako @@ -6,7 +6,7 @@ <%! from website import settings %> - Hi ${user.given_name or user.fullname},
    + Hi ${user_fullname},

    Thank you for storing your research materials on OSF. We have updated the OSF Storage capacity to 5 GB for private content and 50 GB for public content. None of your current files stored on OSF Storage will be affected, but after November 3, 2020 projects exceeding capacity will no longer accept new file uploads.
    diff --git a/website/templates/emails/support_request.html.mako b/website/templates/emails/support_request.html.mako index 5d2ad1794f6..e16bca8f346 100644 --- a/website/templates/emails/support_request.html.mako +++ b/website/templates/emails/support_request.html.mako @@ -3,11 +3,11 @@ <%def name="content()"> - ID: ${user._id}
    + ID: ${user__id}

    - Profile: ${user.absolute_url}
    + Profile: ${user_absolute_url}

    - Primary Email: ${user.username}
    + Primary Email: ${user_username}
    From ed5342c280d1652b8e69b9c537fe692ae66257ed Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 22 Jul 2025 15:20:59 -0400 Subject: [PATCH 104/176] add more provider notificationtypes --- api/providers/serializers.py | 24 ++++----- .../test_preprint_provider_moderator_list.py | 49 ++++++++++++------- notifications.yaml | 10 +++- 3 files changed, 51 insertions(+), 32 deletions(-) diff --git a/api/providers/serializers.py b/api/providers/serializers.py index ef89388e281..b10f8290bd8 100644 --- a/api/providers/serializers.py +++ b/api/providers/serializers.py @@ -10,11 +10,10 @@ from api.preprints.serializers import PreprintProviderRelationshipField from api.providers.workflows import Workflows from api.base.metrics import MetricsSerializerMixin -from osf.models import CitationStyle +from osf.models import CitationStyle, NotificationType from osf.models.user import Email, OSFUser from osf.models.validators import validate_email from osf.utils.permissions import REVIEW_GROUPS, ADMIN -from website import mails from website.settings import DOMAIN @@ -313,12 +312,11 @@ def create(self, validated_data): address = validated_data.pop('email', '') provider = self.context['provider'] context = { - 'referrer': auth.user, + 'referrer_fullname': auth.user.fullname, } if user_id and address: raise ValidationError('Cannot specify both "id" and "email".') - user = None if user_id: user = OSFUser.load(user_id) elif address: @@ -344,15 +342,15 @@ def create(self, validated_data): if not user: raise ValidationError('Unable to find specified user.') - context['user'] = user - context['provider'] = provider + context['user_fullname'] = user.fullname + context['provider_name'] = provider.name if bool(get_perms(user, provider)): raise ValidationError('Specified user is already a moderator.') if 'claim_url' in context: - template = mails.CONFIRM_EMAIL_MODERATION(provider) + template = NotificationType.Type.PROVIDER_CONFIRM_EMAIL_MODERATION else: - template = mails.MODERATOR_ADDED(provider) + template = NotificationType.Type.PROVIDER_MODERATOR_ADDED perm_group = validated_data.pop('permission_group', '') if perm_group not in REVIEW_GROUPS: @@ -364,10 +362,12 @@ def create(self, validated_data): provider.add_to_group(user, perm_group) setattr(user, 'permission_group', perm_group) # Allows reserialization - mails.send_mail( - user.username, - template, - **context, + print(template, context) + NotificationType.objects.get( + name=template, + ).emit( + user=user, + event_context=context, ) return user diff --git a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py index 8998d2a85ca..ac075faddeb 100644 --- a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py +++ b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py @@ -1,11 +1,13 @@ import pytest from api.base.settings.defaults import API_BASE +from osf.models import NotificationType from osf_tests.factories import ( AuthUserFactory, PreprintProviderFactory, ) from osf.utils import permissions +from tests.utils import capture_notifications @pytest.mark.usefixtures('mock_send_grid') @@ -81,51 +83,60 @@ def test_list_post_unauthorized(self, mock_send_grid, app, url, nonmoderator, mo assert mock_send_grid.call_count == 0 - def test_list_post_admin_success_existing_user(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_success_existing_user(self, app, url, 
nonmoderator, moderator, admin): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 assert res.json['data']['id'] == nonmoderator._id assert res.json['data']['attributes']['permission_group'] == 'moderator' - assert mock_send_grid.call_count == 1 + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - def test_list_post_admin_failure_existing_moderator(self, mock_send_grid, app, url, moderator, admin, provider): + def test_list_post_admin_failure_existing_moderator(self, mock_send_grid, app, url, moderator, admin): payload = self.create_payload(user_id=moderator._id, permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + assert not notifications assert res.status_code == 400 - assert mock_send_grid.call_count == 0 - def test_list_post_admin_failure_unreg_moderator(self, mock_send_grid, app, url, moderator, nonmoderator, admin, provider): + def test_list_post_admin_failure_unreg_moderator(self, app, url, moderator, nonmoderator, admin): unreg_user = {'full_name': 'Son Goku', 'email': 'goku@dragonball.org'} # test_user_with_no_moderator_admin_permissions payload = self.create_payload(permission_group='moderator', **unreg_user) res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) assert res.status_code == 403 - assert mock_send_grid.call_count == 0 # test_user_with_moderator_admin_permissions payload = self.create_payload(permission_group='moderator', **unreg_user) - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 - assert mock_send_grid.call_count == 1 - assert mock_send_grid.call_args[1]['to_addr'] == unreg_user['email'] + assert len(notifications) == 1 + assert notifications[0]['kwargs']['user'].username == unreg_user['email'] - def test_list_post_admin_failure_invalid_group(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): + def test_list_post_admin_failure_invalid_group(self, app, url, nonmoderator, moderator, admin): payload = self.create_payload(user_id=nonmoderator._id, permission_group='citizen') - res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) assert res.status_code == 400 - assert mock_send_grid.call_count == 0 - - def test_list_post_admin_success_email(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): - payload = self.create_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth) + assert not notifications + + def test_list_post_admin_success_email(self, app, url, nonmoderator, moderator, admin): + payload = self.create_payload( + email='somenewuser@gmail.com', + full_name='Some User', + permission_group='moderator' + ) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) + assert len(notifications) == 1 
assert res.status_code == 201 assert len(res.json['data']['id']) == 5 assert res.json['data']['attributes']['permission_group'] == 'moderator' assert 'email' not in res.json['data']['attributes'] - assert mock_send_grid.call_count == 1 def test_list_moderators_alphabetically(self, app, url, admin, moderator, provider): admin.fullname = 'Alice Alisdottir' diff --git a/notifications.yaml b/notifications.yaml index 6bd704f69cc..0a18afdf681 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -132,8 +132,16 @@ notification_types: template: 'website/templates/emails/contributor_added_preprints.html.mako' - name: provider_reviews_submission_confirmation __docs__: ... - object_content_type_model_name: preprint + object_content_type_model_name: abstractprovider template: 'website/templates/emails/reviews_submission_confirmation.html.mako' + - name: provider_confirm_email_moderation + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/confirm_moderation.html.mako' + - name: provider_moderator_added + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/moderator_added.html.mako' #### NODE - name: node_file_updated From b3ca1b8f67746b986a570edd6329aacefb341bd7 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 22 Jul 2025 16:47:04 -0400 Subject: [PATCH 105/176] fix up user claim message notification tests --- osf/models/notification.py | 6 +- osf/models/notification_subscription.py | 2 +- osf/models/notification_type.py | 12 +- tests/test_adding_contributor_views.py | 460 +--------------------- tests/test_claim_views.py | 491 ++++++++++++++++++++++++ website/project/views/contributor.py | 6 +- 6 files changed, 511 insertions(+), 466 deletions(-) create mode 100644 tests/test_claim_views.py diff --git a/osf/models/notification.py b/osf/models/notification.py index 5d339150111..4294eb797eb 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -18,13 +18,13 @@ class Notification(models.Model): seen = models.DateTimeField(null=True, blank=True) created = models.DateTimeField(auto_now_add=True) - def send(self, protocol_type='email', recipient=None): + def send(self, protocol_type='email', destination_address=None): if not settings.USE_EMAIL: return if not protocol_type == 'email': raise NotImplementedError(f'Protocol type {protocol_type}. 
Email notifications are only implemented.') - recipient_address = getattr(recipient, 'username', None) or self.subscription.user.username + recipient_address = destination_address or self.subscription.user.username if protocol_type == 'email' and settings.DEV_MODE and settings.ENABLE_TEST_EMAIL: email.send_email_over_smtp( @@ -42,7 +42,7 @@ def send(self, protocol_type='email', recipient=None): ) elif protocol_type == 'email': email.send_email_with_send_grid( - getattr(recipient, 'username', None) or self.subscription.user, + self.subscription.user, self.subscription.notification_type, self.event_context ) diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py index a1c9467b50e..41b88ba9ea2 100644 --- a/osf/models/notification_subscription.py +++ b/osf/models/notification_subscription.py @@ -52,7 +52,7 @@ class Meta: verbose_name = 'Notification Subscription' verbose_name_plural = 'Notification Subscriptions' - def emit(self, user, subscribed_object=None, event_context=None): + def emit(self, event_context=None): """Emit a notification to a user by creating Notification and NotificationSubscription objects. Args: diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 14bfa97eac2..19fee3e10e8 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -195,11 +195,19 @@ def desk_types(cls): help_text='Template used to render the subject line of email. Supports Django template syntax.' ) - def emit(self, user, subscribed_object=None, message_frequency='instantly', event_context=None): + def emit( + self, + user, + destination_address=None, + subscribed_object=None, + message_frequency='instantly', + event_context=None + ): """Emit a notification to a user by creating Notification and NotificationSubscription objects. Args: user (OSFUser): The recipient of the notification. + destination_address (optional): For use in case where user's maybe using alternate email addresses. subscribed_object (optional): The object the subscription is related to. message_frequency (optional): Initializing message frequency. event_context (dict, optional): Context for rendering the notification template. 
@@ -216,7 +224,7 @@ def emit(self, user, subscribed_object=None, message_frequency='instantly', even Notification.objects.create( subscription=subscription, event_context=event_context - ).send() + ).send(destination_address=destination_address) def add_user_to_subscription(self, user, *args, **kwargs): """ diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 003a8f886ad..6bbd70681b6 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -7,26 +7,18 @@ import pytest from django.core.exceptions import ValidationError -from flask import g from pytest import approx from rest_framework import status as http_status from framework import auth -from framework.auth import Auth, authenticate, cas -from framework.auth.utils import impute_names_model +from framework.auth import Auth from framework.exceptions import HTTPError -from framework.flask import redirect -from osf.models import ( - OSFUser, - Tag, - NodeRelation, -) +from osf.models import NodeRelation from osf.utils import permissions from osf_tests.factories import ( fake_email, AuthUserFactory, NodeFactory, - PreprintFactory, ProjectFactory, RegistrationProviderFactory, UserFactory, @@ -38,22 +30,16 @@ get_default_metaschema, OsfTestCase, ) -from tests.test_cas_authentication import generate_external_user_with_resp -from website import settings from website.profile.utils import add_contributor_json, serialize_unregistered from website.project.signals import contributor_added from website.project.views.contributor import ( deserialize_contributors, notify_added_contributor, send_claim_email, - send_claim_registered_email, ) -from website.util.metrics import OsfSourceTags, OsfClaimedTags, provider_source_tag, provider_claimed_tag from conftest import start_mock_notification_send @pytest.mark.enable_implicit_clean -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) class TestAddingContributorViews(OsfTestCase): def setUp(self): @@ -433,8 +419,6 @@ def test_add_contribs_to_multiple_nodes(self): assert child.contributors.count() == n_contributors_pre + len(payload['users']) -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) class TestUserInviteViews(OsfTestCase): def setUp(self): @@ -443,8 +427,6 @@ def setUp(self): self.project = ProjectFactory(creator=self.user) self.invite_url = f'/api/v1/project/{self.project._primary_key}/invite_contributor/' - self.mock_notification_send = start_mock_notification_send(self) - def test_invite_contributor_post_if_not_in_db(self): name, email = fake.name(), fake_email() res = self.app.post( @@ -555,441 +537,3 @@ def test_send_claim_email_before_throttle_expires(self): assert not self.mock_notification_send.called -@pytest.mark.enable_implicit_clean -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) -class TestClaimViews(OsfTestCase): - - def setUp(self): - super().setUp() - self.referrer = AuthUserFactory() - self.project = ProjectFactory(creator=self.referrer, is_public=True) - self.project_with_source_tag = ProjectFactory(creator=self.referrer, is_public=True) - self.preprint_with_source_tag = PreprintFactory(creator=self.referrer, is_public=True) - osf_source_tag, created = Tag.all_tags.get_or_create(name=OsfSourceTags.Osf.value, system=True) - preprint_source_tag, created = 
Tag.all_tags.get_or_create(name=provider_source_tag(self.preprint_with_source_tag.provider._id, 'preprint'), system=True) - self.project_with_source_tag.add_system_tag(osf_source_tag.name) - self.preprint_with_source_tag.add_system_tag(preprint_source_tag.name) - self.given_name = fake.name() - self.given_email = fake_email() - self.project_with_source_tag.add_unregistered_contributor( - fullname=self.given_name, - email=self.given_email, - auth=Auth(user=self.referrer) - ) - self.preprint_with_source_tag.add_unregistered_contributor( - fullname=self.given_name, - email=self.given_email, - auth=Auth(user=self.referrer) - ) - self.user = self.project.add_unregistered_contributor( - fullname=self.given_name, - email=self.given_email, - auth=Auth(user=self.referrer) - ) - self.project.save() - - self.mock_notification_send = start_mock_notification_send(self) - - @mock.patch('website.project.views.contributor.send_claim_email') - def test_claim_user_already_registered_redirects_to_claim_user_registered(self, claim_email): - name = fake.name() - email = fake_email() - - # project contributor adds an unregistered contributor (without an email) on public project - unregistered_user = self.project.add_unregistered_contributor( - fullname=name, - email=None, - auth=Auth(user=self.referrer) - ) - assert unregistered_user in self.project.contributors - - # unregistered user comes along and claims themselves on the public project, entering an email - invite_url = self.project.api_url_for('claim_user_post', uid='undefined') - self.app.post(invite_url, json={ - 'pk': unregistered_user._primary_key, - 'value': email - }) - assert claim_email.call_count == 1 - - # set unregistered record email since we are mocking send_claim_email() - unclaimed_record = unregistered_user.get_unclaimed_record(self.project._primary_key) - unclaimed_record.update({'email': email}) - unregistered_user.save() - - # unregistered user then goes and makes an account with same email, before claiming themselves as contributor - UserFactory(username=email, fullname=name) - - # claim link for the now registered email is accessed while not logged in - token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token'] - claim_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/?token={token}' - res = self.app.get(claim_url) - - # should redirect to 'claim_user_registered' view - claim_registered_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/verify/{token}/' - assert res.status_code == 302 - assert claim_registered_url in res.headers.get('Location') - - @mock.patch('website.project.views.contributor.send_claim_email') - def test_claim_user_already_registered_secondary_email_redirects_to_claim_user_registered(self, claim_email): - name = fake.name() - email = fake_email() - secondary_email = fake_email() - - # project contributor adds an unregistered contributor (without an email) on public project - unregistered_user = self.project.add_unregistered_contributor( - fullname=name, - email=None, - auth=Auth(user=self.referrer) - ) - assert unregistered_user in self.project.contributors - - # unregistered user comes along and claims themselves on the public project, entering an email - invite_url = self.project.api_url_for('claim_user_post', uid='undefined') - self.app.post(invite_url, json={ - 'pk': unregistered_user._primary_key, - 'value': secondary_email - }) - assert claim_email.call_count == 1 - - # set unregistered record email since we are mocking send_claim_email() - unclaimed_record = 
unregistered_user.get_unclaimed_record(self.project._primary_key) - unclaimed_record.update({'email': secondary_email}) - unregistered_user.save() - - # unregistered user then goes and makes an account with same email, before claiming themselves as contributor - registered_user = UserFactory(username=email, fullname=name) - registered_user.emails.create(address=secondary_email) - registered_user.save() - - # claim link for the now registered email is accessed while not logged in - token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token'] - claim_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/?token={token}' - res = self.app.get(claim_url) - - # should redirect to 'claim_user_registered' view - claim_registered_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/verify/{token}/' - assert res.status_code == 302 - assert claim_registered_url in res.headers.get('Location') - - def test_claim_user_invited_with_no_email_posts_to_claim_form(self): - given_name = fake.name() - invited_user = self.project.add_unregistered_contributor( - fullname=given_name, - email=None, - auth=Auth(user=self.referrer) - ) - self.project.save() - - url = invited_user.get_claim_url(self.project._primary_key) - res = self.app.post(url, data={ - 'password': 'bohemianrhap', - 'password2': 'bohemianrhap' - }) - assert res.status_code == 400 - - def test_claim_user_post_with_registered_user_id(self): - # registered user who is attempting to claim the unclaimed contributor - reg_user = UserFactory() - payload = { - # pk of unreg user record - 'pk': self.user._primary_key, - 'claimerId': reg_user._primary_key - } - url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' - res = self.app.post(url, json=payload) - - # mail was sent - assert self.mock_notification_send.call_count == 2 - # ... 
to the correct address - referrer_call = self.mock_notification_send.call_args_list[0] - claimer_call = self.mock_notification_send.call_args_list[1] - - assert referrer_call[1]['to_addr'] == self.referrer.email - assert claimer_call[1]['to_addr'] == reg_user.email - - # view returns the correct JSON - assert res.json == { - 'status': 'success', - 'email': reg_user.username, - 'fullname': self.given_name, - } - - def test_send_claim_registered_email(self): - reg_user = UserFactory() - send_claim_registered_email( - claimer=reg_user, - unclaimed_user=self.user, - node=self.project - ) - assert self.mock_notification_send.call_count == 2 - first_call_args = self.mock_notification_send.call_args_list[0][1] - print(first_call_args) - second_call_args = self.mock_notification_send.call_args_list[1][1] - print(second_call_args) - - assert second_call_args['to_addr'] == reg_user.email - - def test_send_claim_registered_email_before_throttle_expires(self): - reg_user = UserFactory() - send_claim_registered_email( - claimer=reg_user, - unclaimed_user=self.user, - node=self.project, - ) - self.mock_notification_send.reset_mock() - # second call raises error because it was called before throttle period - with pytest.raises(HTTPError): - send_claim_registered_email( - claimer=reg_user, - unclaimed_user=self.user, - node=self.project, - ) - assert not self.mock_notification_send.called - - @mock.patch('website.project.views.contributor.send_claim_registered_email') - def test_claim_user_post_with_email_already_registered_sends_correct_email( - self, send_claim_registered_email): - reg_user = UserFactory() - payload = { - 'value': reg_user.username, - 'pk': self.user._primary_key - } - url = self.project.api_url_for('claim_user_post', uid=self.user._id) - self.app.post(url, json=payload) - assert send_claim_registered_email.called - - def test_user_with_removed_unclaimed_url_claiming(self): - """ Tests that when an unclaimed user is removed from a project, the - unregistered user object does not retain the token. - """ - self.project.remove_contributor(self.user, Auth(user=self.referrer)) - - assert self.project._primary_key not in self.user.unclaimed_records.keys() - - def test_user_with_claim_url_cannot_claim_twice(self): - """ Tests that when an unclaimed user is replaced on a project with a - claimed user, the unregistered user object does not retain the token. 
- """ - reg_user = AuthUserFactory() - - self.project.replace_contributor(self.user, reg_user) - - assert self.project._primary_key not in self.user.unclaimed_records.keys() - - def test_claim_user_form_redirects_to_password_confirm_page_if_user_is_logged_in(self): - reg_user = AuthUserFactory() - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.get(url, auth=reg_user.auth) - assert res.status_code == 302 - res = self.app.get(url, auth=reg_user.auth, follow_redirects=True) - token = self.user.get_unclaimed_record(self.project._primary_key)['token'] - expected = self.project.web_url_for( - 'claim_user_registered', - uid=self.user._id, - token=token, - ) - assert res.request.path == expected - - @mock.patch('framework.auth.cas.make_response_from_ticket') - def test_claim_user_when_user_is_registered_with_orcid(self, mock_response_from_ticket): - # TODO: check in qa url encoding - token = self.user.get_unclaimed_record(self.project._primary_key)['token'] - url = f'/user/{self.user._id}/{self.project._id}/claim/verify/{token}/' - # logged out user gets redirected to cas login - res1 = self.app.get(url) - assert res1.status_code == 302 - res = self.app.resolve_redirect(self.app.get(url)) - service_url = f'http://localhost{url}' - expected = cas.get_logout_url(service_url=cas.get_login_url(service_url=service_url)) - assert res1.location == expected - - # user logged in with orcid automatically becomes a contributor - orcid_user, validated_credentials, cas_resp = generate_external_user_with_resp(url) - mock_response_from_ticket.return_value = authenticate( - orcid_user, - redirect(url) - ) - orcid_user.set_unusable_password() - orcid_user.save() - - # The request to OSF with CAS service ticket must not have cookie and/or auth. - service_ticket = fake.md5() - url_with_service_ticket = f'{url}?ticket={service_ticket}' - res = self.app.get(url_with_service_ticket) - # The response of this request is expected to be a 302 with `Location`. - # And the redirect URL must equal to the originial service URL - assert res.status_code == 302 - redirect_url = res.headers['Location'] - assert redirect_url == url - # The response of this request is expected have the `Set-Cookie` header with OSF cookie. - # And the cookie must belong to the ORCiD user. 
- raw_set_cookie = res.headers['Set-Cookie'] - assert raw_set_cookie - simple_cookie = SimpleCookie() - simple_cookie.load(raw_set_cookie) - cookie_dict = {key: value.value for key, value in simple_cookie.items()} - osf_cookie = cookie_dict.get(settings.COOKIE_NAME, None) - assert osf_cookie is not None - user = OSFUser.from_cookie(osf_cookie) - assert user._id == orcid_user._id - # The ORCiD user must be different from the unregistered user created when the contributor was added - assert user._id != self.user._id - - # Must clear the Flask g context manual and set the OSF cookie to context - g.current_session = None - self.app.set_cookie(settings.COOKIE_NAME, osf_cookie) - res = self.app.resolve_redirect(res) - assert res.status_code == 302 - assert self.project.is_contributor(orcid_user) - assert self.project.url in res.headers.get('Location') - - def test_get_valid_form(self): - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.get(url, follow_redirects=True) - assert res.status_code == 200 - - def test_invalid_claim_form_raise_400(self): - uid = self.user._primary_key - pid = self.project._primary_key - url = f'/user/{uid}/{pid}/claim/?token=badtoken' - res = self.app.get(url, follow_redirects=True) - assert res.status_code == 400 - - @mock.patch('osf.models.OSFUser.update_search_nodes') - def test_posting_to_claim_form_with_valid_data(self, mock_update_search_nodes): - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.post(url, data={ - 'username': self.user.username, - 'password': 'killerqueen', - 'password2': 'killerqueen' - }) - - assert res.status_code == 302 - location = res.headers.get('Location') - assert 'login?service=' in location - assert 'username' in location - assert 'verification_key' in location - assert self.project._primary_key in location - - self.user.reload() - assert self.user.is_registered - assert self.user.is_active - assert self.project._primary_key not in self.user.unclaimed_records - - @mock.patch('osf.models.OSFUser.update_search_nodes') - def test_posting_to_claim_form_removes_all_unclaimed_data(self, mock_update_search_nodes): - # user has multiple unclaimed records - p2 = ProjectFactory(creator=self.referrer) - self.user.add_unclaimed_record(p2, referrer=self.referrer, - given_name=fake.name()) - self.user.save() - assert len(self.user.unclaimed_records.keys()) > 1 # sanity check - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.post(url, data={ - 'username': self.given_email, - 'password': 'bohemianrhap', - 'password2': 'bohemianrhap' - }) - self.user.reload() - assert self.user.unclaimed_records == {} - - @mock.patch('osf.models.OSFUser.update_search_nodes') - def test_posting_to_claim_form_sets_fullname_to_given_name(self, mock_update_search_nodes): - # User is created with a full name - original_name = fake.name() - unreg = UnregUserFactory(fullname=original_name) - # User invited with a different name - different_name = fake.name() - new_user = self.project.add_unregistered_contributor( - email=unreg.username, - fullname=different_name, - auth=Auth(self.project.creator), - ) - self.project.save() - # Goes to claim url - claim_url = new_user.get_claim_url(self.project._id) - self.app.post(claim_url, data={ - 'username': unreg.username, - 'password': 'killerqueen', - 'password2': 'killerqueen' - }) - unreg.reload() - # Full name was set correctly - assert unreg.fullname == different_name - # CSL names were set correctly - parsed_name = impute_names_model(different_name) - 
assert unreg.given_name == parsed_name['given_name'] - assert unreg.family_name == parsed_name['family_name'] - - def test_claim_user_post_returns_fullname(self): - url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' - res = self.app.post( - url, - auth=self.referrer.auth, - json={ - 'value': self.given_email, - 'pk': self.user._primary_key - }, - ) - assert res.json['fullname'] == self.given_name - assert self.mock_notification_send.called - - def test_claim_user_post_if_email_is_different_from_given_email(self): - email = fake_email() # email that is different from the one the referrer gave - url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' - self.app.post(url, json={'value': email, 'pk': self.user._primary_key} ) - assert self.mock_notification_send.called - assert self.mock_notification_send.call_count == 2 - call_to_invited = self.mock_notification_send.mock_calls[0] - call_to_invited.assert_called_with(to_addr=email) - call_to_referrer = self.mock_notification_send.mock_calls[1] - call_to_referrer.assert_called_with(to_addr=self.given_email) - - def test_claim_url_with_bad_token_returns_400(self): - url = self.project.web_url_for( - 'claim_user_registered', - uid=self.user._id, - token='badtoken', - ) - res = self.app.get(url, auth=self.referrer.auth) - assert res.status_code == 400 - - def test_cannot_claim_user_with_user_who_is_already_contributor(self): - # user who is already a contirbutor to the project - contrib = AuthUserFactory() - self.project.add_contributor(contrib, auth=Auth(self.project.creator)) - self.project.save() - # Claiming user goes to claim url, but contrib is already logged in - url = self.user.get_claim_url(self.project._primary_key) - res = self.app.get( - url, - auth=contrib.auth, follow_redirects=True) - # Response is a 400 - assert res.status_code == 400 - - def test_claim_user_with_project_id_adds_corresponding_claimed_tag_to_user(self): - assert OsfClaimedTags.Osf.value not in self.user.system_tags - url = self.user.get_claim_url(self.project_with_source_tag._primary_key) - res = self.app.post(url, data={ - 'username': self.user.username, - 'password': 'killerqueen', - 'password2': 'killerqueen' - }) - - assert res.status_code == 302 - self.user.reload() - assert OsfClaimedTags.Osf.value in self.user.system_tags - - def test_claim_user_with_preprint_id_adds_corresponding_claimed_tag_to_user(self): - assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') not in self.user.system_tags - url = self.user.get_claim_url(self.preprint_with_source_tag._primary_key) - res = self.app.post(url, data={ - 'username': self.user.username, - 'password': 'killerqueen', - 'password2': 'killerqueen' - }) - - assert res.status_code == 302 - self.user.reload() - assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') in self.user.system_tags diff --git a/tests/test_claim_views.py b/tests/test_claim_views.py new file mode 100644 index 00000000000..025aa1a53eb --- /dev/null +++ b/tests/test_claim_views.py @@ -0,0 +1,491 @@ +import pytest +from flask import g + +from http.cookies import SimpleCookie +from unittest import mock + +from framework.auth import Auth, authenticate, cas +from framework.auth.utils import impute_names_model +from framework.exceptions import HTTPError +from framework.flask import redirect +from osf.models import ( + OSFUser, + Tag, NotificationType, +) +from osf_tests.factories import ( + fake_email, + AuthUserFactory, + 
PreprintFactory, + ProjectFactory, + UserFactory, + UnregUserFactory, +) +from tests.base import ( + fake, + OsfTestCase, +) +from tests.test_cas_authentication import generate_external_user_with_resp +from tests.utils import capture_notifications +from website import settings +from website.project.views.contributor import send_claim_registered_email +from website.util.metrics import ( + OsfSourceTags, + OsfClaimedTags, + provider_source_tag, + provider_claimed_tag +) + + +@pytest.mark.enable_implicit_clean +class TestClaimViews(OsfTestCase): + + def setUp(self): + super().setUp() + self.referrer = AuthUserFactory() + self.project = ProjectFactory(creator=self.referrer, is_public=True) + self.project_with_source_tag = ProjectFactory(creator=self.referrer, is_public=True) + self.preprint_with_source_tag = PreprintFactory(creator=self.referrer, is_public=True) + osf_source_tag, created = Tag.all_tags.get_or_create(name=OsfSourceTags.Osf.value, system=True) + preprint_source_tag, created = Tag.all_tags.get_or_create(name=provider_source_tag(self.preprint_with_source_tag.provider._id, 'preprint'), system=True) + self.project_with_source_tag.add_system_tag(osf_source_tag.name) + self.preprint_with_source_tag.add_system_tag(preprint_source_tag.name) + self.given_name = fake.name() + self.given_email = fake_email() + self.project_with_source_tag.add_unregistered_contributor( + fullname=self.given_name, + email=self.given_email, + auth=Auth(user=self.referrer) + ) + self.preprint_with_source_tag.add_unregistered_contributor( + fullname=self.given_name, + email=self.given_email, + auth=Auth(user=self.referrer) + ) + self.user = self.project.add_unregistered_contributor( + fullname=self.given_name, + email=self.given_email, + auth=Auth(user=self.referrer) + ) + self.project.save() + + def test_claim_user_already_registered_redirects_to_claim_user_registered(self): + name = fake.name() + email = fake_email() + + # project contributor adds an unregistered contributor (without an email) on public project + unregistered_user = self.project.add_unregistered_contributor( + fullname=name, + email=None, + auth=Auth(user=self.referrer) + ) + assert unregistered_user in self.project.contributors + + # unregistered user comes along and claims themselves on the public project, entering an email + invite_url = self.project.api_url_for( + 'claim_user_post', + uid='undefined' + ) + with capture_notifications() as notifications: + self.app.post( + invite_url, + json={ + 'pk': unregistered_user._primary_key, + 'value': email + } + ) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.USER_PENDING_VERIFICATION + assert notifications[1]['type'] == NotificationType.Type.USER_FORWARD_INVITE + + # set unregistered record email since we are mocking send_claim_email() + unclaimed_record = unregistered_user.get_unclaimed_record(self.project._primary_key) + unclaimed_record.update({'email': email}) + unregistered_user.save() + + # unregistered user then goes and makes an account with same email, before claiming themselves as contributor + UserFactory(username=email, fullname=name) + + # claim link for the now registered email is accessed while not logged in + token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token'] + claim_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/?token={token}' + res = self.app.get(claim_url) + + # should redirect to 'claim_user_registered' view + claim_registered_url = 
f'/user/{unregistered_user._id}/{self.project._id}/claim/verify/{token}/' + assert res.status_code == 302 + assert claim_registered_url in res.headers.get('Location') + + def test_claim_user_already_registered_secondary_email_redirects_to_claim_user_registered(self): + name = fake.name() + email = fake_email() + secondary_email = fake_email() + + # project contributor adds an unregistered contributor (without an email) on public project + unregistered_user = self.project.add_unregistered_contributor( + fullname=name, + email=None, + auth=Auth(user=self.referrer) + ) + assert unregistered_user in self.project.contributors + + # unregistered user comes along and claims themselves on the public project, entering an email + invite_url = self.project.api_url_for( + 'claim_user_post', + uid='undefined' + ) + with capture_notifications() as notifications: + self.app.post( + invite_url, + json={ + 'pk': unregistered_user._primary_key, + 'value': secondary_email + } + ) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.USER_PENDING_VERIFICATION + assert notifications[1]['type'] == NotificationType.Type.USER_FORWARD_INVITE + + # set unregistered record email since we are mocking send_claim_email() + unclaimed_record = unregistered_user.get_unclaimed_record(self.project._primary_key) + unclaimed_record.update({'email': secondary_email}) + unregistered_user.save() + + # unregistered user then goes and makes an account with same email, before claiming themselves as contributor + registered_user = UserFactory(username=email, fullname=name) + registered_user.emails.create(address=secondary_email) + registered_user.save() + + # claim link for the now registered email is accessed while not logged in + token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token'] + claim_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/?token={token}' + res = self.app.get(claim_url) + + # should redirect to 'claim_user_registered' view + claim_registered_url = f'/user/{unregistered_user._id}/{self.project._id}/claim/verify/{token}/' + assert res.status_code == 302 + assert claim_registered_url in res.headers.get('Location') + + def test_claim_user_invited_with_no_email_posts_to_claim_form(self): + given_name = fake.name() + invited_user = self.project.add_unregistered_contributor( + fullname=given_name, + email=None, + auth=Auth(user=self.referrer) + ) + self.project.save() + + url = invited_user.get_claim_url(self.project._primary_key) + res = self.app.post(url, data={ + 'password': 'bohemianrhap', + 'password2': 'bohemianrhap' + }) + assert res.status_code == 400 + + def test_claim_user_post_with_registered_user_id(self): + # registered user who is attempting to claim the unclaimed contributor + reg_user = UserFactory() + with capture_notifications() as notifications: + res = self.app.post( + f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/', + json={ + # pk of unreg user record + 'pk': self.user._primary_key, + 'claimerId': reg_user._primary_key + } + ) + + # mail was sent + assert len(notifications) == 2 + # ... 
to the correct address + assert notifications[0]['kwargs']['user'] == self.referrer + assert notifications[1]['kwargs']['user'] == reg_user + + # view returns the correct JSON + assert res.json == { + 'status': 'success', + 'email': reg_user.username, + 'fullname': self.given_name, + } + + def test_send_claim_registered_email(self): + reg_user = UserFactory() + with capture_notifications() as notifications: + send_claim_registered_email( + claimer=reg_user, + unclaimed_user=self.user, + node=self.project + ) + assert len(notifications) == 2 + # ... to the correct address + assert notifications[0]['kwargs']['user'] == self.referrer + assert notifications[1]['kwargs']['user'] == reg_user + + def test_send_claim_registered_email_before_throttle_expires(self): + reg_user = UserFactory() + with capture_notifications() as notifications: + send_claim_registered_email( + claimer=reg_user, + unclaimed_user=self.user, + node=self.project, + ) + # second call raises error because it was called before throttle period + with pytest.raises(HTTPError): + send_claim_registered_email( + claimer=reg_user, + unclaimed_user=self.user, + node=self.project, + ) + assert not notifications + + @mock.patch('website.project.views.contributor.send_claim_registered_email') + def test_claim_user_post_with_email_already_registered_sends_correct_email( + self, send_claim_registered_email): + reg_user = UserFactory() + payload = { + 'value': reg_user.username, + 'pk': self.user._primary_key + } + url = self.project.api_url_for('claim_user_post', uid=self.user._id) + self.app.post(url, json=payload) + assert send_claim_registered_email.called + + def test_user_with_removed_unclaimed_url_claiming(self): + """ Tests that when an unclaimed user is removed from a project, the + unregistered user object does not retain the token. + """ + self.project.remove_contributor(self.user, Auth(user=self.referrer)) + + assert self.project._primary_key not in self.user.unclaimed_records.keys() + + def test_user_with_claim_url_cannot_claim_twice(self): + """ Tests that when an unclaimed user is replaced on a project with a + claimed user, the unregistered user object does not retain the token. 
+        """
+        reg_user = AuthUserFactory()
+
+        self.project.replace_contributor(self.user, reg_user)
+
+        assert self.project._primary_key not in self.user.unclaimed_records.keys()
+
+    def test_claim_user_form_redirects_to_password_confirm_page_if_user_is_logged_in(self):
+        reg_user = AuthUserFactory()
+        url = self.user.get_claim_url(self.project._primary_key)
+        res = self.app.get(url, auth=reg_user.auth)
+        assert res.status_code == 302
+        res = self.app.get(url, auth=reg_user.auth, follow_redirects=True)
+        token = self.user.get_unclaimed_record(self.project._primary_key)['token']
+        expected = self.project.web_url_for(
+            'claim_user_registered',
+            uid=self.user._id,
+            token=token,
+        )
+        assert res.request.path == expected
+
+    @mock.patch('framework.auth.cas.make_response_from_ticket')
+    def test_claim_user_when_user_is_registered_with_orcid(self, mock_response_from_ticket):
+        # TODO: check in qa url encoding
+        token = self.user.get_unclaimed_record(self.project._primary_key)['token']
+        url = f'/user/{self.user._id}/{self.project._id}/claim/verify/{token}/'
+        # logged out user gets redirected to cas login
+        res1 = self.app.get(url)
+        assert res1.status_code == 302
+        res = self.app.resolve_redirect(self.app.get(url))
+        service_url = f'http://localhost{url}'
+        expected = cas.get_logout_url(service_url=cas.get_login_url(service_url=service_url))
+        assert res1.location == expected
+
+        # user logged in with orcid automatically becomes a contributor
+        orcid_user, validated_credentials, cas_resp = generate_external_user_with_resp(url)
+        mock_response_from_ticket.return_value = authenticate(
+            orcid_user,
+            redirect(url)
+        )
+        orcid_user.set_unusable_password()
+        orcid_user.save()
+
+        # The request to OSF with CAS service ticket must not have cookie and/or auth.
+        service_ticket = fake.md5()
+        url_with_service_ticket = f'{url}?ticket={service_ticket}'
+        res = self.app.get(url_with_service_ticket)
+        # The response of this request is expected to be a 302 with `Location`.
+        # And the redirect URL must equal the original service URL
+        assert res.status_code == 302
+        redirect_url = res.headers['Location']
+        assert redirect_url == url
+        # The response of this request is expected to have the `Set-Cookie` header with the OSF cookie.
+        # And the cookie must belong to the ORCiD user.
+ raw_set_cookie = res.headers['Set-Cookie'] + assert raw_set_cookie + simple_cookie = SimpleCookie() + simple_cookie.load(raw_set_cookie) + cookie_dict = {key: value.value for key, value in simple_cookie.items()} + osf_cookie = cookie_dict.get(settings.COOKIE_NAME, None) + assert osf_cookie is not None + user = OSFUser.from_cookie(osf_cookie) + assert user._id == orcid_user._id + # The ORCiD user must be different from the unregistered user created when the contributor was added + assert user._id != self.user._id + + # Must clear the Flask g context manual and set the OSF cookie to context + g.current_session = None + self.app.set_cookie(settings.COOKIE_NAME, osf_cookie) + res = self.app.resolve_redirect(res) + assert res.status_code == 302 + assert self.project.is_contributor(orcid_user) + assert self.project.url in res.headers.get('Location') + + def test_get_valid_form(self): + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.get(url, follow_redirects=True) + assert res.status_code == 200 + + def test_invalid_claim_form_raise_400(self): + uid = self.user._primary_key + pid = self.project._primary_key + url = f'/user/{uid}/{pid}/claim/?token=badtoken' + res = self.app.get(url, follow_redirects=True) + assert res.status_code == 400 + + @mock.patch('osf.models.OSFUser.update_search_nodes') + def test_posting_to_claim_form_with_valid_data(self, mock_update_search_nodes): + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.post(url, data={ + 'username': self.user.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + + assert res.status_code == 302 + location = res.headers.get('Location') + assert 'login?service=' in location + assert 'username' in location + assert 'verification_key' in location + assert self.project._primary_key in location + + self.user.reload() + assert self.user.is_registered + assert self.user.is_active + assert self.project._primary_key not in self.user.unclaimed_records + + @mock.patch('osf.models.OSFUser.update_search_nodes') + def test_posting_to_claim_form_removes_all_unclaimed_data(self, mock_update_search_nodes): + # user has multiple unclaimed records + p2 = ProjectFactory(creator=self.referrer) + self.user.add_unclaimed_record(p2, referrer=self.referrer, + given_name=fake.name()) + self.user.save() + assert len(self.user.unclaimed_records.keys()) > 1 # sanity check + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.post(url, data={ + 'username': self.given_email, + 'password': 'bohemianrhap', + 'password2': 'bohemianrhap' + }) + self.user.reload() + assert self.user.unclaimed_records == {} + + @mock.patch('osf.models.OSFUser.update_search_nodes') + def test_posting_to_claim_form_sets_fullname_to_given_name(self, mock_update_search_nodes): + # User is created with a full name + original_name = fake.name() + unreg = UnregUserFactory(fullname=original_name) + # User invited with a different name + different_name = fake.name() + new_user = self.project.add_unregistered_contributor( + email=unreg.username, + fullname=different_name, + auth=Auth(self.project.creator), + ) + self.project.save() + # Goes to claim url + claim_url = new_user.get_claim_url(self.project._id) + self.app.post(claim_url, data={ + 'username': unreg.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + unreg.reload() + # Full name was set correctly + assert unreg.fullname == different_name + # CSL names were set correctly + parsed_name = impute_names_model(different_name) + 
assert unreg.given_name == parsed_name['given_name'] + assert unreg.family_name == parsed_name['family_name'] + + def test_claim_user_post_returns_fullname(self): + with capture_notifications() as notifications: + res = self.app.post( + f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/', + auth=self.referrer.auth, + json={ + 'value': self.given_email, + 'pk': self.user._primary_key + }, + ) + assert res.json['fullname'] == self.given_name + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_INVITE_DEFAULT + + def test_claim_user_post_if_email_is_different_from_given_email(self): + email = fake_email() # email that is different from the one the referrer gave + with capture_notifications() as notifications: + self.app.post( + f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/', + json={ + 'value': email, + 'pk': self.user._primary_key + } + ) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.USER_PENDING_VERIFICATION + assert notifications[0]['kwargs']['user'].username == self.given_email + assert notifications[1]['type'] == NotificationType.Type.USER_FORWARD_INVITE + assert notifications[1]['kwargs']['destination_address'] == email + + def test_claim_url_with_bad_token_returns_400(self): + url = self.project.web_url_for( + 'claim_user_registered', + uid=self.user._id, + token='badtoken', + ) + res = self.app.get(url, auth=self.referrer.auth) + assert res.status_code == 400 + + def test_cannot_claim_user_with_user_who_is_already_contributor(self): + # user who is already a contirbutor to the project + contrib = AuthUserFactory() + self.project.add_contributor(contrib, auth=Auth(self.project.creator)) + self.project.save() + # Claiming user goes to claim url, but contrib is already logged in + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.get( + url, + auth=contrib.auth, follow_redirects=True) + # Response is a 400 + assert res.status_code == 400 + + def test_claim_user_with_project_id_adds_corresponding_claimed_tag_to_user(self): + assert OsfClaimedTags.Osf.value not in self.user.system_tags + url = self.user.get_claim_url(self.project_with_source_tag._primary_key) + res = self.app.post(url, data={ + 'username': self.user.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + + assert res.status_code == 302 + self.user.reload() + assert OsfClaimedTags.Osf.value in self.user.system_tags + + def test_claim_user_with_preprint_id_adds_corresponding_claimed_tag_to_user(self): + assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') not in self.user.system_tags + url = self.user.get_claim_url(self.preprint_with_source_tag._primary_key) + res = self.app.post(url, data={ + 'username': self.user.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + + assert res.status_code == 302 + self.user.reload() + assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') in self.user.system_tags diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 766ffb088e5..f3788f8b0c5 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -449,7 +449,8 @@ def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 360 event_context={ 'claim_url': claim_url, 'fullname': unclaimed_record['name'], - 'referrer': referrer.username, + 'referrer_username': 
referrer.username, + 'referrer_fullname': referrer.fullname, 'node': node.title, 'can_change_preferences': False, 'osf_contact_email': settings.OSF_CONTACT_EMAIL, @@ -549,6 +550,7 @@ def send_claim_email( NotificationType.objects.get(name=notification_type).emit( user=referrer, + destination_address=email, event_context={ 'user': unclaimed_user.id, 'referrer': referrer.id, @@ -992,7 +994,7 @@ def claim_user_post(node, **kwargs): claimer = get_user(email=email) # registered user if claimer and claimer.is_registered: - send_claim_registered_email(claimer, unclaimed_user, node) + send_claim_registered_email(claimer, unclaimed_user, node, email) # unregistered user else: send_claim_email(email, unclaimed_user, node, notify=True) From 2ac9ef942fd84246fbd744d03bb9f2fc58fdf568 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 22 Jul 2025 17:02:20 -0400 Subject: [PATCH 106/176] add bulk registration upload notifications to tests --- api_tests/providers/tasks/test_bulk_upload.py | 66 +++++++++++++------ notifications.yaml | 16 +++++ 2 files changed, 62 insertions(+), 20 deletions(-) diff --git a/api_tests/providers/tasks/test_bulk_upload.py b/api_tests/providers/tasks/test_bulk_upload.py index 221861ea313..8caf27d89bf 100644 --- a/api_tests/providers/tasks/test_bulk_upload.py +++ b/api_tests/providers/tasks/test_bulk_upload.py @@ -4,12 +4,14 @@ from api.providers.tasks import bulk_create_registrations from osf.exceptions import RegistrationBulkCreationContributorError, RegistrationBulkCreationRowError -from osf.models import RegistrationBulkUploadJob, RegistrationBulkUploadRow, RegistrationProvider, RegistrationSchema +from osf.models import RegistrationBulkUploadJob, RegistrationBulkUploadRow, RegistrationProvider, RegistrationSchema, \ + NotificationType from osf.models.registration_bulk_upload_job import JobState from osf.models.registration_bulk_upload_row import RegistrationBulkUploadContributors from osf.utils.permissions import ADMIN, READ, WRITE from osf_tests.factories import InstitutionFactory, SubjectFactory, UserFactory +from tests.utils import capture_notifications class TestRegistrationBulkUploadContributors: @@ -317,10 +319,20 @@ def test_bulk_creation_dry_run(self, registration_row_1, registration_row_2, upl assert upload_job_done_full.state == JobState.PICKED_UP assert not upload_job_done_full.email_sent - def test_bulk_creation_done_full(self, mock_send_grid, registration_row_1, registration_row_2, - upload_job_done_full, provider, initiator, read_contributor, write_contributor): - - bulk_create_registrations(upload_job_done_full.id, dry_run=False) + def test_bulk_creation_done_full( + self, + registration_row_1, + registration_row_2, + upload_job_done_full, + provider, + initiator, + read_contributor, + write_contributor + ): + with capture_notifications() as notifications: + bulk_create_registrations(upload_job_done_full.id, dry_run=False) + notification_types = [notifications['type'] for notifications in notifications] + assert NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_SUCCESS_ALL in notification_types upload_job_done_full.reload() assert upload_job_done_full.state == JobState.DONE_FULL assert upload_job_done_full.email_sent @@ -335,13 +347,20 @@ def test_bulk_creation_done_full(self, mock_send_grid, registration_row_1, regis assert row.draft_registration.contributor_set.get(user=write_contributor).permission == WRITE assert row.draft_registration.contributor_set.get(user=read_contributor).permission == READ - mock_send_grid.assert_called() - - def 
test_bulk_creation_done_partial(self, mock_send_grid, registration_row_3, - registration_row_invalid_extra_bib_1, upload_job_done_partial, - provider, initiator, read_contributor, write_contributor): - - bulk_create_registrations(upload_job_done_partial.id, dry_run=False) + def test_bulk_creation_done_partial( + self, + registration_row_3, + registration_row_invalid_extra_bib_1, + upload_job_done_partial, + provider, + initiator, + read_contributor, + write_contributor + ): + with capture_notifications() as notifications: + bulk_create_registrations(upload_job_done_partial.id, dry_run=False) + notification_types = [notifications['type'] for notifications in notifications] + assert NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL in notification_types upload_job_done_partial.reload() assert upload_job_done_partial.state == JobState.DONE_PARTIAL assert upload_job_done_partial.email_sent @@ -355,16 +374,23 @@ def test_bulk_creation_done_partial(self, mock_send_grid, registration_row_3, assert registration_row_3.draft_registration.contributor_set.get(user=write_contributor).permission == WRITE assert registration_row_3.draft_registration.contributor_set.get(user=read_contributor).permission == READ - mock_send_grid.assert_called() + def test_bulk_creation_done_error( + self, + registration_row_invalid_extra_bib_2, + registration_row_invalid_affiliation, + upload_job_done_error, + provider, + initiator, + read_contributor, + write_contributor, + institution + ): + with capture_notifications() as notifications: + bulk_create_registrations(upload_job_done_error.id, dry_run=False) + notification_types = [notifications['type'] for notifications in notifications] + assert NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL in notification_types - def test_bulk_creation_done_error(self, mock_send_grid, registration_row_invalid_extra_bib_2, - registration_row_invalid_affiliation, upload_job_done_error, - provider, initiator, read_contributor, write_contributor, institution): - - bulk_create_registrations(upload_job_done_error.id, dry_run=False) upload_job_done_error.reload() assert upload_job_done_error.state == JobState.DONE_ERROR assert upload_job_done_error.email_sent assert len(RegistrationBulkUploadRow.objects.filter(upload__id=upload_job_done_error.id)) == 0 - - mock_send_grid.assert_called() diff --git a/notifications.yaml b/notifications.yaml index 0a18afdf681..61a146daffa 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -116,6 +116,22 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forgot_password_institution.html.mako' + - name: user_registration_bulk_upload_success_all + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/registration_bulk_upload_success_all.html.mako' + - name: user_registration_bulk_upload_failure_all + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/registration_bulk_upload_failure_all.html.mako' + - name: user_registration_bulk_upload_success_partial + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/registration_bulk_upload_success_partial.html.mako' + - name: user_registration_bulk_upload_failure_duplicates + __docs__: ... 
+ object_content_type_model_name: osfuser + template: 'website/templates/emails/registration_bulk_upload_failure_duplicates.html.mako' #### PROVIDER - name: provider_new_pending_submissions From 9815f1f8f6e352f8d3daffa3b2a7f3d06b7546f4 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 22 Jul 2025 17:14:42 -0400 Subject: [PATCH 107/176] fix issues with notifications when adding nodes to instituions --- .../test_institution_relationship_nodes.py | 71 +++++++++++-------- osf/utils/notifications.py | 1 + website/reviews/listeners.py | 3 +- 3 files changed, 44 insertions(+), 31 deletions(-) diff --git a/api_tests/institutions/views/test_institution_relationship_nodes.py b/api_tests/institutions/views/test_institution_relationship_nodes.py index c62d760710d..c025407ab78 100644 --- a/api_tests/institutions/views/test_institution_relationship_nodes.py +++ b/api_tests/institutions/views/test_institution_relationship_nodes.py @@ -1,6 +1,7 @@ import pytest from api.base.settings.defaults import API_BASE +from osf.models import NotificationType from osf_tests.factories import ( RegistrationFactory, InstitutionFactory, @@ -8,6 +9,7 @@ NodeFactory, ) from osf.utils import permissions +from tests.utils import capture_notifications def make_payload(*node_ids): @@ -372,45 +374,56 @@ def test_add_non_node(self, app, user, institution, url_institution_nodes): assert res.status_code == 404 - def test_email_sent_on_affiliation_addition(self, app, user, institution, node_without_institution, - url_institution_nodes, mock_send_grid): + def test_email_sent_on_affiliation_addition( + self, + app, + user, + institution, + node_without_institution, + url_institution_nodes, + ): node_without_institution.add_contributor(user, permissions='admin') current_institution = InstitutionFactory() node_without_institution.affiliated_institutions.add(current_institution) - - res = app.post_json_api( - url_institution_nodes, - { - 'data': [ - { - 'type': 'nodes', 'id': node_without_institution._id - } - ] - }, - auth=user.auth - ) + with capture_notifications() as notifications: + res = app.post_json_api( + url_institution_nodes, + { + 'data': [ + { + 'type': 'nodes', 'id': node_without_institution._id + } + ] + }, + auth=user.auth + ) assert res.status_code == 201 - mock_send_grid.assert_called_once() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED - def test_email_sent_on_affiliation_removal(self, app, admin, institution, node_public, url_institution_nodes, mock_send_grid): + def test_email_sent_on_affiliation_removal(self, app, admin, institution, node_public, url_institution_nodes): current_institution = InstitutionFactory() node_public.affiliated_institutions.add(current_institution) - res = app.delete_json_api( - url_institution_nodes, - { - 'data': [ - { - 'type': 'nodes', 'id': node_public._id - } - ] - }, - auth=admin.auth - ) + with capture_notifications() as notifications: + res = app.delete_json_api( + url_institution_nodes, + { + 'data': [ + { + 'type': 'nodes', 'id': node_public._id + } + ] + }, + auth=admin.auth + ) # Assert response is successful assert res.status_code == 204 - call_args = mock_send_grid.call_args[1] - assert call_args['to_addr'] == admin.email + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED + assert notifications[0]['kwargs']['user'] == node_public.creator + assert notifications[1]['type'] == NotificationType.Type.NODE_AFFILIATION_CHANGED + assert 
notifications[1]['kwargs']['user'] == admin diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index 78a422e4451..4f0c1a0dc05 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -120,6 +120,7 @@ def notify_moderator_registration_requests_withdrawal(resource, user, *args, **k reviews_signals.reviews_withdraw_requests_notification_moderators.send( timestamp=timezone.now(), context=context, + resource=resource, user=user ) diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index b00548b326b..3b6feeec3fc 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -135,14 +135,13 @@ def reviews_submit_notification_moderators(self, timestamp, resource, context, u # Handle email notifications to notify moderators of new submissions. @reviews_signals.reviews_withdraw_requests_notification_moderators.connect -def reviews_withdraw_requests_notification_moderators(self, timestamp, context, user): +def reviews_withdraw_requests_notification_moderators(self, timestamp, context, user, resource): # imports moved here to avoid AppRegistryNotReady error from osf.models import NotificationSubscriptionLegacy from website.profile.utils import get_profile_image_url from website.notifications.emails import store_emails context['referrer_fullname'] = user.fullname - resource = context['reviewable'] provider = resource.provider # Get NotificationSubscription instance, which contains reference to all subscribers From 7c08559ff5fed2265ab2474bbd74e77d8bc2282a Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 23 Jul 2025 10:23:14 -0400 Subject: [PATCH 108/176] fix issues with collection submissions notifications --- osf/models/sanctions.py | 16 +-- osf_tests/test_collection_submission.py | 128 ++++++++++++++---------- 2 files changed, 83 insertions(+), 61 deletions(-) diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index a4fcfe17396..a5b19f3a917 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -8,7 +8,6 @@ from framework.auth import Auth from framework.exceptions import PermissionsError from website import settings as osf_settings -from website import mails from osf.exceptions import ( InvalidSanctionRejectionToken, InvalidSanctionApprovalToken, @@ -404,7 +403,12 @@ def _rejection_url_context(self, user_id): return None def _send_approval_request_email(self, user, template, context): - mails.send_mail(user.username, template, user=user, can_change_preferences=False, **context) + NotificationType.objects.get( + name=template + ).emit( + user=user, + event_context=context + ) def _email_template_context(self, user, node, is_authorizer=False): return {} @@ -781,8 +785,8 @@ class RegistrationApproval(SanctionCallbackMixin, EmailApprovableSanction): DISPLAY_NAME = 'Approval' SHORT_NAME = 'registration_approval' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_REGISTRATION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_REGISTRATION_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_REGISTRATION_NON_ADMIN AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_pending_registration_admin' NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_pending_registration_non_admin' @@ -957,8 +961,8 @@ class EmbargoTerminationApproval(EmailApprovableSanction): DISPLAY_NAME = 'Embargo Termination Request' SHORT_NAME = 'embargo_termination_approval' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = 
mails.PENDING_EMBARGO_TERMINATION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = mails.PENDING_EMBARGO_TERMINATION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_EMBARGO_TERMINATION_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' diff --git a/osf_tests/test_collection_submission.py b/osf_tests/test_collection_submission.py index 2ff2b279a6b..76baa2de752 100644 --- a/osf_tests/test_collection_submission.py +++ b/osf_tests/test_collection_submission.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from osf_tests.factories import ( @@ -9,13 +8,16 @@ from osf_tests.factories import NodeFactory, CollectionFactory, CollectionProviderFactory -from osf.models import CollectionSubmission +from osf.models import CollectionSubmission, NotificationType from osf.utils.workflows import CollectionSubmissionStates from framework.exceptions import PermissionsError from api_tests.utils import UserRoles from osf.management.commands.populate_collection_provider_notification_subscriptions import populate_collection_provider_notification_subscriptions from django.utils import timezone +from tests.utils import capture_notifications + + @pytest.fixture def user(): return AuthUserFactory() @@ -144,7 +146,6 @@ def configure_test_auth(node, user_role, provider=None): @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestModeratedCollectionSubmission: MOCK_NOW = timezone.now() @@ -152,28 +153,27 @@ class TestModeratedCollectionSubmission: @pytest.fixture(autouse=True) def setup(self): populate_collection_provider_notification_subscriptions() - with mock.patch('osf.utils.machines.timezone.now', return_value=self.MOCK_NOW): - yield def test_submit(self, moderated_collection_submission): # .submit on post_save assert moderated_collection_submission.state == CollectionSubmissionStates.PENDING - def test_notify_contributors_pending(self, node, moderated_collection, mock_send_grid): - collection_submission = CollectionSubmission( - guid=node.guids.first(), - collection=moderated_collection, - creator=node.creator, - ) - collection_submission.save() - assert mock_send_grid.called + def test_notify_contributors_pending(self, node, moderated_collection): + with capture_notifications() as notifications: + collection_submission = CollectionSubmission( + guid=node.guids.first(), + collection=moderated_collection, + creator=node.creator, + ) + collection_submission.save() + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_SUBMITTED + assert notifications[1]['type'] == NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS assert collection_submission.state == CollectionSubmissionStates.PENDING def test_notify_moderators_pending(self, node, moderated_collection): - from website.notifications import emails - store_emails = emails.store_emails - with mock.patch('website.notifications.emails.store_emails') as mock_store_emails: - mock_store_emails.side_effect = store_emails # implicitly test rendering + + with capture_notifications() as notifications: collection_submission = CollectionSubmission( guid=node.guids.first(), collection=moderated_collection, @@ -181,18 +181,10 @@ def test_notify_moderators_pending(self, node, moderated_collection): ) populate_collection_provider_notification_subscriptions() 
collection_submission.save() - assert mock_store_emails.called + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_SUBMITTED + assert notifications[1]['type'] == NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS assert collection_submission.state == CollectionSubmissionStates.PENDING - email_call = mock_store_emails.call_args_list[0][0] - moderator = moderated_collection.moderators.get() - assert email_call == ( - [moderator._id], - 'email_transactional', - 'new_pending_submissions', - collection_submission.creator, - node, - self.MOCK_NOW, - ) @pytest.mark.parametrize('user_role', [UserRoles.UNAUTHENTICATED, UserRoles.NONCONTRIB]) def test_accept_fails(self, user_role, moderated_collection_submission): @@ -206,10 +198,13 @@ def test_accept_success(self, node, moderated_collection_submission): moderated_collection_submission.accept(user=moderator, comment='Test Comment') assert moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - def test_notify_moderated_accepted(self, node, moderated_collection_submission, mock_send_grid): + def test_notify_moderated_accepted(self, node, moderated_collection_submission): moderator = configure_test_auth(node, UserRoles.MODERATOR) - moderated_collection_submission.accept(user=moderator, comment='Test Comment') - assert mock_send_grid.called + with capture_notifications() as notifications: + moderated_collection_submission.accept(user=moderator, comment='Test Comment') + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_ACCEPTED + assert moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED @pytest.mark.parametrize('user_role', [UserRoles.UNAUTHENTICATED, UserRoles.NONCONTRIB]) @@ -224,11 +219,14 @@ def test_reject_success(self, node, moderated_collection_submission): moderated_collection_submission.reject(user=moderator, comment='Test Comment') assert moderated_collection_submission.state == CollectionSubmissionStates.REJECTED - def test_notify_moderated_rejected(self, node, moderated_collection_submission, mock_send_grid): + def test_notify_moderated_rejected(self, node, moderated_collection_submission): moderator = configure_test_auth(node, UserRoles.MODERATOR) - moderated_collection_submission.reject(user=moderator, comment='Test Comment') - assert mock_send_grid.called + with capture_notifications() as notifications: + moderated_collection_submission.reject(user=moderator, comment='Test Comment') + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REJECTED + assert moderated_collection_submission.state == CollectionSubmissionStates.REJECTED @pytest.mark.parametrize('user_role', UserRoles.excluding(*[UserRoles.ADMIN_USER, UserRoles.MODERATOR])) @@ -248,20 +246,27 @@ def test_remove_success(self, node, user_role, moderated_collection_submission): moderated_collection_submission.remove(user=user, comment='Test Comment') assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_moderator(self, node, moderated_collection_submission, mock_send_grid): + def test_notify_moderated_removed_moderator(self, node, moderated_collection_submission): moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.MODERATOR) - moderated_collection_submission.remove(user=moderator, comment='Test 
Comment') - assert mock_send_grid.called + with capture_notifications() as notifications: + moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_MODERATOR + assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_admin(self, node, moderated_collection_submission, mock_send_grid): + def test_notify_moderated_removed_admin(self, node, moderated_collection_submission): moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send_grid.called + with capture_notifications() as notifications: + moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert len(notifications) == 2 + assert notifications[1]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN + assert moderated_collection_submission.state == CollectionSubmissionStates.REMOVED def test_resubmit_success(self, node, moderated_collection_submission): @@ -336,12 +341,15 @@ def test_remove_success(self, user_role, node, unmoderated_collection_submission unmoderated_collection_submission.remove(user=user, comment='Test Comment') assert unmoderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_admin(self, node, unmoderated_collection_submission, mock_send_grid): + def test_notify_moderated_removed_admin(self, node, unmoderated_collection_submission): unmoderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - unmoderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send_grid.called + with capture_notifications() as notifications: + unmoderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN + assert notifications[1]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN assert unmoderated_collection_submission.state == CollectionSubmissionStates.REMOVED def test_resubmit_success(self, node, unmoderated_collection_submission): @@ -434,11 +442,13 @@ def test_accept_success(self, node, hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED - def test_notify_moderated_accepted(self, node, hybrid_moderated_collection_submission, mock_send_grid): + def test_notify_moderated_accepted(self, node, hybrid_moderated_collection_submission): moderator = configure_test_auth(node, UserRoles.MODERATOR) - hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') - assert mock_send_grid.called + with capture_notifications() as notifications: + hybrid_moderated_collection_submission.accept(user=moderator, comment='Test Comment') + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_ACCEPTED assert 
hybrid_moderated_collection_submission.state == CollectionSubmissionStates.ACCEPTED @pytest.mark.parametrize('user_role', [UserRoles.UNAUTHENTICATED, UserRoles.NONCONTRIB]) @@ -453,11 +463,13 @@ def test_reject_success(self, node, hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REJECTED - def test_notify_moderated_rejected(self, node, hybrid_moderated_collection_submission, mock_send_grid): + def test_notify_moderated_rejected(self, node, hybrid_moderated_collection_submission): moderator = configure_test_auth(node, UserRoles.MODERATOR) - hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') - assert mock_send_grid.called + with capture_notifications() as notifications: + hybrid_moderated_collection_submission.reject(user=moderator, comment='Test Comment') + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REJECTED assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REJECTED @pytest.mark.parametrize('user_role', UserRoles.excluding(*[UserRoles.ADMIN_USER, UserRoles.MODERATOR])) @@ -477,20 +489,26 @@ def test_remove_success(self, node, user_role, hybrid_moderated_collection_submi hybrid_moderated_collection_submission.remove(user=user, comment='Test Comment') assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_moderator(self, node, hybrid_moderated_collection_submission, mock_send_grid): + def test_notify_moderated_removed_moderator(self, node, hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.MODERATOR) - hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send_grid.called + with capture_notifications() as notifications: + hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_MODERATOR assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED - def test_notify_moderated_removed_admin(self, node, hybrid_moderated_collection_submission, mock_send_grid): + def test_notify_moderated_removed_admin(self, node, hybrid_moderated_collection_submission): hybrid_moderated_collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) moderator = configure_test_auth(node, UserRoles.ADMIN_USER) - hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') - assert mock_send_grid.called + with capture_notifications() as notifications: + hybrid_moderated_collection_submission.remove(user=moderator, comment='Test Comment') + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN + assert notifications[1]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN + assert hybrid_moderated_collection_submission.state == CollectionSubmissionStates.REMOVED def test_resubmit_success(self, node, hybrid_moderated_collection_submission): From 86a94666d3378760508bf31d2cb1c03f1b276add Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 23 Jul 2025 11:16:34 -0400 
Subject: [PATCH 109/176] fix reviewable and contributor notifications --- ...est_collections_provider_moderator_list.py | 29 ++++++++++++------- notifications.yaml | 8 +++++ osf/utils/notifications.py | 2 +- osf_tests/test_reviewable.py | 19 +++++++----- tests/test_adding_contributor_views.py | 19 +++++++----- website/project/views/contributor.py | 7 ++++- 6 files changed, 56 insertions(+), 28 deletions(-) diff --git a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py index 20d081e8709..5a7275158f2 100644 --- a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py +++ b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py @@ -1,12 +1,14 @@ import pytest from api.base.settings.defaults import API_BASE +from osf.models import NotificationType from osf_tests.factories import ( AuthUserFactory, CollectionProviderFactory, ) from osf.utils import permissions from osf_tests.utils import _ensure_subscriptions +from tests.utils import capture_notifications @pytest.fixture() @@ -112,11 +114,13 @@ def test_POST_forbidden(self, mock_send_grid, app, url, nonmoderator, moderator, def test_POST_admin_success_existing_user(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED assert res.status_code == 201 assert res.json['data']['id'] == nonmoderator._id assert res.json['data']['attributes']['permission_group'] == 'moderator' - assert mock_send_grid.call_count == 1 def test_POST_admin_failure_existing_moderator(self, mock_send_grid, app, url, moderator, admin, provider): payload = make_payload(user_id=moderator._id, permission_group='moderator') @@ -124,21 +128,24 @@ def test_POST_admin_failure_existing_moderator(self, mock_send_grid, app, url, m assert res.status_code == 400 assert mock_send_grid.call_count == 0 - def test_POST_admin_failure_unreg_moderator(self, mock_send_grid, app, url, moderator, nonmoderator, admin, provider): + def test_POST_admin_failure_unreg_moderator(self, app, url, moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Jalen Hurts', 'email': '1eagles@allbatman.org'} # test_user_with_no_moderator_admin_permissions payload = make_payload(permission_group='moderator', **unreg_user) - res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=nonmoderator.auth, expect_errors=True) + assert not notifications assert res.status_code == 403 - assert mock_send_grid.call_count == 0 # test_user_with_moderator_admin_permissions payload = make_payload(permission_group='moderator', **unreg_user) - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) assert res.status_code == 201 - assert mock_send_grid.call_count == 1 - assert mock_send_grid.call_args[1]['to_addr'] == unreg_user['email'] + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONFIRM_EMAIL_MODERATION + 
assert notifications[0]['kwargs']['user'].username == unreg_user['email'] def test_POST_admin_failure_invalid_group(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='citizen') @@ -148,12 +155,14 @@ def test_POST_admin_failure_invalid_group(self, mock_send_grid, app, url, nonmod def test_POST_admin_success_email(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONFIRM_EMAIL_MODERATION assert res.status_code == 201 assert len(res.json['data']['id']) == 5 assert res.json['data']['attributes']['permission_group'] == 'moderator' assert 'email' not in res.json['data']['attributes'] - assert mock_send_grid.call_count == 1 def test_moderators_alphabetically(self, app, url, admin, moderator, provider): admin.fullname = 'Flecher Cox' diff --git a/notifications.yaml b/notifications.yaml index 61a146daffa..c5a3d7a6cb5 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -158,6 +158,14 @@ notification_types: __docs__: ... object_content_type_model_name: abstractprovider template: 'website/templates/emails/moderator_added.html.mako' + - name: provider_reviews_submission_confirmation + __docs__: ... + object_content_type_model_name: abstractprovider + template: 'website/templates/emails/reviews_submission_confirmation.html.mako' + - name: provider_reviews_resubmission_confirmation + __docs__: ... 
+ object_content_type_model_name: abstractprovider + template: 'website/templates/emails/reviews_resubmission_confirmation.html.mako' #### NODE - name: node_file_updated diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index 4f0c1a0dc05..b85db6532ac 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -60,7 +60,7 @@ def notify_resubmit(resource, user, *args, **kwargs): reviews_signals.reviews_email_submit.send( recipients=recipients, context=context, - template=mails.REVIEWS_RESUBMISSION_CONFIRMATION, + template=NotificationType.Type.PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION, resource=resource, ) reviews_signals.reviews_email_submit_moderators_notifications.send( diff --git a/osf_tests/test_reviewable.py b/osf_tests/test_reviewable.py index e3bc0b3d709..eb3783b71bc 100644 --- a/osf_tests/test_reviewable.py +++ b/osf_tests/test_reviewable.py @@ -1,13 +1,13 @@ from unittest import mock import pytest -from osf.models import Preprint +from osf.models import Preprint, NotificationType from osf.utils.workflows import DefaultStates from osf_tests.factories import PreprintFactory, AuthUserFactory +from tests.utils import capture_notifications @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestReviewable: @mock.patch('website.identifiers.utils.request_identifiers') @@ -34,23 +34,26 @@ def test_state_changes(self, _): from_db.refresh_from_db() assert from_db.machine_state == DefaultStates.ACCEPTED.value - def test_reject_resubmission_sends_emails(self, mock_send_grid): + def test_reject_resubmission_sends_emails(self): user = AuthUserFactory() preprint = PreprintFactory( reviews_workflow='pre-moderation', is_published=False ) assert preprint.machine_state == DefaultStates.INITIAL.value - assert not mock_send_grid.call_count - preprint.run_submit(user) - assert mock_send_grid.call_count == 1 + with capture_notifications() as notifications: + preprint.run_submit(user) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION assert preprint.machine_state == DefaultStates.PENDING.value assert not user.notification_subscriptions.exists() preprint.run_reject(user, 'comment') assert preprint.machine_state == DefaultStates.REJECTED.value - preprint.run_submit(user) # Resubmission alerts users and moderators + with capture_notifications() as notifications: + preprint.run_submit(user) # Resubmission alerts users and moderators + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION assert preprint.machine_state == DefaultStates.PENDING.value - assert mock_send_grid.call_count == 2 diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 6bbd70681b6..5825a0b42b5 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -13,7 +13,7 @@ from framework import auth from framework.auth import Auth from framework.exceptions import HTTPError -from osf.models import NodeRelation +from osf.models import NodeRelation, NotificationType from osf.utils import permissions from osf_tests.factories import ( fake_email, @@ -30,6 +30,7 @@ get_default_metaschema, OsfTestCase, ) +from tests.utils import capture_notifications from website.profile.utils import add_contributor_json, serialize_unregistered from website.project.signals import contributor_added from website.project.views.contributor import ( @@ -171,11 +172,10 @@ def 
test_add_contributor_with_unreg_contribs_and_reg_contribs(self): assert rec['email'] == email @mock.patch('website.project.views.contributor.send_claim_email') - def test_add_contributors_post_only_sends_one_email_to_unreg_user( - self, mock_send_claim_email): + def test_add_contributors_post_only_sends_one_email_to_unreg_user(self, mock_send_claim_email): # Project has components - comp1, comp2 = NodeFactory( - creator=self.creator), NodeFactory(creator=self.creator) + comp1 = NodeFactory(creator=self.creator) + comp2 = NodeFactory(creator=self.creator) NodeRelation.objects.create(parent=self.project, child=comp1) NodeRelation.objects.create(parent=self.project, child=comp2) self.project.save() @@ -224,10 +224,13 @@ def test_add_contributors_post_only_sends_one_email_to_registered_user(self): # send request url = self.project.api_url_for('project_contributors_post') assert self.project.can_edit(user=self.creator) - self.app.post(url, json=payload, auth=self.creator.auth) + with capture_notifications() as notifications: + self.app.post(url, json=payload, auth=self.creator.auth) + assert len(notifications) == 3 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + assert notifications[1]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + assert notifications[2]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - # send_mail should only have been called once - assert self.mock_notification_send.call_count == 1 def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self): # Project has a component with a sub-component diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index f3788f8b0c5..ea4ec0f67be 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -471,7 +471,12 @@ def check_email_throttle_claim_email(node, contributor): contributor.contributor_added_email_records[node._id] = {} def send_claim_email( - email, unclaimed_user, node, notify=True, throttle=24 * 3600, email_template='default' + email, + unclaimed_user, + node, + notify=True, + throttle=24 * 3600, + email_template='default' ): """ Send a claim email to an unregistered contributor or the referrer, depending on the scenario. 
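Note on the pattern used throughout the preceding patches: the migrated tests all assert against the `capture_notifications` helper imported from `tests.utils`, but the series never shows that helper itself. The sketch below is a minimal, self-contained approximation of the interface those tests rely on, namely a context manager that yields a list of dicts with `type` and `kwargs` keys. The `emit` function, the `_SINKS` list, and the plain string notification name are illustrative stand-ins and are not OSF code; the real helper presumably intercepts `NotificationType.emit` internally.

from contextlib import contextmanager

# Active capture lists; the real helper presumably patches NotificationType.emit instead.
_SINKS = []


def emit(notification_type, **kwargs):
    """Toy stand-in for the notification emitter that production code would call."""
    for sink in _SINKS:
        sink.append({'type': notification_type, 'kwargs': kwargs})


@contextmanager
def capture_notifications():
    """Collect every notification emitted inside the block instead of sending it."""
    captured = []
    _SINKS.append(captured)
    try:
        yield captured
    finally:
        _SINKS.remove(captured)


# Usage mirroring the migrated tests:
with capture_notifications() as notifications:
    emit('node_affiliation_changed', user='admin')

assert len(notifications) == 1
assert notifications[0]['type'] == 'node_affiliation_changed'
assert notifications[0]['kwargs']['user'] == 'admin'

Capturing into a plain list keeps the assertions order-sensitive, which is why the tests above index `notifications[0]` and `notifications[1]` rather than comparing sets.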
From 0e2f169d7e9dc3031fe82847feac06166067b656 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 24 Jul 2025 10:05:38 -0400 Subject: [PATCH 110/176] fix preprint versioning tests --- framework/auth/views.py | 2 +- osf/models/collection_submission.py | 1 + osf/models/preprint.py | 6 +- tests/test_misc_views.py | 9 +- website/notifications/constants.py | 2 +- website/notifications/emails.py | 19 +- website/notifications/events/files.py | 26 +- website/notifications/utils.py | 48 ++-- website/notifications/views.py | 48 ++-- website/reviews/listeners.py | 243 +++--------------- .../emails/new_pending_submissions.html.mako | 2 +- 11 files changed, 104 insertions(+), 302 deletions(-) diff --git a/framework/auth/views.py b/framework/auth/views.py index e00df8679cd..7e4cd6ad234 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -666,7 +666,7 @@ def external_login_confirm_email_get(auth, uid, token): ).emit( user=user, event_context={ - 'external_id_provider': provider.name, + 'external_id_provider': provider, 'can_change_preferences': False, 'osf_contact_email': settings.OSF_CONTACT_EMAIL, }, diff --git a/osf/models/collection_submission.py b/osf/models/collection_submission.py index 41b05862a02..c7f5e93b3e9 100644 --- a/osf/models/collection_submission.py +++ b/osf/models/collection_submission.py @@ -129,6 +129,7 @@ def _notify_moderators_pending(self, event_data): subscribed_object=self.guid.referent, event_context={ 'submitter': self.creator.id, + 'requester_contributor_names': ''.join(self.guid.referent.contributors.values_list('fullname', flat=True)) }, ) diff --git a/osf/models/preprint.py b/osf/models/preprint.py index 2dd469c5fa9..17e792e15aa 100644 --- a/osf/models/preprint.py +++ b/osf/models/preprint.py @@ -35,8 +35,6 @@ from osf.utils import sanitize from osf.utils.permissions import ADMIN, WRITE from osf.utils.requests import get_request_and_user_id, string_type_request_headers -from website.notifications.emails import get_user_subscriptions -from website.notifications import utils from website.identifiers.clients import CrossRefClient, ECSArXivCrossRefClient from website.project.licenses import set_license from website.util import api_v2_url, api_url_for, web_url_for @@ -1032,8 +1030,6 @@ def _add_creator_as_contributor(self): def _send_preprint_confirmation(self, auth): # Send creator confirmation email recipient = self.creator - event_type = utils.find_subscription_type('global_reviews') - user_subscriptions = get_user_subscriptions(recipient, event_type) if self.provider._id == 'osf': logo = settings.OSF_PREPRINTS_LOGO else: @@ -1050,7 +1046,7 @@ def _send_preprint_confirmation(self, auth): provider_id=self.provider._id if not self.provider.domain else '').strip('/'), 'provider_contact_email': self.provider.email_contact or settings.OSF_CONTACT_EMAIL, 'provider_support_email': self.provider.email_support or settings.OSF_SUPPORT_EMAIL, - 'no_future_emails': user_subscriptions['none'], + 'no_future_emails': False, 'is_creator': True, 'provider_name': 'OSF Preprints' if self.provider.name == 'Open Science Framework' else self.provider.name, 'logo': logo, diff --git a/tests/test_misc_views.py b/tests/test_misc_views.py index 814ab0556f1..27c2a3e383c 100644 --- a/tests/test_misc_views.py +++ b/tests/test_misc_views.py @@ -21,7 +21,7 @@ Comment, OSFUser, SpamStatus, - NodeRelation, + NodeRelation, NotificationType, ) from osf.utils import permissions from osf_tests.factories import ( @@ -50,6 +50,7 @@ from website.util import web_url_for from website.util import 
rubeus from conftest import start_mock_send_grid +from tests.utils import capture_notifications pytestmark = pytest.mark.django_db @@ -426,13 +427,15 @@ def test_external_login_confirm_email_get_link(self): self.user.save() assert not self.user.is_registered url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') - res = self.app.get(url) + with capture_notifications() as notification: + res = self.app.get(url) + assert len(notification) == 1 + assert notification[0]['type'] == NotificationType.Type.USER_EXTERNAL_LOGIN_LINK_SUCCESS assert res.status_code == 302, 'redirects to cas login' assert 'You should be redirected automatically' in str(res.html) assert '/login?service=' in res.location assert 'new=true' not in parse.unquote(res.location) - assert self.mock_send_grid.call_count == 1 self.user.reload() assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' diff --git a/website/notifications/constants.py b/website/notifications/constants.py index ce3c9db4315..66bb575b765 100644 --- a/website/notifications/constants.py +++ b/website/notifications/constants.py @@ -1,5 +1,5 @@ NODE_SUBSCRIPTIONS_AVAILABLE = { - 'file_updated': 'Files updated' + 'node_file_updated': 'Files updated' } # Note: if the subscription starts with 'global_', it will be treated like a default diff --git a/website/notifications/emails.py b/website/notifications/emails.py index d28352b2bdd..da2024e8e31 100644 --- a/website/notifications/emails.py +++ b/website/notifications/emails.py @@ -2,7 +2,7 @@ from babel import dates, core, Locale -from osf.models import AbstractNode, NotificationSubscriptionLegacy +from osf.models import AbstractNode, NotificationSubscription from osf.models.notifications import NotificationDigest from osf.utils.permissions import ADMIN, READ from website import mails @@ -13,7 +13,7 @@ def notify(event, user, node, timestamp, **context): """Retrieve appropriate ***subscription*** and passe user list - +website/notifications/u :param event: event that triggered the notification :param user: user who triggered notification :param node: instance of Node @@ -160,7 +160,10 @@ def check_node(node, event): """Return subscription for a particular node and event.""" node_subscriptions = {key: [] for key in constants.NOTIFICATION_TYPES} if node: - subscription = NotificationSubscriptionLegacy.load(utils.to_subscription_key(node._id, event)) + subscription = NotificationSubscription.objects.filter( + node=node, + notification_type__name=event + ) for notification_type in node_subscriptions: users = getattr(subscription, notification_type, []) if users: @@ -173,11 +176,11 @@ def check_node(node, event): def get_user_subscriptions(user, event): if user.is_disabled: return {} - user_subscription = NotificationSubscriptionLegacy.load(utils.to_subscription_key(user._id, event)) - if user_subscription: - return {key: list(getattr(user_subscription, key).all().values_list('guids___id', flat=True)) for key in constants.NOTIFICATION_TYPES} - else: - return {key: [user._id] if (event in constants.USER_SUBSCRIPTIONS_AVAILABLE and key == 'email_transactional') else [] for key in constants.NOTIFICATION_TYPES} + user_subscription, _ = NotificationSubscription.objects.get_or_create( + user=user, + notification_type__name=event + ) + return user_subscription def get_node_lineage(node): diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index fdaabad0426..db8a9c91fdc 100644 --- 
a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -238,8 +238,13 @@ def perform(self): return # File if self.payload['destination']['kind'] != 'folder': - moved, warn, rm_users = event_utils.categorize_users(self.user, self.event_type, self.source_node, - self.event_type, self.node) + moved, warn, rm_users = event_utils.categorize_users( + self.user, + self.event_type, + self.source_node, + self.event_type, + self.node + ) warn_message = f'{self.html_message} You are no longer tracking that file based on the settings you selected for the component.' remove_message = ( f'{self.html_message} Your subscription has been removed due to ' @@ -248,11 +253,20 @@ def perform(self): # Folder else: # Gets all the files in a folder to look for permissions conflicts - files = event_utils.get_file_subs_from_folder(self.addon, self.user, self.payload['destination']['kind'], - self.payload['destination']['path'], - self.payload['destination']['name']) + files = event_utils.get_file_subs_from_folder( + self.addon, + self.user, + self.payload['destination']['kind'], + self.payload['destination']['path'], + self.payload['destination']['name'] + ) # Bins users into different permissions - moved, warn, rm_users = event_utils.compile_user_lists(files, self.user, self.source_node, self.node) + moved, warn, rm_users = event_utils.compile_user_lists( + files, + self.user, + self.source_node, + self.node + ) # For users that don't have individual file subscription but has permission on the new node warn_message = f'{self.html_message} You are no longer tracking that folder or files within based on the settings you selected for the component.' diff --git a/website/notifications/utils.py b/website/notifications/utils.py index 51d487ff67a..e64d76c258f 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -1,9 +1,11 @@ import collections from django.apps import apps +from django.contrib.contenttypes.models import ContentType from django.db.models import Q from framework.postcommit_tasks.handlers import run_postcommit +from osf.models import NotificationSubscription from osf.utils.permissions import READ from website.notifications import constants from website.notifications.exceptions import InvalidSubscriptionError @@ -144,22 +146,17 @@ def users_to_remove(source_event, source_node, new_node): :param new_node: Node instance where a sub or new sub will be. 
:return: Dict of notification type lists with user_ids """ - NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') removed_users = {key: [] for key in constants.NOTIFICATION_TYPES} if source_node == new_node: return removed_users - old_sub = NotificationSubscriptionLegacy.load(to_subscription_key(source_node._id, source_event)) - old_node_sub = NotificationSubscriptionLegacy.load(to_subscription_key(source_node._id, - '_'.join(source_event.split('_')[-2:]))) - if not old_sub and not old_node_sub: - return removed_users + old_sub = NotificationSubscription.objects.get( + subscribed_object=source_node, + notification_type__name=source_event + ) for notification_type in constants.NOTIFICATION_TYPES: users = [] if hasattr(old_sub, notification_type): users += list(getattr(old_sub, notification_type).values_list('guids___id', flat=True)) - if hasattr(old_node_sub, notification_type): - users += list(getattr(old_node_sub, notification_type).values_list('guids___id', flat=True)) - subbed, removed_users[notification_type] = separate_users(new_node, users) return removed_users @@ -449,7 +446,6 @@ def subscribe_user_to_notifications(node, user): """ Update the notification settings for the creator or contributors :param user: User to subscribe to notifications """ - NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') Preprint = apps.get_model('osf.Preprint') DraftRegistration = apps.get_model('osf.DraftRegistration') if isinstance(node, Preprint): @@ -468,31 +464,19 @@ def subscribe_user_to_notifications(node, user): raise InvalidSubscriptionError('Registrations are invalid targets for subscriptions') events = constants.NODE_SUBSCRIPTIONS_AVAILABLE - notification_type = 'email_transactional' - target_id = node._id if user.is_registered: for event in events: - event_id = to_subscription_key(target_id, event) - global_event_id = to_subscription_key(user._id, 'global_' + event) - global_subscription = NotificationSubscriptionLegacy.load(global_event_id) - - subscription = NotificationSubscriptionLegacy.load(event_id) - - # If no subscription for component and creator is the user, do not create subscription - # If no subscription exists for the component, this means that it should adopt its - # parent's settings - if not (node and node.parent_node and not subscription and node.creator == user): - if not subscription: - subscription = NotificationSubscriptionLegacy(_id=event_id, owner=node, event_name=event) - # Need to save here in order to access m2m fields - subscription.save() - if global_subscription: - global_notification_type = get_global_notification_type(global_subscription, user) - subscription.add_user_to_subscription(user, global_notification_type) - else: - subscription.add_user_to_subscription(user, notification_type) - subscription.save() + subscription, _ = NotificationSubscription.objects.get_or_create( + user=user, + notification_type__name=event + ) + subscription, _ = NotificationSubscription.objects.get_or_create( + user=user, + notification_type__name=event, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node) + ) def format_user_and_project_subscriptions(user): diff --git a/website/notifications/views.py b/website/notifications/views.py index 1cbb62ee08d..09fb59a1260 100644 --- a/website/notifications/views.py +++ b/website/notifications/views.py @@ -6,8 +6,7 @@ from framework.auth.decorators import must_be_logged_in from framework.exceptions import HTTPError -from osf.models import 
AbstractNode, Registration -from osf.models.notifications import NotificationSubscriptionLegacy +from osf.models import AbstractNode, Registration, NotificationSubscription from osf.utils.permissions import READ from website.notifications import utils from website.notifications.constants import NOTIFICATION_TYPES @@ -69,7 +68,6 @@ def configure_subscription(auth): f'{user!r} attempted to adopt_parent of a none node id, {target_id}' ) raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - owner = user else: if not node.has_permission(user, READ): sentry.log_message(f'{user!r} attempted to subscribe to private node, {target_id}') @@ -81,40 +79,28 @@ def configure_subscription(auth): ) raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - if notification_type != 'adopt_parent': - owner = node + if 'file_updated' in event and len(event) > len('file_updated'): + pass else: - if 'file_updated' in event and len(event) > len('file_updated'): - pass - else: - parent = node.parent_node - if not parent: - sentry.log_message( - '{!r} attempted to adopt_parent of ' - 'the parentless project, {!r}'.format(user, node) - ) - raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - - # If adopt_parent make sure that this subscription is None for the current User - subscription = NotificationSubscriptionLegacy.load(event_id) - if not subscription: - return {} # We're done here - - subscription.remove_user_from_subscription(user) - return {} - - subscription = NotificationSubscriptionLegacy.load(event_id) - - if not subscription: - subscription = NotificationSubscriptionLegacy(_id=event_id, owner=owner, event_name=event) - subscription.save() + parent = node.parent_node + if not parent: + sentry.log_message( + '{!r} attempted to adopt_parent of ' + 'the parentless project, {!r}'.format(user, node) + ) + raise HTTPError(http_status.HTTP_400_BAD_REQUEST) + + subscription, _ = NotificationSubscription.objects.get_or_create( + user=user, + subscribed_object=node, + notification_type__name=event + ) + subscription.save() if node and node._id not in user.notifications_configured: user.notifications_configured[node._id] = True user.save() - subscription.add_user_to_subscription(user, notification_type) - subscription.save() return {'message': f'Successfully subscribed to {notification_type} list on {event_id}'} diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index 3b6feeec3fc..616c95b4b2c 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -1,238 +1,53 @@ -from django.utils import timezone - -from osf.models import NotificationType -from website.notifications import utils +from django.contrib.contenttypes.models import ContentType +from website.profile.utils import get_profile_image_url +from osf.models import NotificationSubscription, NotificationType +from website.settings import DOMAIN from website.reviews import signals as reviews_signals -from website.settings import OSF_PREPRINTS_LOGO, OSF_REGISTRIES_LOGO, DOMAIN - - -@reviews_signals.reviews_email.connect -def reviews_notification(self, creator, template, context, action): - """ - Handle email notifications including: update comment, accept, and reject of submission, but not initial submission - or resubmission. 
- """ - # Avoid AppRegistryNotReady error - from website.notifications.emails import notify_global_event - recipients = list(action.target.contributors) - time_now = action.created if action is not None else timezone.now() - node = action.target - notify_global_event( - event='global_reviews', - sender_user=creator, - node=node, - timestamp=time_now, - recipients=recipients, - template=template, - context=context - ) - - -@reviews_signals.reviews_email_submit.connect -def reviews_submit_notification(self, recipients, context, resource, template=None): - """ - Handle email notifications for a new submission or a resubmission - """ - if not template: - template = NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION - - # Avoid AppRegistryNotReady error - from website.notifications.emails import get_user_subscriptions - event_type = utils.find_subscription_type('global_reviews') - - provider = resource.provider - if provider._id == 'osf': - if provider.type == 'osf.preprintprovider': - context['logo'] = OSF_PREPRINTS_LOGO - elif provider.type == 'osf.registrationprovider': - context['logo'] = OSF_REGISTRIES_LOGO - else: - raise NotImplementedError() - else: - context['logo'] = resource.provider._id - - for recipient in recipients: - user_subscriptions = get_user_subscriptions(recipient, event_type) - context['no_future_emails'] = user_subscriptions['none'] - context['is_creator'] = recipient == resource.creator - context['provider_name'] = resource.provider.name - NotificationType.objects.get( - name=template - ).emit( - user=recipient, - event_context=context - ) - - -@reviews_signals.reviews_email_submit_moderators_notifications.connect -def reviews_submit_notification_moderators(self, timestamp, resource, context, user): - """ - Handle email notifications to notify moderators of new submissions or resubmission. - """ - # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscriptionLegacy - from website.profile.utils import get_profile_image_url - from website.notifications.emails import store_emails - - provider = resource.provider - - # Set submission url - if provider.type == 'osf.preprintprovider': - context['reviews_submission_url'] = ( - f'{DOMAIN}reviews/preprints/{provider._id}/{resource._id}' - ) - elif provider.type == 'osf.registrationprovider': - context['reviews_submission_url'] = f'{DOMAIN}{resource._id}?mode=moderator' - else: - raise NotImplementedError(f'unsupported provider type {provider.type}') - - # Set url for profile image of the submitter - context['profile_image_url'] = get_profile_image_url(user) - - # Set message - revision_id = context.get('revision_id') - if revision_id: - context['message'] = f'submitted updates to "{resource.title}".' - context['reviews_submission_url'] += f'&revisionId={revision_id}' - else: - if context.get('resubmission'): - context['message'] = f'resubmitted "{resource.title}".' - else: - context['message'] = f'submitted "{resource.title}".' - - # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription, created = NotificationSubscriptionLegacy.objects.get_or_create( - _id=f'{provider._id}_new_pending_submissions', - provider=provider - ) - # "transactional" subscribers receive notifications "Immediately" (i.e. 
at 5 minute intervals) - # "digest" subscribers receive emails daily - recipients_per_subscription_type = { - 'email_transactional': list( - provider_subscription.email_transactional.all().values_list('guids___id', flat=True) - ), - 'email_digest': list( - provider_subscription.email_digest.all().values_list('guids___id', flat=True) - ) - } - - for subscription_type, recipient_ids in recipients_per_subscription_type.items(): - if not recipient_ids: - continue - - store_emails( - recipient_ids, - subscription_type, - 'new_pending_submissions', - user, - resource, - timestamp, - abstract_provider=provider, - **context - ) - -# Handle email notifications to notify moderators of new submissions. @reviews_signals.reviews_withdraw_requests_notification_moderators.connect def reviews_withdraw_requests_notification_moderators(self, timestamp, context, user, resource): - # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscriptionLegacy - from website.profile.utils import get_profile_image_url - from website.notifications.emails import store_emails context['referrer_fullname'] = user.fullname - provider = resource.provider - # Get NotificationSubscription instance, which contains reference to all subscribers - provider_subscription, created = NotificationSubscriptionLegacy.objects.get_or_create( - _id=f'{provider._id}_new_pending_withdraw_requests', - provider=provider + provider_subscription, _ = NotificationSubscription.objects.get_or_create( + notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS, + object_id=provider.id, + content_type=ContentType.objects.get_for_model(provider.__class__), ) - # Set message context['message'] = f'has requested withdrawal of "{resource.title}".' - # Set url for profile image of the submitter context['profile_image_url'] = get_profile_image_url(user) - # Set submission url context['reviews_submission_url'] = f'{DOMAIN}reviews/registries/{provider._id}/{resource._id}' - email_transactional_ids = list(provider_subscription.email_transactional.all().values_list('guids___id', flat=True)) - email_digest_ids = list(provider_subscription.email_digest.all().values_list('guids___id', flat=True)) - - # Store emails to be sent to subscribers instantly (at a 5 min interval) - store_emails( - email_transactional_ids, - 'email_transactional', - 'new_pending_withdraw_requests', - user, - resource, - timestamp, - abstract_provider=provider, - template='new_pending_submissions', - **context - ) + for recipient in provider_subscription.preorint.moderators.all(): + NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS + ).emit( + user=recipient, + event_context=context, + ) - # Store emails to be sent to subscribers daily - store_emails( - email_digest_ids, - 'email_digest', - 'new_pending_withdraw_requests', - user, - resource, - timestamp, - abstract_provider=provider, - template='new_pending_submissions', - **context - ) -# Handle email notifications to notify moderators of new withdrawal requests @reviews_signals.reviews_email_withdrawal_requests.connect def reviews_withdrawal_requests_notification(self, timestamp, context): - # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationSubscriptionLegacy - from website.notifications.emails import store_emails - from website.profile.utils import get_profile_image_url - from website import settings - - # Get NotificationSubscription instance, which contains reference to all 
subscribers - provider_subscription = NotificationSubscriptionLegacy.load( - '{}_new_pending_submissions'.format(context['reviewable'].provider._id)) preprint = context['reviewable'] preprint_word = preprint.provider.preprint_word - # Set message + provider_subscription, _ = NotificationSubscription.objects.get_or_create( + notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, + object_id=preprint.provider.id, + content_type=ContentType.objects.get_for_model(preprint.provider.__class__), + ) + context['message'] = f'has requested withdrawal of the {preprint_word} "{preprint.title}".' - # Set url for profile image of the submitter context['profile_image_url'] = get_profile_image_url(context['requester']) - # Set submission url - context['reviews_submission_url'] = '{}reviews/preprints/{}/{}'.format(settings.DOMAIN, - preprint.provider._id, - preprint._id) - - email_transactional_ids = list(provider_subscription.email_transactional.all().values_list('guids___id', flat=True)) - email_digest_ids = list(provider_subscription.email_digest.all().values_list('guids___id', flat=True)) - - # Store emails to be sent to subscribers instantly (at a 5 min interval) - store_emails( - email_transactional_ids, - 'email_transactional', - 'new_pending_submissions', - context['requester'], - preprint, - timestamp, - abstract_provider=preprint.provider, - **context - ) + context['reviews_submission_url'] = f'{DOMAIN}reviews/preprints/{preprint.provider._id}/{preprint._id}' - # Store emails to be sent to subscribers daily - store_emails( - email_digest_ids, - 'email_digest', - 'new_pending_submissions', - context['requester'], - preprint, - timestamp, - abstract_provider=preprint.provider, - **context - ) + for recipient in provider_subscription.preorint.contributors.all(): + NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + ).emit( + user=recipient, + event_context=context, + ) diff --git a/website/templates/emails/new_pending_submissions.html.mako b/website/templates/emails/new_pending_submissions.html.mako index 067148e2437..46f6094276b 100644 --- a/website/templates/emails/new_pending_submissions.html.mako +++ b/website/templates/emails/new_pending_submissions.html.mako @@ -7,7 +7,7 @@ % if is_request_email: ${requester_fullname} % else: - ${', '.join(reviewable.contributors.values_list('fullname', flat=True))} + ${requester_contributor_names} % endif ${message} From 69b2d9024197ca6162cd5f3b8d6d99b749677b9c Mon Sep 17 00:00:00 2001 From: antkryt Date: Thu, 24 Jul 2025 17:16:50 +0300 Subject: [PATCH 111/176] [ENG-7979] Registrations pending moderation that have components also pending moderation do not display the children (#11222) * show only public nodes for non-authorized users in the node queryset * add custom filters to can_view(); include pending nodes for moderators in get_node_count() * add test --- api/base/views.py | 6 +-- api/nodes/serializers.py | 15 +++++++- api/registrations/views.py | 14 +++++++ .../test_registrations_childrens_list.py | 38 +++++++++++++++++-- osf/models/node.py | 12 +++--- osf/models/provider.py | 6 +++ osf/models/registrations.py | 9 +---- osf/utils/workflows.py | 9 +++++ 8 files changed, 88 insertions(+), 21 deletions(-) diff --git a/api/base/views.py b/api/base/views.py index aed2a033e0a..e0f5873fe2b 100644 --- a/api/base/views.py +++ b/api/base/views.py @@ -481,7 +481,7 @@ def get_ordering(self): return self.default_ordering # overrides GenericAPIView - def get_queryset(self): + def get_queryset(self, 
*args, **kwargs): """ Returns non-deleted children of the current resource that the user has permission to view - Children could be public, viewable through a view-only link (if provided), or the user @@ -494,8 +494,8 @@ def get_queryset(self): if self.request.query_params.get('sort', None) == '_order': # Order by the order of the node_relations order = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(node_pks)]) - return self.get_queryset_from_request().filter(pk__in=node_pks).can_view(auth.user, auth.private_link).order_by(order) - return self.get_queryset_from_request().filter(pk__in=node_pks).can_view(auth.user, auth.private_link) + return self.get_queryset_from_request().filter(pk__in=node_pks).can_view(auth.user, auth.private_link, *args, **kwargs).order_by(order) + return self.get_queryset_from_request().filter(pk__in=node_pks).can_view(auth.user, auth.private_link, *args, **kwargs) class BaseContributorDetail(JSONAPIBaseView, generics.RetrieveAPIView): diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index 341c589d8aa..f9e0aeed7a9 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -681,9 +681,22 @@ def get_node_count(self, obj): AND UG.osfuser_id = %s) ) ) + OR ( + osf_abstractnode.type = 'osf.registration' + AND osf_abstractnode.moderation_state IN ('pending', 'pending_withdraw', 'embargo', 'pending_embargo_termination') + AND EXISTS ( + SELECT 1 + FROM auth_permission AS P2 + INNER JOIN osf_abstractprovidergroupobjectpermission AS G2 ON (P2.id = G2.permission_id) + INNER JOIN osf_osfuser_groups AS UG2 ON (G2.group_id = UG2.group_id) + WHERE P2.codename = 'view_submissions' + AND G2.content_object_id = osf_abstractnode.provider_id + AND UG2.osfuser_id = %s + ) + ) OR (osf_privatelink.key = %s AND osf_privatelink.is_deleted = FALSE) ); - """, [obj.id, obj.id, user_id, obj.id, user_id, auth.private_key], + """, [obj.id, obj.id, user_id, obj.id, user_id, user_id, auth.private_key], ) return int(cursor.fetchone()[0]) diff --git a/api/registrations/views.py b/api/registrations/views.py index a8d10d0602b..b2026d5f4b8 100644 --- a/api/registrations/views.py +++ b/api/registrations/views.py @@ -407,6 +407,20 @@ class RegistrationChildrenList(BaseChildrenList, generics.ListAPIView, Registrat model_class = Registration + def get_queryset(self): + node = self.get_node() + auth = get_user_auth(self.request) + user = auth.user + provider = getattr(node, 'provider', None) + is_moderated = getattr(provider, 'is_reviewed', False) + custom_filters = {} + + if is_moderated and user and user.is_authenticated and provider.is_moderator(user): + from osf.utils.workflows import RegistrationModerationStates + custom_filters['moderation_state__in'] = RegistrationModerationStates.in_moderation_states() + + return super().get_queryset(**custom_filters) + class RegistrationCitationDetail(NodeCitationDetail, RegistrationMixin): """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/registrations_citations_list). 
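In the hunks above, `RegistrationChildrenList.get_queryset` builds a `moderation_state__in` filter for provider moderators and hands it through `BaseChildrenList.get_queryset(*args, **kwargs)` to `AbstractNodeQuerySet.can_view`, which applies it in place of the default `is_public=True` restriction. A rough sketch of that flow with the view plumbing stripped away (the standalone function and its argument names are placeholders, not code from this PR):

```python
# Illustrative only: condenses RegistrationChildrenList.get_queryset ->
# BaseChildrenList.get_queryset -> AbstractNodeQuerySet.can_view from this patch.
from osf.utils.workflows import RegistrationModerationStates


def visible_children(children_qs, registration, user, private_link=None):
    provider = getattr(registration, 'provider', None)
    custom_filters = {}

    # Moderators of a reviewed provider also see children that are still in a
    # moderation state (pending, embargo, etc.), not only public ones.
    if getattr(provider, 'is_reviewed', False) and user and user.is_authenticated and provider.is_moderator(user):
        custom_filters['moderation_state__in'] = RegistrationModerationStates.in_moderation_states()

    # can_view() starts from is_public=True unless custom filters are supplied,
    # then ORs in nodes the user can read explicitly or implicitly.
    return children_qs.can_view(user, private_link, **custom_filters)
```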
diff --git a/api_tests/registrations/views/test_registrations_childrens_list.py b/api_tests/registrations/views/test_registrations_childrens_list.py index 67ff993fa2a..8c6646bdb80 100644 --- a/api_tests/registrations/views/test_registrations_childrens_list.py +++ b/api_tests/registrations/views/test_registrations_childrens_list.py @@ -5,9 +5,11 @@ NodeFactory, ProjectFactory, RegistrationFactory, + RegistrationProviderFactory, AuthUserFactory, PrivateLinkFactory, ) +from osf.utils.workflows import RegistrationModerationStates @pytest.fixture() @@ -69,15 +71,13 @@ def test_registrations_children_list(self, user, app, registration_with_children assert component_two._id in ids def test_return_registrations_list_no_auth_approved(self, user, app, registration_with_children_approved, registration_with_children_approved_url): - component_one, component_two, component_three, component_four = registration_with_children_approved.nodes - res = app.get(registration_with_children_approved_url) ids = [node['id'] for node in res.json['data']] assert res.status_code == 200 assert res.content_type == 'application/vnd.api+json' - assert component_one._id in ids - assert component_two._id in ids + for component in registration_with_children_approved.nodes: + assert component._id in ids def test_registrations_list_no_auth_unapproved(self, user, app, registration_with_children, registration_with_children_url): res = app.get(registration_with_children_url, expect_errors=True) @@ -138,6 +138,36 @@ def test_registration_children_no_auth_vol(self, user, app, registration_with_ch res = app.get(view_only_link_url, expect_errors=True) assert res.status_code == 401 + def test_registration_children_count_and_visibility_for_moderator(self, app, user): + non_contrib_moderator = AuthUserFactory() + + # Setup provider and assign moderator permission + provider = RegistrationProviderFactory(reviews_workflow='pre-moderation') + provider.add_to_group(non_contrib_moderator, 'admin') + provider.save() + + project = ProjectFactory(creator=user) + child = NodeFactory(parent=project, creator=user) + + registration = RegistrationFactory(project=project, provider=provider) + registration.moderation_state = RegistrationModerationStates.PENDING.db_name + registration.save() + + pending_child = RegistrationFactory(project=child, parent=registration, provider=provider) + pending_child.moderation_state = RegistrationModerationStates.PENDING.db_name + pending_child.save() + + url = f'/v2/registrations/{registration._id}/children/' + + res = app.get(url, auth=non_contrib_moderator.auth) + ids = [node['id'] for node in res.json['data']] + assert pending_child._id in ids + + # Count should be 1 + node_url = f'/v2/registrations/{registration._id}/?related_counts=children' + res = app.get(node_url, auth=non_contrib_moderator.auth) + assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 1 + @pytest.mark.django_db class TestRegistrationChildrenListFiltering: diff --git a/osf/models/node.py b/osf/models/node.py index 34fa14f1f03..51fc26af43a 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -145,9 +145,7 @@ def get_children(self, root, active=False, include_root=False): row.append(root.pk) return AbstractNode.objects.filter(id__in=row) - def can_view(self, user=None, private_link=None): - qs = self.filter(is_public=True) - + def can_view(self, user=None, private_link=None, **custom_filters): if private_link is not None: if isinstance(private_link, PrivateLink): private_link = private_link.key @@ 
-157,9 +155,12 @@ def can_view(self, user=None, private_link=None): return self.filter(private_links__is_deleted=False, private_links__key=private_link).filter( is_deleted=False) + # By default, only public nodes are shown. However, custom filters can be provided. + # This is useful when you want to display a specific subset of nodes unrelated to + # the current user (e.g. only `pending` nodes for moderators). + qs = self.filter(is_public=True) if not custom_filters else self.filter(**custom_filters) if user is not None and not isinstance(user, AnonymousUser): - read_user_query = get_objects_for_user(user, READ_NODE, self, with_superuser=False) - qs |= read_user_query + qs |= get_objects_for_user(user, READ_NODE, self, with_superuser=False) qs |= self.extra(where=[""" "osf_abstractnode".id in ( WITH RECURSIVE implicit_read AS ( @@ -179,6 +180,7 @@ def can_view(self, user=None, private_link=None): ) SELECT * FROM implicit_read ) """], params=(user.id,)) + return qs.filter(is_deleted=False) diff --git a/osf/models/provider.py b/osf/models/provider.py index c78e2f52c94..aee5ae8fa56 100644 --- a/osf/models/provider.py +++ b/osf/models/provider.py @@ -352,6 +352,12 @@ def validate_schema(self, schema): if not self.schemas.filter(id=schema.id).exists(): raise ValidationError('Invalid schema for provider.') + def is_moderator(self, user): + """Return True if the user is a moderator for this provider""" + if user and user.is_authenticated: + return user.has_perm('osf.view_submissions', self) + return False + class PreprintProvider(AbstractProvider): """ diff --git a/osf/models/registrations.py b/osf/models/registrations.py index 3d3e967be30..d74260358f4 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -452,14 +452,7 @@ def can_view(self, auth): if not auth or not auth.user or not self.is_moderated: return False - moderator_viewable_states = { - RegistrationModerationStates.PENDING.db_name, - RegistrationModerationStates.PENDING_WITHDRAW.db_name, - RegistrationModerationStates.EMBARGO.db_name, - RegistrationModerationStates.PENDING_EMBARGO_TERMINATION.db_name, - } - user_is_moderator = auth.user.has_perm('view_submissions', self.provider) - if self.moderation_state in moderator_viewable_states and user_is_moderator: + if self.moderation_state in RegistrationModerationStates.in_moderation_states() and self.provider.is_moderator(auth.user): return True return False diff --git a/osf/utils/workflows.py b/osf/utils/workflows.py index b054de25452..f562ff0aab3 100644 --- a/osf/utils/workflows.py +++ b/osf/utils/workflows.py @@ -121,6 +121,15 @@ def from_sanction(cls, sanction): return new_state + @classmethod + def in_moderation_states(cls): + return [ + cls.PENDING.db_name, + cls.EMBARGO.db_name, + cls.PENDING_EMBARGO_TERMINATION.db_name, + cls.PENDING_WITHDRAW.db_name, + ] + class RegistrationModerationTriggers(ModerationEnum): '''The acceptable 'triggers' to describe a moderated action on a Registration.''' From 0139a5f0b794394d9a131ce36fc291f4fbb7c807 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 24 Jul 2025 10:17:12 -0400 Subject: [PATCH 112/176] update withdraw request declined notification --- osf/utils/notifications.py | 17 +++++++++-------- website/reviews/listeners.py | 2 +- .../withdrawal_request_declined.html.mako | 8 ++++---- 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index b85db6532ac..910421ab476 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -1,7 
+1,6 @@ from django.utils import timezone from osf.models.notification_type import NotificationType -from website.mails import mails from website.reviews import signals as reviews_signals from website.settings import DOMAIN, OSF_SUPPORT_EMAIL, OSF_CONTACT_EMAIL from osf.utils.workflows import RegistrationModerationTriggers @@ -104,17 +103,19 @@ def notify_reject_withdraw_request(resource, action, *args, **kwargs): context['requester_fullname'] = action.creator.fullname for contributor in resource.contributors.all(): - context['contributor'] = contributor + context['contributor_fullname'] = contributor.fullname context['requester_fullname'] = action.creator.fullname context['is_requester'] = action.creator == contributor - - mails.send_mail( - contributor.username, - mails.WITHDRAWAL_REQUEST_DECLINED, - **context + NotificationType.objects.get( + name=NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_DECLINED + ).emit( + user=contributor, + event_context={ + 'is_requester': contributor, + **context + }, ) - def notify_moderator_registration_requests_withdrawal(resource, user, *args, **kwargs): context = get_email_template_context(resource) reviews_signals.reviews_withdraw_requests_notification_moderators.send( diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index 616c95b4b2c..52caa5fb3b0 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -11,7 +11,7 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context, provider = resource.provider provider_subscription, _ = NotificationSubscription.objects.get_or_create( - notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS, + notification_type__name=NotificationType.Type.PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED, object_id=provider.id, content_type=ContentType.objects.get_for_model(provider.__class__), ) diff --git a/website/templates/emails/withdrawal_request_declined.html.mako b/website/templates/emails/withdrawal_request_declined.html.mako index b24ddd861a1..4e63eed1b22 100644 --- a/website/templates/emails/withdrawal_request_declined.html.mako +++ b/website/templates/emails/withdrawal_request_declined.html.mako @@ -7,9 +7,9 @@ from website import settings %> % if document_type == 'registration': - Dear ${contributor.fullname}, + Dear ${contributor_fullname},

    - Your request to withdraw your registration "${reviewable_title}" from ${reviewable.provider.name} has been declined by the service moderators. The registration is still publicly available on ${reviewable.provider.name}.
    + Your request to withdraw your registration "${reviewable_title}" from ${reviewable_provider_name} has been declined by the service moderators. The registration is still publicly available on ${reviewable_provider_name}.<br>

    % if notify_comment:
    The moderator has provided the following comment:<br>&#13;
    @@ -18,10 +18,10 @@ % else: Dear ${requester_fullname},

    - Your request to withdraw your ${document_type} "${reviewable_title}" from ${reviewable.provider.name} has been declined by the service moderators. Login and visit your ${document_type} to view their feedback. The ${document_type} is still publicly available on ${reviewable.provider.name}.
    + Your request to withdraw your ${document_type} "${reviewable_title}" from ${reviewable_provider_name} has been declined by the service moderators. Login and visit your ${document_type} to view their feedback. The ${document_type} is still publicly available on ${reviewable_provider_name}.
    % endif

    Sincerely,
    - The ${reviewable.provider.name} and OSF Teams
    + The ${reviewable_provider_name} and OSF Teams
    From a8084f1a02a4d1cca6d619abff14c759c0080e7c Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Thu, 24 Jul 2025 20:35:35 +0300 Subject: [PATCH 113/176] added academiaInstitution in social-schema, fixed True value of 'ongoing', fixed/added tests (#11239) ## Purpose V2 API doesn't allow setting `True` value for `ongoing` property in employment/education tabs and set `academiaInstitution` property in social tab ## Changes Added `academiaInstitution` field in social-schema Fixed ignored required properties for `ongoing` property in employment/education-schema files Added new tests and fixed the old ones ## QA Notes Can be tested only via API Updates can be viewed in user settings ## Ticket https://openscience.atlassian.net/browse/ENG-8455 --- api/base/schemas/education-schema.json | 32 ++++++++------ api/base/schemas/employment-schema.json | 32 ++++++++------ api/base/schemas/social-schema.json | 4 ++ api_tests/users/views/test_user_detail.py | 54 +++++++++++++++-------- 4 files changed, 78 insertions(+), 44 deletions(-) diff --git a/api/base/schemas/education-schema.json b/api/base/schemas/education-schema.json index 3dda2a3481f..f917e411629 100644 --- a/api/base/schemas/education-schema.json +++ b/api/base/schemas/education-schema.json @@ -25,18 +25,6 @@ "minimum": 1900 }, "ongoing": { - "oneOf": [{ - "enum": [false], - "required": ["startYear", "endYear"] - }, - { - "enum": [true], - "required": ["startYear"], - "not": { - "required": ["endYear"] - } - } - ], "type": "boolean" }, "department": { @@ -56,7 +44,25 @@ "startMonth": ["startYear"], "startYear": ["ongoing"], "endYear": ["ongoing"], - "endYear": ["startYear"] + "endYear": ["startYear"], + "ongoing": { + "if": { + "properties": { + "ongoing": { + "const": false + } + } + }, + "then": { + "required": ["startYear", "endYear"] + }, + "else": { + "required": ["startYear"], + "not": { + "required": ["endYear"] + } + } + } } } } \ No newline at end of file diff --git a/api/base/schemas/employment-schema.json b/api/base/schemas/employment-schema.json index f2e77fb5096..619e59b726f 100644 --- a/api/base/schemas/employment-schema.json +++ b/api/base/schemas/employment-schema.json @@ -25,18 +25,6 @@ "minimum": 1900 }, "ongoing": { - "oneOf": [{ - "enum": [false], - "required": ["startYear", "endYear"] - }, - { - "enum": [true], - "required": ["startYear"], - "not": { - "required": ["endYear"] - } - } - ], "type": "boolean" }, "department": { @@ -56,7 +44,25 @@ "startMonth": ["startYear"], "startYear": ["ongoing"], "endYear": ["ongoing"], - "endYear": ["startYear"] + "endYear": ["startYear"], + "ongoing": { + "if": { + "properties": { + "ongoing": { + "const": false + } + } + }, + "then": { + "required": ["startYear", "endYear"] + }, + "else": { + "required": ["startYear"], + "not": { + "required": ["endYear"] + } + } + } } } } \ No newline at end of file diff --git a/api/base/schemas/social-schema.json b/api/base/schemas/social-schema.json index 2e520c40a76..97b9360698d 100644 --- a/api/base/schemas/social-schema.json +++ b/api/base/schemas/social-schema.json @@ -64,6 +64,10 @@ "description": "The academiaProfileID for the given user", "type": "string" }, + "academiaInstitution": { + "description": "The academiaInstitution for the given user", + "type": "string" + }, "orcid": { "description": "The orcid for the given user", "type": "string" diff --git a/api_tests/users/views/test_user_detail.py b/api_tests/users/views/test_user_detail.py index 02a616bc4c4..cdfc5599ddd 100644 --- a/api_tests/users/views/test_user_detail.py +++ 
b/api_tests/users/views/test_user_detail.py @@ -935,7 +935,8 @@ def test_patch_all_social_fields(self, app, user_one, url_user_one, mock_spam_he 'impactStory': 'why not', 'orcid': 'ork-id', 'researchGate': 'Why are there so many of these', - 'researcherId': 'ok-lastone' + 'researcherId': 'ok-lastone', + 'academiaInstitution': 'Center for Open Science' } fake_fields = { @@ -1354,30 +1355,47 @@ def test_user_put_profile_date_validate_end_date(self, app, user_one, user_one_u assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'End date must be greater than or equal to the start date.' - def test_user_put_profile_date_validate_end_month_dependency(self, app, user_one, user_one_url, end_month_dependency_payload): - # No endMonth with endYear - res = app.put_json_api(user_one_url, end_month_dependency_payload, auth=user_one.auth, expect_errors=True) + def test_user_put_profile_date_validate_end_month_dependency_ongoing(self, app, user_one, user_attr, user_one_url, start_dates_no_end_dates_payload, request_key): + # End dates, but no start dates + start_dates_no_end_dates_payload['data']['attributes'][request_key][0]['ongoing'] = True + start_dates_no_end_dates_payload['data']['attributes'][request_key][0]['endMonth'] = 3 + + res = app.put_json_api(user_one_url, start_dates_no_end_dates_payload, auth=user_one.auth, expect_errors=True) + user_one.reload() assert res.status_code == 400 assert res.json['errors'][0]['detail'] == "'endYear' is a dependency of 'endMonth'" - def test_user_put_profile_date_validate_start_month_dependency(self, app, user_one, user_one_url, start_month_dependency_payload): - # No endMonth with endYear - res = app.put_json_api(user_one_url, start_month_dependency_payload, auth=user_one.auth, expect_errors=True) - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == "'startYear' is a dependency of 'startMonth'" + def test_false_ongoing_without_start_date_should_fail(self, app, request_payload, user_one_url, user_one, request_key, user_attr): + request_payload['data']['attributes'][request_key][0].pop('startYear') + res = app.put_json_api(user_one_url, request_payload, auth=user_one.auth, expect_errors=True) + user_one.reload() + assert res.json['errors'][0]['detail'] == "'startYear' is a required property" + assert not getattr(user_one, user_attr) - def test_user_put_profile_date_validate_start_date_no_end_date_not_ongoing(self, app, user_one, user_attr, user_one_url, start_dates_no_end_dates_payload, request_key): - # End date is greater then start date - res = app.put_json_api(user_one_url, start_dates_no_end_dates_payload, auth=user_one.auth, expect_errors=True) + def test_false_ongoing_without_end_date_should_fail(self, app, request_payload, user_one_url, user_one, request_key, user_attr): + request_payload['data']['attributes'][request_key][0].pop('endYear') + res = app.put_json_api(user_one_url, request_payload, auth=user_one.auth, expect_errors=True) user_one.reload() - assert res.status_code == 400 + assert res.json['errors'][0]['detail'] == "'endYear' is a required property" + assert not getattr(user_one, user_attr) - def test_user_put_profile_date_validate_end_date_no_start_date(self, app, user_one, user_attr, user_one_url, end_dates_no_start_dates_payload, request_key): - # End dates, but no start dates - res = app.put_json_api(user_one_url, end_dates_no_start_dates_payload, auth=user_one.auth, expect_errors=True) + def test_true_ongoing_without_start_date_should_fail(self, app, request_payload, user_one_url, user_one, 
request_key, user_attr): + request_payload['data']['attributes'][request_key][0].pop('startYear') + res = app.put_json_api(user_one_url, request_payload, auth=user_one.auth, expect_errors=True) user_one.reload() - assert res.status_code == 400 - assert res.json['errors'][0]['detail'] == "'startYear' is a dependency of 'endYear'" + assert res.json['errors'][0]['detail'] == "'startYear' is a required property" + assert not getattr(user_one, user_attr) + + def test_true_ongoing_without_end_date_should_succeed(self, app, request_payload, user_one_url, user_one, request_key, user_attr): + request_payload['data']['attributes'][request_key][0]['ongoing'] = True + request_payload['data']['attributes'][request_key][0].pop('endYear') + # to avoid dependency error + request_payload['data']['attributes'][request_key][0].pop('endMonth') + + res = app.put_json_api(user_one_url, request_payload, auth=user_one.auth, expect_errors=True) + user_one.reload() + assert res.status_code == 200 + assert getattr(user_one, user_attr)[0]['startYear'] == request_payload['data']['attributes'][request_key][0]['startYear'] @pytest.mark.django_db From b21bf71a46d04065386e784df6345dead77faccc Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 24 Jul 2025 14:32:42 -0400 Subject: [PATCH 114/176] update registration detail --- .../registrations/views/test_registration_detail.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/api_tests/registrations/views/test_registration_detail.py b/api_tests/registrations/views/test_registration_detail.py index 39348e1f3c4..68222090042 100644 --- a/api_tests/registrations/views/test_registration_detail.py +++ b/api_tests/registrations/views/test_registration_detail.py @@ -10,10 +10,8 @@ from api_tests.subjects.mixins import UpdateSubjectsMixin from osf.utils import permissions from osf.utils.workflows import ApprovalStates -from osf.models import Registration, NodeLog, NodeLicense, SchemaResponse +from osf.models import Registration, NodeLog, NodeLicense, SchemaResponse, NotificationType from framework.auth import Auth -from website.project.signals import contributor_added -from api_tests.utils import disconnected_from_listeners from api.registrations.serializers import RegistrationSerializer, RegistrationDetailSerializer from addons.wiki.tests.factories import WikiFactory, WikiVersionFactory from osf.migrations import update_provider_auth_groups @@ -32,7 +30,7 @@ from osf_tests.utils import get_default_test_schema from api_tests.nodes.views.test_node_detail_license import TestNodeUpdateLicense -from tests.utils import assert_latest_log +from tests.utils import assert_latest_log, capture_notifications from api_tests.utils import create_test_file @@ -786,9 +784,9 @@ def test_initiate_withdrawal_with_embargo_ends_embargo( assert not public_registration.is_pending_embargo def test_withdraw_request_does_not_send_email_to_unregistered_admins( - self, mock_notification_send, app, user, public_registration, public_url, public_payload): + self, app, user, public_registration, public_url, public_payload): unreg = UnregUserFactory() - with disconnected_from_listeners(contributor_added): + with capture_notifications() as notifications: public_registration.add_unregistered_contributor( unreg.fullname, unreg.email, @@ -802,7 +800,8 @@ def test_withdraw_request_does_not_send_email_to_unregistered_admins( # Only the creator gets an email; the unreg user does not get emailed assert public_registration._contributors.count() == 2 - assert mock_notification_send.call_count 
== 3 + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT @pytest.mark.django_db From 7e96d6f7349905e9879aaec31dbe4b477efb2907 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 25 Jul 2025 09:21:54 -0400 Subject: [PATCH 115/176] fix invite and institutional admin contributor tests --- osf/models/notification_type.py | 1 + osf/utils/notifications.py | 7 +--- .../test_institutional_admin_contributors.py | 6 +-- tests/test_adding_contributor_views.py | 40 ++++++++++--------- website/reviews/listeners.py | 2 +- 5 files changed, 29 insertions(+), 27 deletions(-) diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 19fee3e10e8..1944ba8f923 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -111,6 +111,7 @@ class Type(str, Enum): # Provider notifications PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' + PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS = 'provider_new_pending_withdraw_requests' PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION = 'provider_reviews_submission_confirmation' PROVIDER_REVIEWS_MODERATOR_SUBMISSION_CONFIRMATION = 'provider_reviews_moderator_submission_confirmation' PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED = 'preprint_request_withdrawal_requested' diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index 910421ab476..8e432af12a5 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -135,14 +135,11 @@ def notify_withdraw_registration(resource, action, *args, **kwargs): context['notify_comment'] = not resource.provider.reviews_comments_private and action.comment for contributor in resource.contributors.all(): - context['contributor'] = contributor + context['contributor_fullname'] = contributor.fullname context['is_requester'] = resource.retraction.initiated_by == contributor NotificationType.objects.get( name=NotificationType.Type.PREPRINT_REQUEST_WITHDRAWAL_APPROVED ).emit( user=contributor, - event_context={ - 'is_requester': contributor, - - }, + event_context=context ) diff --git a/osf_tests/test_institutional_admin_contributors.py b/osf_tests/test_institutional_admin_contributors.py index 93ba0ac1305..62d4205eeb2 100644 --- a/osf_tests/test_institutional_admin_contributors.py +++ b/osf_tests/test_institutional_admin_contributors.py @@ -142,7 +142,7 @@ def test_requested_permissions_or_default(self, app, project, institutional_admi auth=mock.ANY, permissions=permissions.ADMIN, # `requested_permissions` should take precedence visible=True, - send_email='access_request', + send_email='access', make_curator=False, ) @@ -168,7 +168,7 @@ def test_permissions_override_requested_permissions(self, app, project, institut auth=mock.ANY, permissions=permissions.ADMIN, # `requested_permissions` should take precedence visible=True, - send_email='access_request', + send_email='access', make_curator=False, ) @@ -194,6 +194,6 @@ def test_requested_permissions_is_used(self, app, project, institutional_admin): auth=mock.ANY, permissions=permissions.ADMIN, # `requested_permissions` should take precedence visible=True, - send_email='access_request', + send_email='access', make_curator=False, ) diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 5825a0b42b5..0d67e246010 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -1,8 +1,6 @@ - from unittest.mock import ANY import time -from http.cookies import SimpleCookie 
from unittest import mock import pytest @@ -197,10 +195,9 @@ def test_add_contributors_post_only_sends_one_email_to_unreg_user(self, mock_sen # send request url = self.project.api_url_for('project_contributors_post') assert self.project.can_edit(user=self.creator) - self.app.post(url, json=payload, auth=self.creator.auth) - - # finalize_invitation should only have been called once - assert mock_send_claim_email.call_count == 1 + with capture_notifications() as noitification: + self.app.post(url, json=payload, auth=self.creator.auth) + assert len(noitification) == 1 def test_add_contributors_post_only_sends_one_email_to_registered_user(self): # Project has components @@ -506,22 +503,28 @@ def test_send_claim_email_to_given_email(self): auth=Auth(project.creator), ) project.save() - send_claim_email(email=given_email, unclaimed_user=unreg_user, node=project) + with capture_notifications() as notifications: + send_claim_email(email=given_email, unclaimed_user=unreg_user, node=project) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_INVITE_DEFAULT - self.mock_notification_send.assert_called() def test_send_claim_email_to_referrer(self): project = ProjectFactory() referrer = project.creator given_email, real_email = fake_email(), fake_email() - unreg_user = project.add_unregistered_contributor(fullname=fake.name(), - email=given_email, auth=Auth( - referrer) - ) + unreg_user = project.add_unregistered_contributor( + fullname=fake.name(), + email=given_email, + auth=Auth(referrer) + ) project.save() - send_claim_email(email=real_email, unclaimed_user=unreg_user, node=project) + with capture_notifications() as notifications: + send_claim_email(email=real_email, unclaimed_user=unreg_user, node=project) - assert self.mock_notification_send.called + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.USER_PENDING_VERIFICATION + assert notifications[1]['type'] == NotificationType.Type.USER_FORWARD_INVITE def test_send_claim_email_before_throttle_expires(self): project = ProjectFactory() @@ -533,10 +536,11 @@ def test_send_claim_email_before_throttle_expires(self): ) project.save() send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - self.mock_notification_send.reset_mock() # 2nd call raises error because throttle hasn't expired - with pytest.raises(HTTPError): - send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - assert not self.mock_notification_send.called + + with capture_notifications() as notifications: + with pytest.raises(HTTPError): + send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) + assert not notifications diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index 52caa5fb3b0..a48d601e071 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -20,7 +20,7 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context, context['profile_image_url'] = get_profile_image_url(user) context['reviews_submission_url'] = f'{DOMAIN}reviews/registries/{provider._id}/{resource._id}' - for recipient in provider_subscription.preorint.moderators.all(): + for recipient in provider_subscription.subscribed_object.get_group('moderator').user_set.all(): NotificationType.objects.get( name=NotificationType.Type.PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS ).emit( From 7dc8375a91a0e46d2548b52363ba0bd8ea6d4f10 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 25 Jul 2025 12:11:14 -0400 
Subject: [PATCH 116/176] clean up test_addons code --- scripts/add_global_subscriptions.py | 60 ------------ tests/test_adding_contributor_views.py | 126 ++++++++++++++----------- website/notifications/emails.py | 45 +++------ website/notifications/events/files.py | 2 +- website/project/views/contributor.py | 6 +- 5 files changed, 87 insertions(+), 152 deletions(-) delete mode 100644 scripts/add_global_subscriptions.py diff --git a/scripts/add_global_subscriptions.py b/scripts/add_global_subscriptions.py deleted file mode 100644 index 52746875d79..00000000000 --- a/scripts/add_global_subscriptions.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -This migration subscribes each user to USER_SUBSCRIPTIONS_AVAILABLE if a subscription -does not already exist. -""" - -import logging -import sys - -from osf.models.notifications import NotificationSubscriptionLegacy -from website.app import setup_django -setup_django() - -from django.apps import apps -from django.db import transaction -from website.app import init_app -from website.notifications import constants -from website.notifications.utils import to_subscription_key - -from scripts import utils as scripts_utils - -logger = logging.getLogger(__name__) - -def add_global_subscriptions(dry=True): - OSFUser = apps.get_model('osf.OSFUser') - notification_type = 'email_transactional' - user_events = constants.USER_SUBSCRIPTIONS_AVAILABLE - - count = 0 - - with transaction.atomic(): - for user in OSFUser.objects.filter(is_registered=True, date_confirmed__isnull=False): - changed = False - if not user.is_active: - continue - for user_event in user_events: - user_event_id = to_subscription_key(user._id, user_event) - - subscription = NotificationSubscriptionLegacy.load(user_event_id) - if not subscription: - logger.info(f'No {user_event} subscription found for user {user._id}. 
Subscribing...') - subscription = NotificationSubscriptionLegacy(_id=user_event_id, owner=user, event_name=user_event) - subscription.save() # Need to save in order to access m2m fields - subscription.add_user_to_subscription(user, notification_type) - subscription.save() - changed = True - else: - logger.info(f'User {user._id} already has a {user_event} subscription') - if changed: - count += 1 - - logger.info(f'Added subscriptions for {count} users') - if dry: - raise RuntimeError('Dry mode -- rolling back transaction') - -if __name__ == '__main__': - dry = '--dry' in sys.argv - init_app(routes=False) - if not dry: - scripts_utils.add_file_logger(logger, __file__) - add_global_subscriptions(dry=dry) diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 0d67e246010..30e38b3425a 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -1,5 +1,3 @@ -from unittest.mock import ANY - import time from unittest import mock @@ -30,13 +28,11 @@ ) from tests.utils import capture_notifications from website.profile.utils import add_contributor_json, serialize_unregistered -from website.project.signals import contributor_added from website.project.views.contributor import ( deserialize_contributors, notify_added_contributor, send_claim_email, ) -from conftest import start_mock_notification_send @pytest.mark.enable_implicit_clean class TestAddingContributorViews(OsfTestCase): @@ -46,10 +42,6 @@ def setUp(self): self.creator = AuthUserFactory() self.project = ProjectFactory(creator=self.creator) self.auth = Auth(self.project.creator) - # Authenticate all requests - contributor_added.connect(notify_added_contributor) - - self.mock_notification_send = start_mock_notification_send(self) def test_serialize_unregistered_without_record(self): name, email = fake.name(), fake_email() @@ -197,7 +189,10 @@ def test_add_contributors_post_only_sends_one_email_to_unreg_user(self, mock_sen assert self.project.can_edit(user=self.creator) with capture_notifications() as notifications: self.app.post(url, json=payload, auth=self.creator.auth) - assert len(notifications) == 1 + assert len(notifications) == 3 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + assert notifications[1]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + assert notifications[2]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_add_contributors_post_only_sends_one_email_to_registered_user(self): # Project has components @@ -251,10 +246,14 @@ def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_nod # send request url = self.project.api_url_for('project_contributors_post') assert self.project.can_edit(user=self.creator) - self.app.post(url, json=payload, auth=self.creator.auth) + with capture_notifications() as notifications: + self.app.post(url, json=payload, auth=self.creator.auth) # send_mail is called for both the project and the sub-component - assert self.mock_notification_send.call_count == 2 + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + assert notifications[1]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + @mock.patch('website.project.views.contributor.send_claim_email') def test_email_sent_when_unreg_user_is_added(self, send_mail): @@ -272,8 +271,9 @@ def test_email_sent_when_unreg_user_is_added(self, send_mail): 'node_ids': [] } url = 
self.project.api_url_for('project_contributors_post') - self.app.post(url, json=payload, follow_redirects=True, auth=self.creator.auth) - send_mail.assert_called_with(email, ANY,ANY,notify=True, email_template='default') + with capture_notifications() as notifications: + self.app.post(url, json=payload, follow_redirects=True, auth=self.creator.auth) + assert len(notifications) == 1 def test_email_sent_when_reg_user_is_added(self): contributor = UserFactory() @@ -283,52 +283,61 @@ def test_email_sent_when_reg_user_is_added(self): 'permissions': permissions.WRITE }] project = ProjectFactory(creator=self.auth.user) - project.add_contributors(contributors, auth=self.auth) - project.save() - assert self.mock_notification_send.called + with capture_notifications() as notifications: + project.add_contributors(contributors, auth=self.auth) + project.save() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT contributor.refresh_from_db() assert contributor.contributor_added_email_records[project._id]['last_sent'] == approx(int(time.time()), rel=1) def test_contributor_added_email_sent_to_unreg_user(self): unreg_user = UnregUserFactory() project = ProjectFactory() - project.add_unregistered_contributor(fullname=unreg_user.fullname, email=unreg_user.email, auth=Auth(project.creator)) - project.save() - assert self.mock_notification_send.called + with capture_notifications() as notifications: + project.add_unregistered_contributor(fullname=unreg_user.fullname, email=unreg_user.email, auth=Auth(project.creator)) + project.save() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_forking_project_does_not_send_contributor_added_email(self): project = ProjectFactory() - project.fork_node(auth=Auth(project.creator)) - assert not self.mock_notification_send.called + with capture_notifications() as notifications: + project.fork_node(auth=Auth(project.creator)) + assert not notifications def test_templating_project_does_not_send_contributor_added_email(self): project = ProjectFactory() - project.use_as_template(auth=Auth(project.creator)) - assert not self.mock_notification_send.called + with capture_notifications() as notifications: + project.use_as_template(auth=Auth(project.creator)) + assert not notifications @mock.patch('website.archiver.tasks.archive') def test_registering_project_does_not_send_contributor_added_email(self, mock_archive): project = ProjectFactory() provider = RegistrationProviderFactory() - project.register_node( - get_default_metaschema(), - Auth(user=project.creator), - DraftRegistrationFactory(branched_from=project), - None, - provider=provider - ) - assert not self.mock_notification_send.called + with capture_notifications() as notifications: + project.register_node( + get_default_metaschema(), + Auth(user=project.creator), + DraftRegistrationFactory(branched_from=project), + None, + provider=provider + ) + assert not notifications def test_notify_contributor_email_does_not_send_before_throttle_expires(self): contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) - notify_added_contributor(project, contributor, auth) - assert self.mock_notification_send.called + with capture_notifications() as notifications: + notify_added_contributor(project, contributor, 'default', auth) + assert len(notifications) == 1 # 2nd call does not send email because throttle period has not expired - 
notify_added_contributor(project, contributor, auth) - assert self.mock_notification_send.call_count == 1 + with capture_notifications() as notifications: + notify_added_contributor(project, contributor, 'default', auth) + assert not notifications def test_notify_contributor_email_sends_after_throttle_expires(self): throttle = 0.5 @@ -336,38 +345,45 @@ def test_notify_contributor_email_sends_after_throttle_expires(self): contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) - notify_added_contributor(project, contributor, auth, throttle=throttle) - assert self.mock_notification_send.called + with capture_notifications() as notifications: + notify_added_contributor(project, contributor, 'default', auth, throttle=throttle) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT time.sleep(1) # throttle period expires - notify_added_contributor(project, contributor, auth, throttle=throttle) - assert self.mock_notification_send.call_count == 2 + with capture_notifications() as notifications: + notify_added_contributor(project, contributor, 'default', auth, throttle=throttle) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + assert notifications[1]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_add_contributor_to_fork_sends_email(self): contributor = UserFactory() - fork = self.project.fork_node(auth=Auth(self.creator)) - fork.add_contributor(contributor, auth=Auth(self.creator)) - fork.save() - assert self.mock_notification_send.called - assert self.mock_notification_send.call_count == 1 + with capture_notifications() as notifications: + fork = self.project.fork_node(auth=Auth(self.creator)) + fork.add_contributor(contributor, auth=Auth(self.creator)) + fork.save() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_add_contributor_to_template_sends_email(self): contributor = UserFactory() - template = self.project.use_as_template(auth=Auth(self.creator)) - template.add_contributor(contributor, auth=Auth(self.creator)) - template.save() - assert self.mock_notification_send.called - assert self.mock_notification_send.call_count == 1 + with capture_notifications() as notifications: + template = self.project.use_as_template(auth=Auth(self.creator)) + template.add_contributor(contributor, auth=Auth(self.creator)) + template.save() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_creating_fork_does_not_email_creator(self): - contributor = UserFactory() - fork = self.project.fork_node(auth=Auth(self.creator)) - assert not self.mock_notification_send.called + with capture_notifications() as notifications: + self.project.fork_node(auth=Auth(self.creator)) + assert not notifications def test_creating_template_does_not_email_creator(self): - contributor = UserFactory() - template = self.project.use_as_template(auth=Auth(self.creator)) - assert not self.mock_notification_send.called + with capture_notifications() as notifications: + self.project.use_as_template(auth=Auth(self.creator)) + assert not notifications def test_add_multiple_contributors_only_adds_one_log(self): n_logs_pre = self.project.logs.count() diff --git a/website/notifications/emails.py b/website/notifications/emails.py index da2024e8e31..9c34867ad3a 100644 --- 
a/website/notifications/emails.py +++ b/website/notifications/emails.py @@ -1,8 +1,9 @@ from django.apps import apps from babel import dates, core, Locale +from django.contrib.contenttypes.models import ContentType -from osf.models import AbstractNode, NotificationSubscription +from osf.models import AbstractNode, NotificationSubscription, NotificationType from osf.models.notifications import NotificationDigest from osf.utils.permissions import ADMIN, READ from website import mails @@ -22,37 +23,14 @@ def notify(event, user, node, timestamp, **context): target_user: used with comment_replies :return: List of user ids notifications were sent to """ - sent_users = [] - # The user who the current comment is a reply to - target_user = context.get('target_user', None) - exclude = context.get('exclude', []) - # do not notify user who initiated the emails - exclude.append(user._id) - - event_type = utils.find_subscription_type(event) - if target_user and event_type in constants.USER_SUBSCRIPTIONS_AVAILABLE: - # global user - subscriptions = get_user_subscriptions(target_user, event_type) - else: - # local project user - subscriptions = compile_subscriptions(node, event_type, event) - - for notification_type in subscriptions: - if notification_type == 'none' or not subscriptions[notification_type]: - continue - # Remove excluded ids from each notification type - subscriptions[notification_type] = [guid for guid in subscriptions[notification_type] if guid not in exclude] - - # If target, they get a reply email and are removed from the general email - if target_user and target_user._id in subscriptions[notification_type]: - subscriptions[notification_type].remove(target_user._id) - store_emails([target_user._id], notification_type, 'comment_replies', user, node, timestamp, **context) - sent_users.append(target_user._id) - - if subscriptions[notification_type]: - store_emails(subscriptions[notification_type], notification_type, event_type, user, node, timestamp, **context) - sent_users.extend(subscriptions[notification_type]) - return sent_users + if event.endswith('_file_updated'): + NotificationType.objects.get( + name=NotificationType.Type.NODE_FILE_ADDED + ).emit( + user=user, + subscribed_object=node, + event_context=context + ) def notify_mentions(event, user, node, timestamp, **context): OSFUser = apps.get_model('osf', 'OSFUser') @@ -161,7 +139,8 @@ def check_node(node, event): node_subscriptions = {key: [] for key in constants.NOTIFICATION_TYPES} if node: subscription = NotificationSubscription.objects.filter( - node=node, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node), notification_type__name=event ) for notification_type in node_subscriptions: diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index db8a9c91fdc..6a7c7cab3d9 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -68,7 +68,7 @@ def text_message(self): @property def event_type(self): """Most basic event type.""" - return 'file_updated' + return 'node_file_updated' @property def waterbutler_id(self): diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index ea4ec0f67be..0800afaf8ca 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -588,7 +588,7 @@ def check_email_throttle(node, contributor, throttle=None): try: notification_type = NotificationType.objects.get( - name=NotificationType.Type.NODE_COMMENT.value # or whatever event type 
you're using for 'contributor added' + name=NotificationType.Type.NODE_COMMENT.value ) except NotificationType.DoesNotExist: return False # Fail-safe: if the notification type isn't set up, don't throttle @@ -600,7 +600,7 @@ def check_email_throttle(node, contributor, throttle=None): user=contributor, notification_type=notification_type, content_type=ContentType.objects.get_for_model(node), - object_id=str(node.id) + object_id=node.id ).first() if not subscription: @@ -619,7 +619,7 @@ def check_email_throttle(node, contributor, throttle=None): return False # No previous sent notification, not throttled @contributor_added.connect -def notify_added_contributor(node, contributor, auth=None, email_template=None, *args, **kwargs): +def notify_added_contributor(node, contributor, email_template, auth=None, *args, **kwargs): """Send a notification to a contributor who was just added to a node. Handles: From fb6086bc22dbd21825ec20531143c2db74259d14 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 25 Jul 2025 12:56:45 -0400 Subject: [PATCH 117/176] fix up auth_views tests --- api/users/views.py | 2 +- framework/auth/views.py | 15 ++++++++------- notifications.yaml | 20 ++++++++++++-------- osf/models/notification_type.py | 2 +- tests/test_auth_views.py | 17 +++++++++++------ tests/test_events.py | 17 +++++++++++------ website/notifications/utils.py | 9 +++++---- 7 files changed, 49 insertions(+), 33 deletions(-) diff --git a/api/users/views.py b/api/users/views.py index 590216ade10..3c7f16e17fb 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -786,7 +786,7 @@ def post(self, request, *args, **kwargs): # Don't go anywhere return JsonResponse( { - 'external_id_provider': external_id_provider.name, + 'external_id_provider': external_id_provider, 'auth_user_fullname': fullname, }, status=status.HTTP_200_OK, diff --git a/framework/auth/views.py b/framework/auth/views.py index 7e4cd6ad234..81b362532e9 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -841,23 +841,24 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte if external_id_provider and external_id: # First time login through external identity provider, link or create an OSF account confirmation if user.external_identity[external_id_provider][external_id] == 'CREATE': - notificaton_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE + notification_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE elif user.external_identity[external_id_provider][external_id] == 'LINK': - notificaton_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK + notification_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK elif merge_target: # Merge account confirmation - notificaton_type = NotificationType.Type.USER_CONFIRM_MERGE + notification_type = NotificationType.Type.USER_CONFIRM_MERGE elif user.is_active: # Add email confirmation - notificaton_type = NotificationType.Type.USER_CONFIRM_EMAIL + notification_type = NotificationType.Type.USER_CONFIRM_EMAIL elif campaign: # Account creation confirmation: from campaign - notificaton_type = campaigns.email_template_for_campaign(campaign) + notification_type = campaigns.email_template_for_campaign(campaign) else: # Account creation confirmation: from OSF - notificaton_type = NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL + notification_type = NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL - NotificationType.objects.get(name=notificaton_type).emit( + print(notification_type) + 
NotificationType.objects.get(name=notification_type).emit( user=user, event_context={ 'user_fullname': user.fullname, diff --git a/notifications.yaml b/notifications.yaml index c5a3d7a6cb5..8ff1a5683d8 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -1,15 +1,11 @@ # This file contains the configuration for our notification system using the NotificationType object, this is intended to -# exist as a simple declarative list of NotificationTypes and their attributes. +# exist as a simple declarative list of NotificationTypes and their attributes to populate the type data. -# Workflow: -# 1. Add a new notification template -# 2. Add a entry here with the desired notification types -# 3. Add name tp Enum osf.notification.NotificationType.Type -# 4. Use the emit method to send or subscribe the notification for immediate deliver or periodic digest. notification_types: - #### GLOBAL (User Notifications) + #### User Notifications - name: user_pending_verification_registered - __docs__: ... + __docs__: This email is sent when a user requests access to a node and has confirm their identity, + `referrer` is sent an email to forward the confirmation link. object_content_type_model_name: osfuser template: 'website/templates/emails/pending_registered.html.mako' - name: user_pending_verification @@ -132,6 +128,14 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/registration_bulk_upload_failure_duplicates.html.mako' + - name: user_external_login_email_confirm_link + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/external_confirm_link.html.mako' + - name: user_external_login_confirm_email_create + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/external_confirm_create.html.mako' #### PROVIDER - name: provider_new_pending_submissions diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 1944ba8f923..7c651c511b5 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -66,7 +66,7 @@ class Type(str, Enum): USER_PASSWORD_RESET = 'user_password_reset' USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION = 'user_contributor_added_draft_registration' USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE = 'user_external_login_confirm_email_create' - USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK = 'user_external_login_confirm_email_link' + USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK = 'user_external_login_email_confirm_link' USER_CONFIRM_MERGE = 'user_confirm_merge' USER_CONFIRM_EMAIL = 'user_confirm_email' USER_INITIAL_CONFIRM_EMAIL = 'user_initial_confirm_email' diff --git a/tests/test_auth_views.py b/tests/test_auth_views.py index 31445da2c8d..4d385b68dd6 100644 --- a/tests/test_auth_views.py +++ b/tests/test_auth_views.py @@ -12,7 +12,7 @@ from django.utils import timezone from flask import request from rest_framework import status as http_status -from tests.utils import run_celery_tasks +from tests.utils import run_celery_tasks, capture_notifications from framework import auth from framework.auth import Auth, cas @@ -25,7 +25,7 @@ ) from framework.auth.exceptions import InvalidTokenError from framework.auth.views import login_and_register_handler -from osf.models import OSFUser, NotableDomain +from osf.models import OSFUser, NotableDomain, NotificationType from osf_tests.factories import ( fake_email, AuthUserFactory, @@ -320,8 +320,11 @@ def test_resend_confirmation(self): self.user.save() url = 
api_url_for('resend_confirmation') header = {'address': email, 'primary': False, 'confirmed': False} - self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) - assert self.mock_send_grid.called + with capture_notifications() as notifications: + self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) + + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL self.user.reload() assert token != self.user.get_confirmation_token(email) @@ -497,8 +500,10 @@ def test_resend_confirmation_does_not_send_before_throttle_expires(self): self.user.save() url = api_url_for('resend_confirmation') header = {'address': email, 'primary': False, 'confirmed': False} - self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) - assert self.mock_send_grid.called + with capture_notifications() as notifications: + self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL # 2nd call does not send email because throttle period has not expired res = self.app.put(url, json={'id': self.user._id, 'email': header}, auth=self.user.auth) assert res.status_code == 400 diff --git a/tests/test_events.py b/tests/test_events.py index e06559ebbb4..ca8793da6da 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -1,7 +1,11 @@ from collections import OrderedDict from unittest import mock + +from django.contrib.contenttypes.models import ContentType from pytest import raises + +from osf.models import NotificationType from website.notifications.events.base import Event, register, event_registry from website.notifications.events.files import ( FileAdded, FileRemoved, FolderCreated, FileUpdated, @@ -184,11 +188,12 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionLegacyFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' + self.project_subscription = factories.NotificationSubscription( + user=self.user, + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_ADDED), ) + self.project_subscription.object_id = self.project.id + self.project_subscription.content_type = ContentType.objects.get_for_model(self.project) self.project_subscription.save() self.user2 = factories.UserFactory() self.event = event_registry['file_removed']( @@ -196,12 +201,12 @@ def setUp(self): ) def test_info_formed_correct_file(self): - assert 'file_updated' == self.event.event_type + assert NotificationType.Type.NODE_FILE_UPDATED == self.event.event_type assert f'removed file "{materialized.lstrip("/")}".' == self.event.html_message assert f'removed file "{materialized.lstrip("/")}".' == self.event.text_message def test_info_formed_correct_folder(self): - assert 'file_updated' == self.event.event_type + assert NotificationType.Type.NODE_FILE_UPDATED == self.event.event_type self.event.payload['metadata']['materialized'] += '/' assert f'removed folder "{materialized.lstrip("/")}/".' == self.event.html_message assert f'removed folder "{materialized.lstrip("/")}/".' 
== self.event.text_message diff --git a/website/notifications/utils.py b/website/notifications/utils.py index e64d76c258f..38707ac24a6 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -149,14 +149,15 @@ def users_to_remove(source_event, source_node, new_node): removed_users = {key: [] for key in constants.NOTIFICATION_TYPES} if source_node == new_node: return removed_users - old_sub = NotificationSubscription.objects.get( - subscribed_object=source_node, + sub = NotificationSubscription.objects.get( + object_id=source_node.id, + content_type=ContentType.objects.get_for_model(source_node), notification_type__name=source_event ) for notification_type in constants.NOTIFICATION_TYPES: users = [] - if hasattr(old_sub, notification_type): - users += list(getattr(old_sub, notification_type).values_list('guids___id', flat=True)) + if hasattr(sub, notification_type): + users += list(getattr(sub, notification_type).values_list('guids___id', flat=True)) return removed_users From 53b89ac2851f0b53e49f0932bb8ee34ec29d436a Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 25 Jul 2025 13:32:06 -0400 Subject: [PATCH 118/176] fix up contributor and desk notifications --- api/users/views.py | 13 ++++++++----- notifications.yaml | 8 ++++++++ tests/test_user_profile_view.py | 17 ++++++++++------- 3 files changed, 26 insertions(+), 12 deletions(-) diff --git a/api/users/views.py b/api/users/views.py index 3c7f16e17fb..df2d2a215e6 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -103,7 +103,7 @@ ) from osf.utils.tokens import TokenHandler from osf.utils.tokens.handlers import sanction_handler -from website import mails, settings, language +from website import settings, language from website.project.views.contributor import send_claim_email, send_claim_registered_email from website.util.metrics import CampaignClaimedTags, CampaignSourceTags from framework.auth import exceptions @@ -639,11 +639,14 @@ def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) user = self.get_user() - mails.send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.REQUEST_EXPORT, + NotificationType.objects.get( + name=NotificationType.Type.DESK_REQUEST_EXPORT, + ).emit( user=user, - can_change_preferences=False, + destination_address=settings.OSF_SUPPORT_EMAIL, + event_context={ + 'can_change_preferences': False, + }, ) user.email_last_sent = timezone.now() user.save() diff --git a/notifications.yaml b/notifications.yaml index 8ff1a5683d8..d1cade2fb9b 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -136,6 +136,10 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/external_confirm_create.html.mako' + - name: user_primary_email_changed + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/primary_email_changed.html.mako' #### PROVIDER - name: provider_new_pending_submissions @@ -338,3 +342,7 @@ notification_types: __docs__: ... object_content_type_model_name: desk template: 'website/templates/emails/support_request.html.mako' + - name: desk_request_export + __docs__: ... 
+ object_content_type_model_name: desk + template: 'website/templates/emails/support_request.html.mako' diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index bb801340423..20095abfba1 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -10,7 +10,7 @@ from framework.celery_tasks import handlers from osf.external.spam import tasks as spam_tasks from osf.models import ( - NotableDomain + NotableDomain, NotificationType ) from osf_tests.factories import ( fake_email, @@ -23,6 +23,7 @@ fake, OsfTestCase, ) +from tests.utils import capture_notifications from website import mailchimp_utils from website.settings import MAILCHIMP_GENERAL_LIST from website.util import api_url_for, web_url_for @@ -720,15 +721,17 @@ def test_password_change_invalid_empty_string_confirm_password(self): def test_password_change_invalid_blank_confirm_password(self): self.test_password_change_invalid_blank_password('password', 'new password', ' ') - @mock.patch('website.mails.settings.USE_EMAIL', True) - @mock.patch('website.mails.settings.USE_CELERY', False) def test_user_cannot_request_account_export_before_throttle_expires(self): url = api_url_for('request_export') - self.app.post(url, auth=self.user.auth) - assert self.mock_send_grid.called - res = self.app.post(url, auth=self.user.auth) + with capture_notifications() as notifications: + self.app.post(url, auth=self.user.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.DESK_REQUEST_EXPORT + + with capture_notifications() as notifications: + res = self.app.post(url, auth=self.user.auth) assert res.status_code == 400 - assert self.mock_send_grid.call_count == 1 + assert len(notifications) == 0 def test_get_unconfirmed_emails_exclude_external_identity(self): external_identity = { From 46b69441b6031f4359072c20f301554864949a3d Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 25 Jul 2025 13:39:00 -0400 Subject: [PATCH 119/176] deletes detect_duplicate notifications as that only applied to legacy notifications --- admin/nodes/views.py | 5 +---- admin/notifications/views.py | 30 ++----------------------- admin_tests/notifications/test_views.py | 24 +++++--------------- 3 files changed, 9 insertions(+), 50 deletions(-) diff --git a/admin/nodes/views.py b/admin/nodes/views.py index 40cf261945d..971b4a8cd6d 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -22,7 +22,7 @@ from admin.base.utils import change_embargo_date from admin.base.views import GuidView from admin.base.forms import GuidForm -from admin.notifications.views import detect_duplicate_notifications, delete_selected_notifications +from admin.notifications.views import delete_selected_notifications from api.share.utils import update_share from api.caching.tasks import update_storage_usage_cache @@ -101,13 +101,10 @@ def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) node = self.get_object() - detailed_duplicates = detect_duplicate_notifications(node_id=node.id) - context.update({ 'SPAM_STATUS': SpamStatus, 'STORAGE_LIMITS': settings.StorageLimits, 'node': node, - 'duplicates': detailed_duplicates }) return context diff --git a/admin/notifications/views.py b/admin/notifications/views.py index 3546878e9af..6719ac90a8a 100644 --- a/admin/notifications/views.py +++ b/admin/notifications/views.py @@ -1,30 +1,4 @@ -from osf.models.notifications import NotificationSubscriptionLegacy -from django.db.models import Count +from osf.models.notification_subscription 
import NotificationSubscription def delete_selected_notifications(selected_ids): - NotificationSubscriptionLegacy.objects.filter(id__in=selected_ids).delete() - -def detect_duplicate_notifications(node_id=None): - query = NotificationSubscriptionLegacy.objects.values('_id').annotate(count=Count('_id')).filter(count__gt=1) - if node_id: - query = query.filter(node_id=node_id) - - detailed_duplicates = [] - for dup in query: - notifications = NotificationSubscriptionLegacy.objects.filter( - _id=dup['_id'] - ).order_by('created') - - for notification in notifications: - detailed_duplicates.append({ - 'id': notification.id, - '_id': notification._id, - 'event_name': notification.event_name, - 'created': notification.created, - 'count': dup['count'], - 'email_transactional': [u._id for u in notification.email_transactional.all()], - 'email_digest': [u._id for u in notification.email_digest.all()], - 'none': [u._id for u in notification.none.all()] - }) - - return detailed_duplicates + NotificationSubscription.objects.filter(id__in=selected_ids).delete() diff --git a/admin_tests/notifications/test_views.py b/admin_tests/notifications/test_views.py index 42d182a77e5..e2003b1cbf8 100644 --- a/admin_tests/notifications/test_views.py +++ b/admin_tests/notifications/test_views.py @@ -3,9 +3,8 @@ from osf.models import OSFUser, Node from admin.notifications.views import ( delete_selected_notifications, - detect_duplicate_notifications, ) -from osf.models.notifications import NotificationSubscriptionLegacy +from osf.models.notification_subscription import NotificationSubscription from tests.base import AdminTestCase pytestmark = pytest.mark.django_db @@ -19,22 +18,11 @@ def setUp(self): self.request_factory = RequestFactory() def test_delete_selected_notifications(self): - notification1 = NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event1') - notification2 = NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event2') - notification3 = NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event3') + notification1 = NotificationSubscription.objects.create(user=self.user) + notification2 = NotificationSubscription.objects.create(user=self.user) + notification3 = NotificationSubscription.objects.create(user=self.user) delete_selected_notifications([notification1.id, notification2.id]) - assert not NotificationSubscriptionLegacy.objects.filter(id__in=[notification1.id, notification2.id]).exists() - assert NotificationSubscriptionLegacy.objects.filter(id=notification3.id).exists() - - def test_detect_duplicate_notifications(self): - NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event1') - NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event1') - NotificationSubscriptionLegacy.objects.create(user=self.user, node=self.node, event_name='event2') - - duplicates = detect_duplicate_notifications() - - print(f"Detected duplicates: {duplicates}") - - assert len(duplicates) == 3, f"Expected 3 duplicates, but found {len(duplicates)}" + assert not NotificationSubscription.objects.filter(id__in=[notification1.id, notification2.id]).exists() + assert NotificationSubscription.objects.filter(id=notification3.id).exists() From dcdbdc79568cc49c7a128f8214dc3eb1408d62b5 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 25 Jul 2025 15:25:47 -0400 Subject: [PATCH 120/176] fix spam ban notification type and split up 
webtests into smaller files --- notifications.yaml | 4 + osf/models/mixins.py | 21 +- osf/models/preprint.py | 2 +- osf/models/schema_response.py | 2 +- tests/test_events.py | 11 +- tests/test_forgot_password.py | 237 +++++++++++ tests/test_preprints.py | 20 +- tests/test_resend_confirmation.py | 83 ++++ tests/test_user_claiming.py | 267 +++++++++++++ tests/test_webtests.py | 642 +----------------------------- 10 files changed, 623 insertions(+), 666 deletions(-) create mode 100644 tests/test_forgot_password.py create mode 100644 tests/test_resend_confirmation.py create mode 100644 tests/test_user_claiming.py diff --git a/notifications.yaml b/notifications.yaml index d1cade2fb9b..5be39abc492 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -140,6 +140,10 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/primary_email_changed.html.mako' + - name: user_spam_banned + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/spam_user_banned.html.mako' #### PROVIDER - name: provider_new_pending_submissions diff --git a/osf/models/mixins.py b/osf/models/mixins.py index e22fd7b97f5..3cbb2283aab 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -26,6 +26,7 @@ InvalidTagError, BlockedEmailError, ) +from osf.models.notification_type import NotificationType from .node_relation import NodeRelation from .nodelog import NodeLog from .subject import Subject @@ -54,7 +55,7 @@ from osf.utils.requests import get_request_and_user_id from website.project import signals as project_signals -from website import settings, mails, language +from website import settings, language from website.project.licenses import set_license logger = logging.getLogger(__name__) @@ -306,7 +307,7 @@ def add_affiliated_institution(self, inst, user, log=True, ignore_user_affiliati if not self.is_affiliated_with_institution(inst): self.affiliated_institutions.add(inst) self.update_search() - from . import NotificationType + from osf.models.notification_type import NotificationType if notify and getattr(self, 'type', False) == 'osf.node': for user, _ in self.get_admin_contributors_recursive(unique_users=True): @@ -348,7 +349,7 @@ def remove_affiliated_institution(self, inst, user, save=False, log=True, notify if save: self.save() self.update_search() - from . import NotificationType + from osf.models.notification_type import NotificationType if notify and getattr(self, 'type', False) == 'osf.node': for user, _ in self.get_admin_contributors_recursive(unique_users=True): @@ -2272,12 +2273,14 @@ def suspend_spam_user(self, user): user.flag_spam() if not user.is_disabled: user.deactivate_account() - mails.send_mail( - to_addr=user.username, - mail=mails.SPAM_USER_BANNED, - user=user, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, + NotificationType.objects.get( + name=NotificationType.Type.USER_SPAM_BANNED + ).emit( + user, + event_context={ + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + 'can_change_preferences': False + } ) user.save() diff --git a/osf/models/preprint.py b/osf/models/preprint.py index 17e792e15aa..b6c864bcf83 100644 --- a/osf/models/preprint.py +++ b/osf/models/preprint.py @@ -20,7 +20,7 @@ from framework.auth import Auth from framework.exceptions import PermissionsError, UnpublishedPendingPreprintVersionExists from framework.auth import oauth_scopes -from . 
import NotificationType +from osf.models.notification_type import NotificationType from .subject import Subject from .tag import Tag diff --git a/osf/models/schema_response.py b/osf/models/schema_response.py index 84d0a8f46de..3c4f65155fb 100644 --- a/osf/models/schema_response.py +++ b/osf/models/schema_response.py @@ -9,7 +9,7 @@ from framework.exceptions import PermissionsError from osf.exceptions import PreviousSchemaResponseError, SchemaResponseStateError, SchemaResponseUpdateError -from . import NotificationType +from osf.models.notification_type import NotificationType from .base import BaseModel, ObjectIDMixin from .metaschema import RegistrationSchemaBlock from .schema_response_block import SchemaResponseBlock diff --git a/tests/test_events.py b/tests/test_events.py index ca8793da6da..e98119e61b9 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -188,7 +188,7 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscription( + self.project_subscription = factories.NotificationSubscriptionFactory( user=self.user, notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_ADDED), ) @@ -224,10 +224,9 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionLegacyFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' + self.project_subscription = factories.NotificationSubscriptionFactory( + user=self.user, + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_ADDED), ) self.project_subscription.save() self.user2 = factories.UserFactory() @@ -236,7 +235,7 @@ def setUp(self): ) def test_info_formed_correct(self): - assert 'file_updated' == self.event.event_type + assert NotificationType.Type.NODE_FILE_UPDATED == self.event.event_type assert 'created folder "Three/".' == self.event.html_message assert 'created folder "Three/".' 
== self.event.text_message diff --git a/tests/test_forgot_password.py b/tests/test_forgot_password.py new file mode 100644 index 00000000000..9ca6df4fdab --- /dev/null +++ b/tests/test_forgot_password.py @@ -0,0 +1,237 @@ +from urllib.parse import quote_plus + +from osf.models import NotificationType +from tests.base import OsfTestCase +from osf_tests.factories import ( + AuthUserFactory, + UserFactory, +) +from tests.utils import capture_notifications +from website.util import web_url_for +from tests.test_webtests import assert_in_html, assert_not_in_html + +class TestForgotPassword(OsfTestCase): + + def setUp(self): + super().setUp() + self.user = UserFactory() + self.auth_user = AuthUserFactory() + self.get_url = web_url_for('forgot_password_get') + self.post_url = web_url_for('forgot_password_post') + self.user.verification_key_v2 = {} + self.user.save() + + + # log users out before they land on forgot password page + def test_forgot_password_logs_out_user(self): + # visit forgot password link while another user is logged in + res = self.app.get(self.get_url, auth=self.auth_user.auth) + # check redirection to CAS logout + assert res.status_code == 302 + location = res.headers.get('Location') + assert 'reauth' not in location + assert 'logout?service=' in location + assert 'forgotpassword' in location + + # test that forgot password page is loaded correctly + def test_get_forgot_password(self): + res = self.app.get(self.get_url) + assert res.status_code == 200 + assert 'Forgot Password' in res.text + assert res.get_form('forgotPasswordForm') + + # test that existing user can receive reset password email + def test_can_receive_reset_password_email(self): + # load forgot password page and submit email + res = self.app.get(self.get_url) + form = res.get_form('forgotPasswordForm') + form['forgot_password-email'] = self.user.username + with capture_notifications() as notifications: + res = form.submit(self.app) + # check mail was sent + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL + # check http 200 response + assert res.status_code == 200 + # check request URL is /forgotpassword + assert res.request.path == self.post_url + # check push notification + assert_in_html('If there is an OSF account', res.text) + assert_not_in_html('Please wait', res.text) + + # check verification_key_v2 is set + self.user.reload() + assert self.user.verification_key_v2 != {} + + # test that non-existing user cannot receive reset password email + def test_cannot_receive_reset_password_email(self): + # load forgot password page and submit email + res = self.app.get(self.get_url) + form = res.get_form('forgotPasswordForm') + form['forgot_password-email'] = 'fake' + self.user.username + with capture_notifications() as noifications: + res = form.submit(self.app) + + # check mail was not sent + assert not noifications + # check http 200 response + assert res.status_code == 200 + # check request URL is /forgotpassword + assert res.request.path == self.post_url + # check push notification + assert_in_html('If there is an OSF account', res.text) + assert_not_in_html('Please wait', res.text) + + # check verification_key_v2 is not set + self.user.reload() + assert self.user.verification_key_v2 == {} + + # test that non-existing user cannot receive reset password email + def test_not_active_user_no_reset_password_email(self): + self.user.deactivate_account() + self.user.save() + + # load forgot password page and submit email + res = self.app.get(self.get_url) + 
form = res.get_form('forgotPasswordForm') + form['forgot_password-email'] = self.user.username + with capture_notifications() as notification: + res = form.submit(self.app) + + # check mail was not sent + assert not notification + # check http 200 response + assert res.status_code == 200 + # check request URL is /forgotpassword + assert res.request.path == self.post_url + # check push notification + assert_in_html('If there is an OSF account', res.text) + assert_not_in_html('Please wait', res.text) + + # check verification_key_v2 is not set + self.user.reload() + assert self.user.verification_key_v2 == {} + + # test that user cannot submit forgot password request too quickly + def test_cannot_reset_password_twice_quickly(self): + # load forgot password page and submit email + res = self.app.get(self.get_url) + form = res.get_form('forgotPasswordForm') + form['forgot_password-email'] = self.user.username + res = form.submit(self.app) + res = form.submit(self.app) + + # check http 200 response + assert res.status_code == 200 + # check push notification + assert_in_html('Please wait', res.text) + assert_not_in_html('If there is an OSF account', res.text) + + +class TestForgotPasswordInstitution(OsfTestCase): + + def setUp(self): + super().setUp() + self.user = UserFactory() + self.auth_user = AuthUserFactory() + self.get_url = web_url_for('redirect_unsupported_institution') + self.post_url = web_url_for('forgot_password_institution_post') + self.user.verification_key_v2 = {} + self.user.save() + + + # log users out before they land on institutional forgot password page + def test_forgot_password_logs_out_user(self): + # TODO: check in qa url encoding + # visit forgot password link while another user is logged in + res = self.app.get(self.get_url, auth=self.auth_user.auth) + # check redirection to CAS logout + assert res.status_code == 302 + location = res.headers.get('Location') + assert quote_plus('campaign=unsupportedinstitution') in location + assert 'logout?service=' in location + + # test that institutional forgot password page redirects to CAS unsupported + # institution page + def test_get_forgot_password(self): + res = self.app.get(self.get_url) + assert res.status_code == 302 + location = res.headers.get('Location') + assert 'campaign=unsupportedinstitution' in location + + # test that user from disabled institution can receive reset password email + def test_can_receive_reset_password_email(self): + # submit email to institutional forgot-password page + + with capture_notifications() as notifications: + res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) + + # check mail was sent + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL + # check http 200 response + assert res.status_code == 200 + # check request URL is /forgotpassword + assert res.request.path == self.post_url + # check push notification + assert_in_html('If there is an OSF account', res.text) + assert_not_in_html('Please wait', res.text) + + # check verification_key_v2 is set + self.user.reload() + assert self.user.verification_key_v2 != {} + + # test that non-existing user cannot receive reset password email + def test_cannot_receive_reset_password_email(self): + # load forgot password page and submit email + + with capture_notifications() as noifications: + res = self.app.post(self.post_url, data={'forgot_password-email': 'fake' + self.user.username}) + # check mail was not sent + assert not noifications + # check http 200 
response + assert res.status_code == 200 + # check request URL is /forgotpassword-institution + assert res.request.path == self.post_url + # check push notification + assert_in_html('If there is an OSF account', res.text) + assert_not_in_html('Please wait', res.text) + + # check verification_key_v2 is not set + self.user.reload() + assert self.user.verification_key_v2 == {} + + # test that non-existing user cannot receive institutional reset password email + def test_not_active_user_no_reset_password_email(self): + self.user.deactivate_account() + self.user.save() + + with capture_notifications() as notification: + res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) + + # check mail was not sent + assert not notification + # check http 200 response + assert res.status_code == 200 + # check request URL is /forgotpassword-institution + assert res.request.path == self.post_url + # check push notification + assert_in_html('If there is an OSF account', res.text) + assert_not_in_html('Please wait', res.text) + + # check verification_key_v2 is not set + self.user.reload() + assert self.user.verification_key_v2 == {} + + # test that user cannot submit forgot password request too quickly + def test_cannot_reset_password_twice_quickly(self): + # submit institutional forgot-password request in rapid succession + res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) + res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) + + # check http 200 response + assert res.status_code == 200 + # check push notification + assert_in_html('Please wait', res.text) + assert_not_in_html('If there is an OSF account', res.text) + diff --git a/tests/test_preprints.py b/tests/test_preprints.py index 728fb1fe1c8..df1be915bab 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -26,7 +26,7 @@ from addons.base import views from admin_tests.utilities import setup_view from api.preprints.views import PreprintContributorDetail -from osf.models import Tag, Preprint, PreprintLog, PreprintContributor +from osf.models import Tag, Preprint, PreprintLog, PreprintContributor, NotificationType from osf.exceptions import PreprintStateError, ValidationError, ValidationValueError from osf_tests.factories import ( ProjectFactory, @@ -43,7 +43,7 @@ from osf.utils.permissions import READ, WRITE, ADMIN from osf.utils.workflows import DefaultStates, RequestTypes, ReviewStates from tests.base import assert_datetime_equal, OsfTestCase -from tests.utils import assert_preprint_logs +from tests.utils import assert_preprint_logs, capture_notifications from website import settings, mails from website.identifiers.clients import CrossRefClient, ECSArXivCrossRefClient, crossref from website.identifiers.utils import request_identifiers @@ -1999,13 +1999,15 @@ def setUp(self): self.mock_send_grid = start_mock_send_grid(self) def test_creator_gets_email(self): - self.preprint.set_published(True, auth=Auth(self.user), save=True) - domain = self.preprint.provider.domain or settings.DOMAIN - self.mock_send_grid.assert_called() - assert self.mock_send_grid.call_count == 1 - - self.preprint_branded.set_published(True, auth=Auth(self.user), save=True) - assert self.mock_send_grid.call_count == 2 + with capture_notifications() as notifications: + self.preprint.set_published(True, auth=Auth(self.user), save=True) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL + + with 
capture_notifications() as notifications: + self.preprint_branded.set_published(True, auth=Auth(self.user), save=True) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL class TestPreprintOsfStorage(OsfTestCase): diff --git a/tests/test_resend_confirmation.py b/tests/test_resend_confirmation.py new file mode 100644 index 00000000000..95609e5ad76 --- /dev/null +++ b/tests/test_resend_confirmation.py @@ -0,0 +1,83 @@ +from osf.models import NotificationType +from tests.base import OsfTestCase +from osf_tests.factories import ( + UserFactory, + UnconfirmedUserFactory, +) +from tests.utils import capture_notifications +from website.util import web_url_for +from tests.test_webtests import assert_in_html + +class TestResendConfirmation(OsfTestCase): + + def setUp(self): + super().setUp() + self.unconfirmed_user = UnconfirmedUserFactory() + self.confirmed_user = UserFactory() + self.get_url = web_url_for('resend_confirmation_get') + self.post_url = web_url_for('resend_confirmation_post') + + # test that resend confirmation page is load correctly + def test_resend_confirmation_get(self): + res = self.app.get(self.get_url) + assert res.status_code == 200 + assert 'Resend Confirmation' in res.text + assert res.get_form('resendForm') + + # test that unconfirmed user can receive resend confirmation email + def test_can_receive_resend_confirmation_email(self): + # load resend confirmation page and submit email + res = self.app.get(self.get_url) + form = res.get_form('resendForm') + form['email'] = self.unconfirmed_user.unconfirmed_emails[0] + with capture_notifications() as notifications: + res = form.submit(self.app) + # check email, request and response + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL + assert res.status_code == 200 + assert res.request.path == self.post_url + assert_in_html('If there is an OSF account', res.text) + + + # test that confirmed user cannot receive resend confirmation email + def test_cannot_receive_resend_confirmation_email_1(self): + # load resend confirmation page and submit email + res = self.app.get(self.get_url) + form = res.get_form('resendForm') + form['email'] = self.confirmed_user.emails.first().address + with capture_notifications() as notifications: + res = form.submit(self.app) + + assert not notifications + assert res.status_code == 200 + assert res.request.path == self.post_url + assert_in_html('has already been confirmed', res.text) + + # test that non-existing user cannot receive resend confirmation email + def test_cannot_receive_resend_confirmation_email_2(self): + # load resend confirmation page and submit email + res = self.app.get(self.get_url) + form = res.get_form('resendForm') + form['email'] = 'random@random.com' + with capture_notifications() as notifications: + res = form.submit(self.app) + # check email, request and response + assert notifications + assert res.status_code == 200 + assert res.request.path == self.post_url + assert_in_html('If there is an OSF account', res.text) + + # test that user cannot submit resend confirmation request too quickly + def test_cannot_resend_confirmation_twice_quickly(self): + # load resend confirmation page and submit email + res = self.app.get(self.get_url) + form = res.get_form('resendForm') + form['email'] = self.unconfirmed_user.email + form.submit(self.app) + res = form.submit(self.app) + + # check request and response + assert res.status_code == 200 + assert_in_html('Please 
wait', res.text) + diff --git a/tests/test_user_claiming.py b/tests/test_user_claiming.py new file mode 100644 index 00000000000..8174a5600b5 --- /dev/null +++ b/tests/test_user_claiming.py @@ -0,0 +1,267 @@ +from rest_framework import status +import unittest + +import pytest +from framework.auth import exceptions +from framework.auth.core import Auth +from tests.base import OsfTestCase +from tests.base import fake +from osf_tests.factories import ( + fake_email, + AuthUserFactory, + PreprintFactory, + ProjectFactory, + UserFactory, + UnconfirmedUserFactory, + UnregUserFactory, +) +from tests.test_webtests import assert_in_html +from website import language +from website.util import api_url_for + +@pytest.mark.enable_bookmark_creation +@pytest.mark.enable_implicit_clean +class TestClaiming(OsfTestCase): + + def setUp(self): + super().setUp() + self.referrer = AuthUserFactory() + self.project = ProjectFactory(creator=self.referrer, is_public=True) + + def test_correct_name_shows_in_contributor_list(self): + name1, email = fake.name(), fake_email() + UnregUserFactory(fullname=name1, email=email) + name2, email = fake.name(), fake_email() + # Added with different name + self.project.add_unregistered_contributor(fullname=name2, + email=email, auth=Auth(self.referrer)) + self.project.save() + + res = self.app.get(self.project.url, auth=self.referrer.auth) + # Correct name is shown + assert_in_html(name2, res.text) + assert name1 not in res.text + + def test_user_can_set_password_on_claim_page(self): + name, email = fake.name(), fake_email() + new_user = self.project.add_unregistered_contributor( + email=email, + fullname=name, + auth=Auth(self.referrer) + ) + self.project.save() + claim_url = new_user.get_claim_url(self.project._primary_key) + res = self.app.get(claim_url) + self.project.reload() + assert 'Set Password' in res.text + form = res.get_form('setPasswordForm') + #form['username'] = new_user.username #Removed as long as E-mail can't be updated. 
+ form['password'] = 'killerqueen' + form['password2'] = 'killerqueen' + self.app.resolve_redirect(form.submit(self.app)) + new_user.reload() + assert new_user.check_password('killerqueen') + + def test_sees_is_redirected_if_user_already_logged_in(self): + name, email = fake.name(), fake_email() + new_user = self.project.add_unregistered_contributor( + email=email, + fullname=name, + auth=Auth(self.referrer) + ) + self.project.save() + existing = AuthUserFactory() + claim_url = new_user.get_claim_url(self.project._primary_key) + # a user is already logged in + res = self.app.get(claim_url, auth=existing.auth) + assert res.status_code == 302 + + def test_unregistered_users_names_are_project_specific(self): + name1, name2, email = fake.name(), fake.name(), fake_email() + project2 = ProjectFactory(creator=self.referrer) + # different projects use different names for the same unreg contributor + self.project.add_unregistered_contributor( + email=email, + fullname=name1, + auth=Auth(self.referrer) + ) + self.project.save() + project2.add_unregistered_contributor( + email=email, + fullname=name2, + auth=Auth(self.referrer) + ) + project2.save() + # Each project displays a different name in the contributor list + res = self.app.get(self.project.url, auth=self.referrer.auth) + assert_in_html(name1, res.text) + + res2 = self.app.get(project2.url, auth=self.referrer.auth) + assert_in_html(name2, res2.text) + + @unittest.skip('as long as E-mails cannot be changed') + def test_cannot_set_email_to_a_user_that_already_exists(self): + reg_user = UserFactory() + name, email = fake.name(), fake_email() + new_user = self.project.add_unregistered_contributor( + email=email, + fullname=name, + auth=Auth(self.referrer) + ) + self.project.save() + # Goes to claim url and successfully claims account + claim_url = new_user.get_claim_url(self.project._primary_key) + res = self.app.get(claim_url) + self.project.reload() + assert 'Set Password' in res + form = res.get_form('setPasswordForm') + # Fills out an email that is the username of another user + form['username'] = reg_user.username + form['password'] = 'killerqueen' + form['password2'] = 'killerqueen' + res = form.submit(follow_redirects=True) + assert language.ALREADY_REGISTERED.format(email=reg_user.username) in res.text + + def test_correct_display_name_is_shown_at_claim_page(self): + original_name = fake.name() + unreg = UnregUserFactory(fullname=original_name) + + different_name = fake.name() + new_user = self.project.add_unregistered_contributor( + email=unreg.username, + fullname=different_name, + auth=Auth(self.referrer), + ) + self.project.save() + claim_url = new_user.get_claim_url(self.project._primary_key) + res = self.app.get(claim_url) + # Correct name (different_name) should be on page + assert_in_html(different_name, res.text) + + +class TestConfirmingEmail(OsfTestCase): + + def setUp(self): + super().setUp() + self.user = UnconfirmedUserFactory() + self.confirmation_url = self.user.get_confirmation_url( + self.user.username, + external=False, + ) + self.confirmation_token = self.user.get_confirmation_token( + self.user.username + ) + + def test_cannot_remove_another_user_email(self): + user1 = AuthUserFactory() + user2 = AuthUserFactory() + url = api_url_for('update_user') + header = {'id': user1.username, 'emails': [{'address': user1.username}]} + res = self.app.put(url, json=header, auth=user2.auth) + assert res.status_code == 403 + + def test_cannnot_make_primary_email_for_another_user(self): + user1 = AuthUserFactory() + user2 = 
AuthUserFactory() + email = 'test@cos.io' + user1.emails.create(address=email) + user1.save() + url = api_url_for('update_user') + header = {'id': user1.username, + 'emails': [{'address': user1.username, 'primary': False, 'confirmed': True}, + {'address': email, 'primary': True, 'confirmed': True} + ]} + res = self.app.put(url, json=header, auth=user2.auth) + assert res.status_code == 403 + + def test_cannnot_add_email_for_another_user(self): + user1 = AuthUserFactory() + user2 = AuthUserFactory() + email = 'test@cos.io' + url = api_url_for('update_user') + header = {'id': user1.username, + 'emails': [{'address': user1.username, 'primary': True, 'confirmed': True}, + {'address': email, 'primary': False, 'confirmed': False} + ]} + res = self.app.put(url, json=header, auth=user2.auth) + assert res.status_code == 403 + + def test_error_page_if_confirm_link_is_used(self): + self.user.confirm_email(self.confirmation_token) + self.user.save() + res = self.app.get(self.confirmation_url) + + assert exceptions.InvalidTokenError.message_short in res.text + assert res.status_code == status.HTTP_400_BAD_REQUEST + + +@pytest.mark.enable_implicit_clean +@pytest.mark.enable_bookmark_creation +class TestClaimingAsARegisteredUser(OsfTestCase): + + def setUp(self): + super().setUp() + self.referrer = AuthUserFactory() + self.project = ProjectFactory(creator=self.referrer, is_public=True) + name, email = fake.name(), fake_email() + self.user = self.project.add_unregistered_contributor( + fullname=name, + email=email, + auth=Auth(user=self.referrer) + ) + self.project.save() + + def test_claim_user_registered_with_correct_password(self): + reg_user = AuthUserFactory() # NOTE: AuthUserFactory sets password as 'queenfan86' + url = self.user.get_claim_url(self.project._primary_key) + # Follow to password re-enter page + res = self.app.get(url, auth=reg_user.auth, follow_redirects=True) + + # verify that the "Claim Account" form is returned + assert 'Claim Contributor' in res.text + + form = res.get_form('claimContributorForm') + form['password'] = 'queenfan86' + res = form.submit(self.app, auth=reg_user.auth) + self.app.resolve_redirect(res) + self.project.reload() + self.user.reload() + # user is now a contributor to the project + assert reg_user in self.project.contributors + + # the unregistered user (self.user) is removed as a contributor, and their + assert self.user not in self.project.contributors + + # unclaimed record for the project has been deleted + assert self.project not in self.user.unclaimed_records + + def test_claim_user_registered_preprint_with_correct_password(self): + preprint = PreprintFactory(creator=self.referrer) + name, email = fake.name(), fake_email() + unreg_user = preprint.add_unregistered_contributor( + fullname=name, + email=email, + auth=Auth(user=self.referrer) + ) + reg_user = AuthUserFactory() # NOTE: AuthUserFactory sets password as 'queenfan86' + url = unreg_user.get_claim_url(preprint._id) + # Follow to password re-enter page + res = self.app.get(url, auth=reg_user.auth, follow_redirects=True) + + # verify that the "Claim Account" form is returned + assert 'Claim Contributor' in res.text + + form = res.get_form('claimContributorForm') + form['password'] = 'queenfan86' + res = form.submit(self.app, auth=reg_user.auth) + + preprint.reload() + unreg_user.reload() + # user is now a contributor to the project + assert reg_user in preprint.contributors + + # the unregistered user (unreg_user) is removed as a contributor, and their + assert unreg_user not in preprint.contributors 
+ + # unclaimed record for the project has been deleted + assert preprint not in unreg_user.unclaimed_records diff --git a/tests/test_webtests.py b/tests/test_webtests.py index c55e6b523f4..92cf8b6f2f5 100644 --- a/tests/test_webtests.py +++ b/tests/test_webtests.py @@ -1,8 +1,5 @@ #!/usr/bin/env python3 """Functional tests using WebTest.""" -from urllib.parse import quote_plus - -from rest_framework import status import logging import unittest @@ -13,30 +10,22 @@ from bs4 import BeautifulSoup from django.utils import timezone from addons.wiki.utils import to_mongo_key -from framework.auth import exceptions from framework.auth.core import Auth from tests.base import OsfTestCase -from tests.base import fake from osf_tests.factories import ( - fake_email, AuthUserFactory, NodeFactory, PreprintFactory, PreprintProviderFactory, PrivateLinkFactory, ProjectFactory, - RegistrationFactory, SubjectFactory, UserFactory, - UnconfirmedUserFactory, - UnregUserFactory, ) from osf.utils import permissions from addons.wiki.models import WikiPage, WikiVersion from addons.wiki.tests.factories import WikiFactory, WikiVersionFactory -from website import language -from website.util import web_url_for, api_url_for -from conftest import start_mock_send_grid, start_mock_notification_send +from website.util import web_url_for logging.getLogger('website.project.model').setLevel(logging.ERROR) @@ -205,7 +194,7 @@ def test_wiki_content(self): user=self.user, node=project, ) - wiki = WikiVersionFactory( + WikiVersionFactory( wiki_page=wiki_page, content=wiki_content ) @@ -467,633 +456,6 @@ def test_wiki_url(self): assert self._url_to_body(self.wiki.deep_url) == self._url_to_body(self.wiki.url) -@pytest.mark.enable_bookmark_creation -@pytest.mark.enable_implicit_clean -class TestClaiming(OsfTestCase): - - def setUp(self): - super().setUp() - self.referrer = AuthUserFactory() - self.project = ProjectFactory(creator=self.referrer, is_public=True) - - def test_correct_name_shows_in_contributor_list(self): - name1, email = fake.name(), fake_email() - UnregUserFactory(fullname=name1, email=email) - name2, email = fake.name(), fake_email() - # Added with different name - self.project.add_unregistered_contributor(fullname=name2, - email=email, auth=Auth(self.referrer)) - self.project.save() - - res = self.app.get(self.project.url, auth=self.referrer.auth) - # Correct name is shown - assert_in_html(name2, res.text) - assert name1 not in res.text - - def test_user_can_set_password_on_claim_page(self): - name, email = fake.name(), fake_email() - new_user = self.project.add_unregistered_contributor( - email=email, - fullname=name, - auth=Auth(self.referrer) - ) - self.project.save() - claim_url = new_user.get_claim_url(self.project._primary_key) - res = self.app.get(claim_url) - self.project.reload() - assert 'Set Password' in res.text - form = res.get_form('setPasswordForm') - #form['username'] = new_user.username #Removed as long as E-mail can't be updated. 
- form['password'] = 'killerqueen' - form['password2'] = 'killerqueen' - self.app.resolve_redirect(form.submit(self.app)) - new_user.reload() - assert new_user.check_password('killerqueen') - - def test_sees_is_redirected_if_user_already_logged_in(self): - name, email = fake.name(), fake_email() - new_user = self.project.add_unregistered_contributor( - email=email, - fullname=name, - auth=Auth(self.referrer) - ) - self.project.save() - existing = AuthUserFactory() - claim_url = new_user.get_claim_url(self.project._primary_key) - # a user is already logged in - res = self.app.get(claim_url, auth=existing.auth) - assert res.status_code == 302 - - def test_unregistered_users_names_are_project_specific(self): - name1, name2, email = fake.name(), fake.name(), fake_email() - project2 = ProjectFactory(creator=self.referrer) - # different projects use different names for the same unreg contributor - self.project.add_unregistered_contributor( - email=email, - fullname=name1, - auth=Auth(self.referrer) - ) - self.project.save() - project2.add_unregistered_contributor( - email=email, - fullname=name2, - auth=Auth(self.referrer) - ) - project2.save() - # Each project displays a different name in the contributor list - res = self.app.get(self.project.url, auth=self.referrer.auth) - assert_in_html(name1, res.text) - - res2 = self.app.get(project2.url, auth=self.referrer.auth) - assert_in_html(name2, res2.text) - - @unittest.skip('as long as E-mails cannot be changed') - def test_cannot_set_email_to_a_user_that_already_exists(self): - reg_user = UserFactory() - name, email = fake.name(), fake_email() - new_user = self.project.add_unregistered_contributor( - email=email, - fullname=name, - auth=Auth(self.referrer) - ) - self.project.save() - # Goes to claim url and successfully claims account - claim_url = new_user.get_claim_url(self.project._primary_key) - res = self.app.get(claim_url) - self.project.reload() - assert 'Set Password' in res - form = res.get_form('setPasswordForm') - # Fills out an email that is the username of another user - form['username'] = reg_user.username - form['password'] = 'killerqueen' - form['password2'] = 'killerqueen' - res = form.submit(follow_redirects=True) - assert language.ALREADY_REGISTERED.format(email=reg_user.username) in res.text - - def test_correct_display_name_is_shown_at_claim_page(self): - original_name = fake.name() - unreg = UnregUserFactory(fullname=original_name) - - different_name = fake.name() - new_user = self.project.add_unregistered_contributor( - email=unreg.username, - fullname=different_name, - auth=Auth(self.referrer), - ) - self.project.save() - claim_url = new_user.get_claim_url(self.project._primary_key) - res = self.app.get(claim_url) - # Correct name (different_name) should be on page - assert_in_html(different_name, res.text) - - -class TestConfirmingEmail(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = UnconfirmedUserFactory() - self.confirmation_url = self.user.get_confirmation_url( - self.user.username, - external=False, - ) - self.confirmation_token = self.user.get_confirmation_token( - self.user.username - ) - - def test_cannot_remove_another_user_email(self): - user1 = AuthUserFactory() - user2 = AuthUserFactory() - url = api_url_for('update_user') - header = {'id': user1.username, 'emails': [{'address': user1.username}]} - res = self.app.put(url, json=header, auth=user2.auth) - assert res.status_code == 403 - - def test_cannnot_make_primary_email_for_another_user(self): - user1 = AuthUserFactory() - user2 = 
AuthUserFactory() - email = 'test@cos.io' - user1.emails.create(address=email) - user1.save() - url = api_url_for('update_user') - header = {'id': user1.username, - 'emails': [{'address': user1.username, 'primary': False, 'confirmed': True}, - {'address': email, 'primary': True, 'confirmed': True} - ]} - res = self.app.put(url, json=header, auth=user2.auth) - assert res.status_code == 403 - - def test_cannnot_add_email_for_another_user(self): - user1 = AuthUserFactory() - user2 = AuthUserFactory() - email = 'test@cos.io' - url = api_url_for('update_user') - header = {'id': user1.username, - 'emails': [{'address': user1.username, 'primary': True, 'confirmed': True}, - {'address': email, 'primary': False, 'confirmed': False} - ]} - res = self.app.put(url, json=header, auth=user2.auth) - assert res.status_code == 403 - - def test_error_page_if_confirm_link_is_used(self): - self.user.confirm_email(self.confirmation_token) - self.user.save() - res = self.app.get(self.confirmation_url) - - assert exceptions.InvalidTokenError.message_short in res.text - assert res.status_code == status.HTTP_400_BAD_REQUEST - - -@pytest.mark.enable_implicit_clean -@pytest.mark.enable_bookmark_creation -class TestClaimingAsARegisteredUser(OsfTestCase): - - def setUp(self): - super().setUp() - self.referrer = AuthUserFactory() - self.project = ProjectFactory(creator=self.referrer, is_public=True) - name, email = fake.name(), fake_email() - self.user = self.project.add_unregistered_contributor( - fullname=name, - email=email, - auth=Auth(user=self.referrer) - ) - self.project.save() - - def test_claim_user_registered_with_correct_password(self): - reg_user = AuthUserFactory() # NOTE: AuthUserFactory sets password as 'queenfan86' - url = self.user.get_claim_url(self.project._primary_key) - # Follow to password re-enter page - res = self.app.get(url, auth=reg_user.auth, follow_redirects=True) - - # verify that the "Claim Account" form is returned - assert 'Claim Contributor' in res.text - - form = res.get_form('claimContributorForm') - form['password'] = 'queenfan86' - res = form.submit(self.app, auth=reg_user.auth) - self.app.resolve_redirect(res) - self.project.reload() - self.user.reload() - # user is now a contributor to the project - assert reg_user in self.project.contributors - - # the unregistered user (self.user) is removed as a contributor, and their - assert self.user not in self.project.contributors - - # unclaimed record for the project has been deleted - assert self.project not in self.user.unclaimed_records - - def test_claim_user_registered_preprint_with_correct_password(self): - preprint = PreprintFactory(creator=self.referrer) - name, email = fake.name(), fake_email() - unreg_user = preprint.add_unregistered_contributor( - fullname=name, - email=email, - auth=Auth(user=self.referrer) - ) - reg_user = AuthUserFactory() # NOTE: AuthUserFactory sets password as 'queenfan86' - url = unreg_user.get_claim_url(preprint._id) - # Follow to password re-enter page - res = self.app.get(url, auth=reg_user.auth, follow_redirects=True) - - # verify that the "Claim Account" form is returned - assert 'Claim Contributor' in res.text - - form = res.get_form('claimContributorForm') - form['password'] = 'queenfan86' - res = form.submit(self.app, auth=reg_user.auth) - - preprint.reload() - unreg_user.reload() - # user is now a contributor to the project - assert reg_user in preprint.contributors - - # the unregistered user (unreg_user) is removed as a contributor, and their - assert unreg_user not in preprint.contributors 
- - # unclaimed record for the project has been deleted - assert preprint not in unreg_user.unclaimed_records - - -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) -class TestResendConfirmation(OsfTestCase): - - def setUp(self): - super().setUp() - self.unconfirmed_user = UnconfirmedUserFactory() - self.confirmed_user = UserFactory() - self.get_url = web_url_for('resend_confirmation_get') - self.post_url = web_url_for('resend_confirmation_post') - - self.mock_send_grid = start_mock_send_grid(self) - - # test that resend confirmation page is load correctly - def test_resend_confirmation_get(self): - res = self.app.get(self.get_url) - assert res.status_code == 200 - assert 'Resend Confirmation' in res.text - assert res.get_form('resendForm') - - # test that unconfirmed user can receive resend confirmation email - def test_can_receive_resend_confirmation_email(self): - # load resend confirmation page and submit email - res = self.app.get(self.get_url) - form = res.get_form('resendForm') - form['email'] = self.unconfirmed_user.unconfirmed_emails[0] - res = form.submit(self.app) - - # check email, request and response - assert self.mock_send_grid.called - assert res.status_code == 200 - assert res.request.path == self.post_url - assert_in_html('If there is an OSF account', res.text) - - # test that confirmed user cannot receive resend confirmation email - def test_cannot_receive_resend_confirmation_email_1(self): - # load resend confirmation page and submit email - res = self.app.get(self.get_url) - form = res.get_form('resendForm') - form['email'] = self.confirmed_user.emails.first().address - res = form.submit(self.app) - - # check email, request and response - assert not self.mock_send_grid.called - assert res.status_code == 200 - assert res.request.path == self.post_url - assert_in_html('has already been confirmed', res.text) - - # test that non-existing user cannot receive resend confirmation email - def test_cannot_receive_resend_confirmation_email_2(self): - # load resend confirmation page and submit email - res = self.app.get(self.get_url) - form = res.get_form('resendForm') - form['email'] = 'random@random.com' - res = form.submit(self.app) - - # check email, request and response - assert not self.mock_send_grid.called - assert res.status_code == 200 - assert res.request.path == self.post_url - assert_in_html('If there is an OSF account', res.text) - - # test that user cannot submit resend confirmation request too quickly - def test_cannot_resend_confirmation_twice_quickly(self): - # load resend confirmation page and submit email - res = self.app.get(self.get_url) - form = res.get_form('resendForm') - form['email'] = self.unconfirmed_user.email - res = form.submit(self.app) - res = form.submit(self.app) - - # check request and response - assert res.status_code == 200 - assert_in_html('Please wait', res.text) - - -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) -class TestForgotPassword(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = UserFactory() - self.auth_user = AuthUserFactory() - self.get_url = web_url_for('forgot_password_get') - self.post_url = web_url_for('forgot_password_post') - self.user.verification_key_v2 = {} - self.user.save() - - self.mock_send_grid = start_mock_send_grid(self) - self.start_mock_notification_send = start_mock_notification_send(self) - - # log users out before they land on forgot password page - def 
test_forgot_password_logs_out_user(self): - # visit forgot password link while another user is logged in - res = self.app.get(self.get_url, auth=self.auth_user.auth) - # check redirection to CAS logout - assert res.status_code == 302 - location = res.headers.get('Location') - assert 'reauth' not in location - assert 'logout?service=' in location - assert 'forgotpassword' in location - - # test that forgot password page is loaded correctly - def test_get_forgot_password(self): - res = self.app.get(self.get_url) - assert res.status_code == 200 - assert 'Forgot Password' in res.text - assert res.get_form('forgotPasswordForm') - - # test that existing user can receive reset password email - def test_can_receive_reset_password_email(self): - # load forgot password page and submit email - res = self.app.get(self.get_url) - form = res.get_form('forgotPasswordForm') - form['forgot_password-email'] = self.user.username - res = form.submit(self.app) - - # check mail was sent - assert self.start_mock_notification_send.called - # check http 200 response - assert res.status_code == 200 - # check request URL is /forgotpassword - assert res.request.path == self.post_url - # check push notification - assert_in_html('If there is an OSF account', res.text) - assert_not_in_html('Please wait', res.text) - - # check verification_key_v2 is set - self.user.reload() - assert self.user.verification_key_v2 != {} - - # test that non-existing user cannot receive reset password email - def test_cannot_receive_reset_password_email(self): - # load forgot password page and submit email - res = self.app.get(self.get_url) - form = res.get_form('forgotPasswordForm') - form['forgot_password-email'] = 'fake' + self.user.username - res = form.submit(self.app) - - # check mail was not sent - assert not self.mock_send_grid.called - # check http 200 response - assert res.status_code == 200 - # check request URL is /forgotpassword - assert res.request.path == self.post_url - # check push notification - assert_in_html('If there is an OSF account', res.text) - assert_not_in_html('Please wait', res.text) - - # check verification_key_v2 is not set - self.user.reload() - assert self.user.verification_key_v2 == {} - - # test that non-existing user cannot receive reset password email - def test_not_active_user_no_reset_password_email(self): - self.user.deactivate_account() - self.user.save() - - # load forgot password page and submit email - res = self.app.get(self.get_url) - form = res.get_form('forgotPasswordForm') - form['forgot_password-email'] = self.user.username - res = form.submit(self.app) - - # check mail was not sent - assert not self.mock_send_grid.called - # check http 200 response - assert res.status_code == 200 - # check request URL is /forgotpassword - assert res.request.path == self.post_url - # check push notification - assert_in_html('If there is an OSF account', res.text) - assert_not_in_html('Please wait', res.text) - - # check verification_key_v2 is not set - self.user.reload() - assert self.user.verification_key_v2 == {} - - # test that user cannot submit forgot password request too quickly - def test_cannot_reset_password_twice_quickly(self): - # load forgot password page and submit email - res = self.app.get(self.get_url) - form = res.get_form('forgotPasswordForm') - form['forgot_password-email'] = self.user.username - res = form.submit(self.app) - res = form.submit(self.app) - - # check http 200 response - assert res.status_code == 200 - # check push notification - assert_in_html('Please wait', res.text) - 
assert_not_in_html('If there is an OSF account', res.text) - - -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) -class TestForgotPasswordInstitution(OsfTestCase): - - def setUp(self): - super().setUp() - self.user = UserFactory() - self.auth_user = AuthUserFactory() - self.get_url = web_url_for('redirect_unsupported_institution') - self.post_url = web_url_for('forgot_password_institution_post') - self.user.verification_key_v2 = {} - self.user.save() - - self.mock_send_grid = start_mock_send_grid(self) - self.start_mock_notification_send = start_mock_notification_send(self) - - # log users out before they land on institutional forgot password page - def test_forgot_password_logs_out_user(self): - # TODO: check in qa url encoding - # visit forgot password link while another user is logged in - res = self.app.get(self.get_url, auth=self.auth_user.auth) - # check redirection to CAS logout - assert res.status_code == 302 - location = res.headers.get('Location') - assert quote_plus('campaign=unsupportedinstitution') in location - assert 'logout?service=' in location - - # test that institutional forgot password page redirects to CAS unsupported - # institution page - def test_get_forgot_password(self): - res = self.app.get(self.get_url) - assert res.status_code == 302 - location = res.headers.get('Location') - assert 'campaign=unsupportedinstitution' in location - - # test that user from disabled institution can receive reset password email - def test_can_receive_reset_password_email(self): - # submit email to institutional forgot-password page - res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) - - # check mail was sent - assert self.start_mock_notification_send.called - # check http 200 response - assert res.status_code == 200 - # check request URL is /forgotpassword - assert res.request.path == self.post_url - # check push notification - assert_in_html('If there is an OSF account', res.text) - assert_not_in_html('Please wait', res.text) - - # check verification_key_v2 is set - self.user.reload() - assert self.user.verification_key_v2 != {} - - # test that non-existing user cannot receive reset password email - def test_cannot_receive_reset_password_email(self): - # load forgot password page and submit email - res = self.app.post(self.post_url, data={'forgot_password-email': 'fake' + self.user.username}) - - # check mail was not sent - assert not self.mock_send_grid.called - # check http 200 response - assert res.status_code == 200 - # check request URL is /forgotpassword-institution - assert res.request.path == self.post_url - # check push notification - assert_in_html('If there is an OSF account', res.text) - assert_not_in_html('Please wait', res.text) - - # check verification_key_v2 is not set - self.user.reload() - assert self.user.verification_key_v2 == {} - - # test that non-existing user cannot receive institutional reset password email - def test_not_active_user_no_reset_password_email(self): - self.user.deactivate_account() - self.user.save() - - res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) - - # check mail was not sent - assert not self.mock_send_grid.called - # check http 200 response - assert res.status_code == 200 - # check request URL is /forgotpassword-institution - assert res.request.path == self.post_url - # check push notification - assert_in_html('If there is an OSF account', res.text) - assert_not_in_html('Please wait', res.text) - - 
# check verification_key_v2 is not set - self.user.reload() - assert self.user.verification_key_v2 == {} - - # test that user cannot submit forgot password request too quickly - def test_cannot_reset_password_twice_quickly(self): - # submit institutional forgot-password request in rapid succession - res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) - res = self.app.post(self.post_url, data={'forgot_password-email': self.user.username}) - - # check http 200 response - assert res.status_code == 200 - # check push notification - assert_in_html('Please wait', res.text) - assert_not_in_html('If there is an OSF account', res.text) - - -@unittest.skip('Public projects/components are dynamically loaded now.') -class TestAUserProfile(OsfTestCase): - - def setUp(self): - OsfTestCase.setUp(self) - - self.user = AuthUserFactory() - self.me = AuthUserFactory() - self.project = ProjectFactory(creator=self.me, is_public=True, title=fake.bs()) - self.component = NodeFactory(creator=self.me, parent=self.project, is_public=True, title=fake.bs()) - - # regression test for https://github.com/CenterForOpenScience/osf.io/issues/2623 - def test_has_public_projects_and_components(self): - # I go to my own profile - url = web_url_for('profile_view_id', uid=self.me._primary_key) - # I see the title of both my project and component - res = self.app.get(url, auth=self.me.auth) - assert_in_html(self.component.title, res) - assert_in_html(self.project.title, res) - - # Another user can also see my public project and component - url = web_url_for('profile_view_id', uid=self.me._primary_key) - # I see the title of both my project and component - res = self.app.get(url, auth=self.user.auth) - assert_in_html(self.component.title, res) - assert_in_html(self.project.title, res) - - def test_shows_projects_with_many_contributors(self): - # My project has many contributors - for _ in range(5): - user = UserFactory() - self.project.add_contributor(user, auth=Auth(self.project.creator), save=True) - - # I go to my own profile - url = web_url_for('profile_view_id', uid=self.me._primary_key) - res = self.app.get(url, auth=self.me.auth) - # I see '3 more' as a link - assert '3 more' in res.text - - res = res.click('3 more') - assert res.request.path == self.project.url - - def test_has_no_public_projects_or_components_on_own_profile(self): - # User goes to their profile - url = web_url_for('profile_view_id', uid=self.user._id) - res = self.app.get(url, auth=self.user.auth) - - # user has no public components/projects - assert 'You have no public projects' in res - assert 'You have no public components' in res - - def test_user_no_public_projects_or_components(self): - # I go to other user's profile - url = web_url_for('profile_view_id', uid=self.user._id) - # User has no public components/projects - res = self.app.get(url, auth=self.me.auth) - assert 'This user has no public projects' in res - assert 'This user has no public components'in res - - # regression test - def test_does_not_show_registrations(self): - project = ProjectFactory(creator=self.user) - component = NodeFactory(parent=project, creator=self.user, is_public=False) - # User has a registration with public components - reg = RegistrationFactory(project=component.parent_node, creator=self.user, is_public=True) - for each in reg.nodes: - each.is_public = True - each.save() - # I go to other user's profile - url = web_url_for('profile_view_id', uid=self.user._id) - # Registration does not appear on profile - res = self.app.get(url, 
auth=self.me.auth) - assert 'This user has no public components' in res - assert reg.title not in res - assert reg.nodes[0].title not in res - - @pytest.mark.enable_bookmark_creation class TestPreprintBannerView(OsfTestCase): def setUp(self): From 68b04946ef3226c341c3de9bfad55e586f6244cb Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 25 Jul 2025 15:44:14 -0400 Subject: [PATCH 121/176] more clean-up from self-CR --- api/nodes/serializers.py | 12 ++++-------- api/nodes/views.py | 4 ++-- api/providers/serializers.py | 11 +++++------ api/providers/tasks.py | 26 ++++++++++++-------------- api/users/views.py | 31 +++++++++++++++++++++---------- tests/test_forgot_password.py | 2 +- website/language.py | 3 +++ 7 files changed, 48 insertions(+), 41 deletions(-) diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index 4154dfbb5f8..8725479456d 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -1256,15 +1256,11 @@ def create(self, validated_data): try: contributor_dict = { - 'auth': auth, - 'user_id': id, - 'email': email, - 'full_name': full_name, - 'send_email': send_email, - 'bibliographic': bibliographic, - 'index': index, - 'permissions': permissions, + 'auth': auth, 'user_id': id, 'email': email, 'full_name': full_name, 'send_email': send_email, + 'bibliographic': bibliographic, 'index': index, } + + contributor_dict['permissions'] = permissions contributor_obj = node.add_contributor_registered_or_not(**contributor_dict) except ValidationError as e: raise exceptions.ValidationError(detail=e.messages[0]) diff --git a/api/nodes/views.py b/api/nodes/views.py index 14e104b4de0..50ba08cb7fe 100644 --- a/api/nodes/views.py +++ b/api/nodes/views.py @@ -1051,7 +1051,7 @@ def perform_create(self, serializer): user=user, event_context={ 'guid': node._id, - 'title': node._id, + 'title': node.title, 'can_change_preferences': False, }, ) @@ -1063,7 +1063,7 @@ def perform_create(self, serializer): user=user, event_context={ 'guid': fork._id, - 'title': node._id, + 'title': node.title, 'can_change_preferences': False, }, ) diff --git a/api/providers/serializers.py b/api/providers/serializers.py index b10f8290bd8..673e22a1b0e 100644 --- a/api/providers/serializers.py +++ b/api/providers/serializers.py @@ -347,10 +347,6 @@ def create(self, validated_data): if bool(get_perms(user, provider)): raise ValidationError('Specified user is already a moderator.') - if 'claim_url' in context: - template = NotificationType.Type.PROVIDER_CONFIRM_EMAIL_MODERATION - else: - template = NotificationType.Type.PROVIDER_MODERATOR_ADDED perm_group = validated_data.pop('permission_group', '') if perm_group not in REVIEW_GROUPS: @@ -362,9 +358,12 @@ def create(self, validated_data): provider.add_to_group(user, perm_group) setattr(user, 'permission_group', perm_group) # Allows reserialization - print(template, context) + if 'claim_url' in context: + notification_type = NotificationType.Type.PROVIDER_CONFIRM_EMAIL_MODERATION + else: + notification_type = NotificationType.Type.PROVIDER_MODERATOR_ADDED NotificationType.objects.get( - name=template, + name=notification_type, ).emit( user=user, event_context=context, diff --git a/api/providers/tasks.py b/api/providers/tasks.py index b0a39c9c337..5891494cfb2 100644 --- a/api/providers/tasks.py +++ b/api/providers/tasks.py @@ -639,18 +639,6 @@ def bulk_upload_finish_job(upload, row_count, success_count, draft_errors, appro approval_errors.sort() if not dry_run: upload.save() - notification_type = None - event_context = { - 'initiator_fullname': 
initiator.fullname, - 'auto_approval': auto_approval, - 'count': row_count, - 'pending_submissions_url': get_registration_provider_submissions_url(provider), - 'draft_errors': draft_errors, - 'approval_errors': approval_errors, - 'successes': success_count, - 'failures': len(draft_errors), - 'osf_support_email': settings.OSF_SUPPORT_EMAIL, - } if upload.state == JobState.DONE_FULL: notification_type = NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_SUCCESS_ALL @@ -666,12 +654,22 @@ def bulk_upload_finish_job(upload, row_count, success_count, draft_errors, appro name=notification_type, ).emit( user=initiator, - event_context=event_context, + event_context={ + 'initiator_fullname': initiator.fullname, + 'auto_approval': auto_approval, + 'count': row_count, + 'pending_submissions_url': get_registration_provider_submissions_url(provider), + 'draft_errors': draft_errors, + 'approval_errors': approval_errors, + 'successes': success_count, + 'failures': len(draft_errors), + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + }, ) upload.email_sent = timezone.now() upload.save() - logger.info(f'Notification sent to bulk upload initiator [{initiator._id}]') + logger.info(f'Email sent to bulk upload initiator [{initiator._id}]') def handle_internal_error(initiator=None, provider=None, message=None, dry_run=True): diff --git a/api/users/views.py b/api/users/views.py index df2d2a215e6..8fabe5b54ee 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -825,29 +825,33 @@ def get(self, request, *args, **kwargs): if not email: raise ValidationError('Request must include email in query params.') - institutional = bool(request.query_params.get('institutional', None)) - mail_template = 'forgot_password' if not institutional else 'forgot_password_institution' - status_message = language.RESET_PASSWORD_SUCCESS_STATUS_MESSAGE.format(email=email) - kind = 'success' # check if the user exists user_obj = get_user(email=email) + institutional = bool(request.query_params.get('institutional', None)) if user_obj: # rate limit forgot_password_post if not throttle_period_expired(user_obj.email_last_sent, settings.SEND_EMAIL_THROTTLE): - status_message = 'You have recently requested to change your password. Please wait a few minutes ' \ - 'before trying again.' 
-                kind = 'error'
-                return Response({'message': status_message, 'kind': kind}, status=status.HTTP_429_TOO_MANY_REQUESTS)
+                return Response(
+                    {
+                        'message': language.THROTTLE_PASSWORD_CHANGE_ERROR_MESSAGE,
+                        'kind': 'error',
+                    },
+                    status=status.HTTP_429_TOO_MANY_REQUESTS,
+                )
             elif user_obj.is_active:
                 # new random verification key (v2)
                 user_obj.verification_key_v2 = generate_verification_key(verification_type='password')
                 user_obj.email_last_sent = timezone.now()
                 user_obj.save()
                 reset_link = f'{settings.RESET_PASSWORD_URL}{user_obj._id}/{user_obj.verification_key_v2['token']}/'
+                if institutional:
+                    notification_type = NotificationType.Type.USER_FORGOT_PASSWORD_INSTITUTION
+                else:
+                    notification_type = NotificationType.Type.USER_FORGOT_PASSWORD
 
-            NotificationType.objects.get(name=mail_template).emit(
+            NotificationType.objects.get(name=notification_type).emit(
                 user=user_obj,
                 message_frequency='instantly',
                 event_context={
@@ -856,7 +860,14 @@ def get(self, request, *args, **kwargs):
                 },
             )
 
-        return Response(status=status.HTTP_200_OK, data={'message': status_message, 'kind': kind, 'institutional': institutional})
+        return Response(
+            status=status.HTTP_200_OK,
+            data={
+                'message': language.RESET_PASSWORD_SUCCESS_STATUS_MESSAGE.format(email=email),
+                'kind': 'success',
+                'institutional': institutional,
+            },
+        )
 
     @method_decorator(csrf_protect)
     def post(self, request, *args, **kwargs):
diff --git a/tests/test_forgot_password.py b/tests/test_forgot_password.py
index 9ca6df4fdab..0a383d30fd9 100644
--- a/tests/test_forgot_password.py
+++ b/tests/test_forgot_password.py
@@ -168,7 +168,7 @@ def test_can_receive_reset_password_email(self):
 
         # check mail was sent
         assert len(notifications) == 1
-        assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL
+        assert notifications[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD_INSTITUTION
         # check http 200 response
         assert res.status_code == 200
         # check request URL is /forgotpassword
diff --git a/website/language.py b/website/language.py
index 80936924e6a..605773694d2 100644
--- a/website/language.py
+++ b/website/language.py
@@ -222,6 +222,9 @@
     'you should have, please contact OSF Support. '
 )
 
+THROTTLE_PASSWORD_CHANGE_ERROR_MESSAGE = \
+    'You have recently requested to change your password. Please wait a few minutes before trying again.'
+ SANCTION_STATUS_MESSAGES = { 'registration': { 'approve': 'Your registration approval has been accepted.', From d65288ecfa9b025fa62114d1cdf41f6a0d5f0118 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Sun, 27 Jul 2025 12:04:34 -0400 Subject: [PATCH 122/176] fix user claim and i forgot email notifications --- api/users/views.py | 4 +- api_tests/users/views/test_user_claim.py | 106 +++++++++++++---------- notifications.yaml | 4 + tests/test_forgot_password.py | 2 +- 4 files changed, 65 insertions(+), 51 deletions(-) diff --git a/api/users/views.py b/api/users/views.py index 8fabe5b54ee..7cd8947a79a 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -1085,7 +1085,7 @@ def _process_external_identity(self, user, external_identity, service_url): message_frequency='instantly', event_context={ 'can_change_preferences': False, - 'external_id_provider': provider.name, + 'external_id_provider': provider, }, ) enqueue_task(update_affiliation_for_orcid_sso_users.s(user._id, provider_id)) @@ -1408,7 +1408,7 @@ def post(self, request, *args, **kwargs): message_frequency='instantly', event_context={ 'can_change_preferences': False, - 'external_id_provider': provider.name, + 'external_id_provider': provider, }, ) diff --git a/api_tests/users/views/test_user_claim.py b/api_tests/users/views/test_user_claim.py index d5f5967df57..ddd7cfad4e5 100644 --- a/api_tests/users/views/test_user_claim.py +++ b/api_tests/users/views/test_user_claim.py @@ -5,14 +5,16 @@ from api.users.views import ClaimUser from api_tests.utils import only_supports_methods from framework.auth.core import Auth +from osf.models import NotificationType from osf_tests.factories import ( AuthUserFactory, ProjectFactory, PreprintFactory, ) +from tests.utils import capture_notifications + @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestClaimUser: @pytest.fixture() @@ -115,41 +117,47 @@ def test_claim_unauth_failure(self, app, url, unreg_user, project, wrong_preprin ) assert res.status_code == 401 - def test_claim_unauth_success_with_original_email(self, app, url, project, unreg_user, mock_send_grid): - mock_send_grid.reset_mock() - res = app.post_json_api( - url.format(unreg_user._id), - self.payload(email='david@david.son', id=project._id), - ) + def test_claim_unauth_success_with_original_email(self, app, url, project, unreg_user): + with capture_notifications() as notifications: + res = app.post_json_api( + url.format(unreg_user._id), + self.payload(email='david@david.son', id=project._id), + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_INVITE_DEFAULT assert res.status_code == 204 - assert mock_send_grid.call_count == 1 - def test_claim_unauth_success_with_claimer_email(self, app, url, unreg_user, project, claimer, mock_send_grid): - mock_send_grid.reset_mock() - res = app.post_json_api( - url.format(unreg_user._id), - self.payload(email=claimer.username, id=project._id) - ) + def test_claim_unauth_success_with_claimer_email(self, app, url, unreg_user, project, claimer): + with capture_notifications() as notifications: + res = app.post_json_api( + url.format(unreg_user._id), + self.payload(email=claimer.username, id=project._id) + ) assert res.status_code == 204 - assert mock_send_grid.call_count == 2 - - def test_claim_unauth_success_with_unknown_email(self, app, url, project, unreg_user, mock_send_grid): - mock_send_grid.reset_mock() - res = app.post_json_api( - url.format(unreg_user._id), - self.payload(email='asdf@fdsa.com', id=project._id), - 
) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.USER_FORWARD_INVITE_REGISTERED + assert notifications[1]['type'] == NotificationType.Type.USER_PENDING_VERIFICATION_REGISTERED + + def test_claim_unauth_success_with_unknown_email(self, app, url, project, unreg_user): + with capture_notifications() as notifications: + res = app.post_json_api( + url.format(unreg_user._id), + self.payload(email='asdf@fdsa.com', id=project._id), + ) assert res.status_code == 204 - assert mock_send_grid.call_count == 2 - - def test_claim_unauth_success_with_preprint_id(self, app, url, preprint, unreg_user, mock_send_grid): - mock_send_grid.reset_mock() - res = app.post_json_api( - url.format(unreg_user._id), - self.payload(email='david@david.son', id=preprint._id), - ) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.USER_PENDING_VERIFICATION + assert notifications[1]['type'] == NotificationType.Type.USER_FORWARD_INVITE + + def test_claim_unauth_success_with_preprint_id(self, app, url, preprint, unreg_user): + with capture_notifications() as notifications: + res = app.post_json_api( + url.format(unreg_user._id), + self.payload(email='david@david.son', id=preprint._id), + ) assert res.status_code == 204 - assert mock_send_grid.call_count == 1 + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_INVITE_DEFAULT def test_claim_auth_failure(self, app, url, claimer, wrong_preprint, project, unreg_user, referrer): _url = url.format(unreg_user._id) @@ -208,26 +216,28 @@ def test_claim_auth_failure(self, app, url, claimer, wrong_preprint, project, un ) assert res.status_code == 403 - def test_claim_auth_throttle_error(self, app, url, claimer, unreg_user, project, mock_send_grid): + def test_claim_auth_throttle_error(self, app, url, claimer, unreg_user, project): unreg_user.unclaimed_records[project._id]['last_sent'] = timezone.now() unreg_user.save() - mock_send_grid.reset_mock() - res = app.post_json_api( - url.format(unreg_user._id), - self.payload(id=project._id), - auth=claimer.auth, - expect_errors=True - ) + with capture_notifications() as notifications: + res = app.post_json_api( + url.format(unreg_user._id), + self.payload(id=project._id), + auth=claimer.auth, + expect_errors=True + ) + assert not notifications assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'User account can only be claimed with an existing user once every 24 hours' - assert mock_send_grid.call_count == 0 - def test_claim_auth_success(self, app, url, claimer, unreg_user, project, mock_send_grid): - mock_send_grid.reset_mock() - res = app.post_json_api( - url.format(unreg_user._id), - self.payload(id=project._id), - auth=claimer.auth - ) + def test_claim_auth_success(self, app, url, claimer, unreg_user, project): + with capture_notifications() as notifications: + res = app.post_json_api( + url.format(unreg_user._id), + self.payload(id=project._id), + auth=claimer.auth + ) assert res.status_code == 204 - assert mock_send_grid.call_count == 2 + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.USER_FORWARD_INVITE_REGISTERED + assert notifications[1]['type'] == NotificationType.Type.USER_PENDING_VERIFICATION_REGISTERED diff --git a/notifications.yaml b/notifications.yaml index 5be39abc492..8b3e1fc7ea3 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -108,6 +108,10 @@ notification_types: __docs__: ... 
object_content_type_model_name: osfuser template: 'website/templates/emails/duplicate_accounts_sso_osf4i.html.mako' + - name: user_forgot_password + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/forgot_password.html.mako' - name: user_forgot_password_institution __docs__: ... object_content_type_model_name: osfuser diff --git a/tests/test_forgot_password.py b/tests/test_forgot_password.py index 0a383d30fd9..4d00f70d688 100644 --- a/tests/test_forgot_password.py +++ b/tests/test_forgot_password.py @@ -50,7 +50,7 @@ def test_can_receive_reset_password_email(self): res = form.submit(self.app) # check mail was sent assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL + assert notifications[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD # check http 200 response assert res.status_code == 200 # check request URL is /forgotpassword From 9501714f2230fe23ddb19a89fea0aa9db7d33ee2 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Sun, 27 Jul 2025 16:22:21 -0400 Subject: [PATCH 123/176] remove old notification mocking and simplify celery config --- addons/boa/tasks.py | 239 ++++++++++++----- addons/boa/tests/test_tasks.py | 75 +++--- .../views/test_crossref_email_response.py | 46 ++-- ...est_draft_registration_contributor_list.py | 11 +- .../views/test_draft_registration_list.py | 13 +- .../test_institution_relationship_nodes.py | 1 - api_tests/mailhog/test_mailhog.py | 8 +- .../views/test_node_contributors_list.py | 28 +- api_tests/nodes/views/test_node_forks_list.py | 23 +- .../test_node_relationship_institutions.py | 17 +- .../views/test_preprint_contributors_list.py | 55 ++-- ...est_collections_provider_moderator_list.py | 41 +-- .../test_preprint_provider_moderator_list.py | 21 +- api_tests/providers/tasks/test_bulk_upload.py | 1 - .../views/test_registration_detail.py | 9 +- .../test_node_request_institutional_access.py | 61 ++--- .../requests/views/test_node_request_list.py | 1 - .../views/test_preprint_request_list.py | 7 - .../views/test_request_actions_create.py | 36 +-- api_tests/users/views/test_user_confirm.py | 99 ++++--- api_tests/users/views/test_user_list.py | 129 ++++----- .../test_user_message_institutional_access.py | 81 +++--- api_tests/users/views/test_user_settings.py | 22 +- .../users/views/test_user_settings_detail.py | 9 +- .../test_user_settings_reset_password.py | 22 +- conftest.py | 33 --- docker-compose.yml | 4 +- framework/email/__init__.py | 0 framework/email/tasks.py | 227 ---------------- notifications.yaml | 10 +- osf/email/__init__.py | 40 ++- osf/models/notification.py | 19 +- osf/models/notification_type.py | 12 +- osf/models/user_message.py | 18 +- .../test_check_crossref_dois.py | 13 +- .../test_email_all_users.py | 34 ++- osf_tests/test_archiver.py | 107 ++++---- osf_tests/test_collection.py | 21 +- osf_tests/test_collection_submission.py | 2 - osf_tests/test_institution.py | 21 +- osf_tests/test_merging_users.py | 12 +- osf_tests/test_queued_mail.py | 155 ----------- osf_tests/test_sanctions.py | 2 - osf_tests/test_schema_responses.py | 123 +++++---- osf_tests/test_user.py | 32 ++- .../test_deactivate_requested_accounts.py | 19 +- scripts/tests/test_send_queued_mails.py | 84 ------ tests/base.py | 3 - tests/framework_tests/test_email.py | 108 -------- tests/test_auth.py | 44 ++-- tests/test_auth_views.py | 4 - tests/test_misc_views.py | 23 +- tests/test_preprints.py | 8 +- tests/test_registrations/test_embargoes.py | 11 +- 
tests/test_registrations/test_retractions.py | 36 +-- tests/test_spam_mixin.py | 20 +- tests/test_user_profile_view.py | 3 - website/mails/mails.py | 21 +- website/notifications/tasks.py | 227 ---------------- website/settings/defaults.py | 245 +++++++----------- website/settings/local-ci.py | 1 - website/settings/local-dist.py | 1 - 62 files changed, 1027 insertions(+), 1771 deletions(-) delete mode 100644 framework/email/__init__.py delete mode 100644 framework/email/tasks.py delete mode 100644 osf_tests/test_queued_mail.py delete mode 100644 scripts/tests/test_send_queued_mails.py delete mode 100644 tests/framework_tests/test_email.py delete mode 100644 website/notifications/tasks.py diff --git a/addons/boa/tasks.py b/addons/boa/tasks.py index a64110e69b5..4b8753e5b39 100644 --- a/addons/boa/tasks.py +++ b/addons/boa/tasks.py @@ -1,9 +1,7 @@ -import asyncio from http.client import HTTPException import logging import time -from asgiref.sync import async_to_sync, sync_to_async from boaapi.boa_client import BoaClient, BoaException from boaapi.status import CompilerStatus, ExecutionStatus from urllib import request @@ -14,10 +12,9 @@ from addons.boa.boa_error_code import BoaErrorCode from framework import sentry from framework.celery_tasks import app as celery_app -from osf.models import OSFUser +from osf.models import OSFUser, NotificationType from osf.utils.fields import ensure_str, ensure_bytes from website import settings as osf_settings -from website.mails import send_mail, ADDONS_BOA_JOB_COMPLETE, ADDONS_BOA_JOB_FAILURE logger = logging.getLogger(__name__) @@ -38,14 +35,34 @@ def submit_to_boa(host, username, password, user_guid, project_guid, * Running asyncio in celery is tricky. Refer to the discussion below for details: * https://stackoverflow.com/questions/39815771/how-to-combine-celery-with-asyncio """ - return async_to_sync(submit_to_boa_async)(host, username, password, user_guid, project_guid, - query_dataset, query_file_name, file_size, file_full_path, - query_download_url, output_upload_url) + return _submit_to_boa( + host, + username, + password, + user_guid, + project_guid, + query_dataset, + query_file_name, + file_size, + file_full_path, + query_download_url, + output_upload_url + ) -async def submit_to_boa_async(host, username, password, user_guid, project_guid, - query_dataset, query_file_name, file_size, file_full_path, - query_download_url, output_upload_url): +def _submit_to_boa( + host, + username, + password, + user_guid, + project_guid, + query_dataset, + query_file_name, + file_size, + file_full_path, + query_download_url, + output_upload_url +): """ Download Boa query file, submit it to Boa API, wait for Boa to finish the job and upload result output to OSF. Send success / failure emails notifications. @@ -55,19 +72,27 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, * See notes in ``submit_to_boa()`` for details. 
""" - logger.debug('>>>>>>>> Task begins') - user = await sync_to_async(OSFUser.objects.get)(guids___id=user_guid) - cookie_value = (await sync_to_async(user.get_or_create_cookie)()).decode() + user = OSFUser.objects.get(guids___id=user_guid) + cookie_value = user.get_or_create_cookie().decode() project_url = f'{osf_settings.DOMAIN}{project_guid}/' - output_file_name = query_file_name.replace('.boa', boa_settings.OUTPUT_FILE_SUFFIX) + output_file_name = query_file_name.replace( + '.boa', + boa_settings.OUTPUT_FILE_SUFFIX + ) if file_size > boa_settings.MAX_SUBMISSION_SIZE: message = f'Boa query file too large to submit: user=[{user_guid}], project=[{project_guid}], ' \ f'file_name=[{query_file_name}], file_size=[{file_size}], ' \ f'full_path=[{file_full_path}], url=[{query_download_url}] ...' - await sync_to_async(handle_boa_error)(message, BoaErrorCode.FILE_TOO_LARGE_ERROR, - user.username, user.fullname, project_url, file_full_path, - query_file_name=query_file_name, file_size=file_size) + handle_boa_error( + message, + BoaErrorCode.FILE_TOO_LARGE_ERROR, + user, + project_url, + file_full_path, + query_file_name=query_file_name, + file_size=file_size + ) return BoaErrorCode.FILE_TOO_LARGE_ERROR logger.debug(f'Downloading Boa query file: user=[{user_guid}], project=[{project_guid}], ' @@ -79,8 +104,14 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, except (ValueError, HTTPError, URLError, HTTPException): message = f'Failed to download Boa query file: user=[{user_guid}], project=[{project_guid}], ' \ f'file_name=[{query_file_name}], full_path=[{file_full_path}], url=[{query_download_url}] ...' - await sync_to_async(handle_boa_error)(message, BoaErrorCode.UNKNOWN, user.username, user.fullname, - project_url, file_full_path, query_file_name=query_file_name) + handle_boa_error( + message, + BoaErrorCode.UNKNOWN, + user, + project_url, + file_full_path, + query_file_name=query_file_name + ) return BoaErrorCode.UNKNOWN logger.info('Boa query successfully downloaded.') logger.debug(f'Boa query:\n########\n{boa_query}\n########') @@ -93,8 +124,14 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, except BoaException: # Don't call `client.close()`, since it will fail with `BoaException` if `client.login()` fails message = f'Boa login failed: boa_username=[{username}], boa_host=[{host}]!' - await sync_to_async(handle_boa_error)(message, BoaErrorCode.AUTHN_ERROR, user.username, user.fullname, - project_url, file_full_path, query_file_name=query_file_name) + handle_boa_error( + message, + BoaErrorCode.AUTHN_ERROR, + user, + project_url, + file_full_path, + query_file_name=query_file_name + ) return BoaErrorCode.AUTHN_ERROR logger.info('Boa login completed.') @@ -104,8 +141,14 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, except BoaException: client.close() message = f'Failed to retrieve or verify the target Boa dataset: dataset=[{query_dataset}]!' 
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.UNKNOWN, user.username, user.fullname, - project_url, file_full_path, query_file_name=query_file_name) + handle_boa_error( + message, + BoaErrorCode.UNKNOWN, + user, + project_url, + file_full_path, + query_file_name=query_file_name + ) return BoaErrorCode.UNKNOWN logger.info('Boa dataset retrieved.') @@ -116,8 +159,14 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, except BoaException: client.close() message = f'Failed to submit the query to Boa API: : boa_host=[{host}], dataset=[{query_dataset}]!' - await sync_to_async(handle_boa_error)(message, BoaErrorCode.UNKNOWN, user.username, user.fullname, - project_url, file_full_path, query_file_name=query_file_name) + handle_boa_error( + message, + BoaErrorCode.UNKNOWN, + user, + project_url, + file_full_path, + query_file_name=query_file_name + ) return BoaErrorCode.UNKNOWN logger.info('Query successfully submitted.') logger.debug(f'Waiting for job to finish: job_id=[{str(boa_job.id)}] ...') @@ -125,26 +174,44 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, if time.time() - start_time > boa_settings.MAX_JOB_WAITING_TIME: client.close() message = f'Boa job did not complete in time: job_id=[{str(boa_job.id)}]!' - await sync_to_async(handle_boa_error)(message, BoaErrorCode.JOB_TIME_OUT_ERROR, - user.username, user.fullname, project_url, file_full_path, - query_file_name=query_file_name, job_id=boa_job.id) + handle_boa_error( + message, + BoaErrorCode.JOB_TIME_OUT_ERROR, + user, + project_url, + file_full_path, + query_file_name=query_file_name, + job_id=boa_job.id + ) return BoaErrorCode.JOB_TIME_OUT_ERROR logger.debug(f'Boa job still running, waiting 10s: job_id=[{str(boa_job.id)}] ...') boa_job.refresh() - await asyncio.sleep(boa_settings.REFRESH_JOB_INTERVAL) + time.sleep(boa_settings.REFRESH_JOB_INTERVAL) if boa_job.compiler_status is CompilerStatus.ERROR: client.close() message = f'Boa job failed with compile error: job_id=[{str(boa_job.id)}]!' - await sync_to_async(handle_boa_error)(message, BoaErrorCode.QUERY_ERROR, user.username, - user.fullname, project_url, file_full_path, - query_file_name=query_file_name, job_id=boa_job.id) + handle_boa_error( + message, + BoaErrorCode.QUERY_ERROR, + user, + project_url, + file_full_path, + query_file_name=query_file_name, + job_id=boa_job.id + ) return BoaErrorCode.QUERY_ERROR elif boa_job.exec_status is ExecutionStatus.ERROR: client.close() message = f'Boa job failed with execution error: job_id=[{str(boa_job.id)}]!' - await sync_to_async(handle_boa_error)(message, BoaErrorCode.QUERY_ERROR, user.username, - user.fullname, project_url, file_full_path, - query_file_name=query_file_name, job_id=boa_job.id) + handle_boa_error( + message, + BoaErrorCode.QUERY_ERROR, + user, + project_url, + file_full_path, + query_file_name=query_file_name, + job_id=boa_job.id + ) return BoaErrorCode.QUERY_ERROR else: try: @@ -152,9 +219,15 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, except BoaException: client.close() message = f'Boa job output is not available: job_id=[{str(boa_job.id)}]!' 
- await sync_to_async(handle_boa_error)(message, BoaErrorCode.OUTPUT_ERROR, user.username, - user.fullname, project_url, file_full_path, - query_file_name=query_file_name, job_id=boa_job.id) + handle_boa_error( + message, + BoaErrorCode.OUTPUT_ERROR, + user, + project_url, + file_full_path, + query_file_name=query_file_name, + job_id=boa_job.id + ) return BoaErrorCode.OUTPUT_ERROR logger.info('Boa job finished.') logger.debug(f'Boa job output: job_id=[{str(boa_job.id)}]\n########\n{boa_job_output}\n########') @@ -177,31 +250,50 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, message += f', http_error=[{e.code}: {e.reason}]' if e.code == 409: error_code = BoaErrorCode.UPLOAD_ERROR_CONFLICT - await sync_to_async(handle_boa_error)(message, error_code, user.username, user.fullname, project_url, - file_full_path, query_file_name=query_file_name, - output_file_name=output_file_name, job_id=boa_job.id) + handle_boa_error( + message, + error_code, + user, + project_url, + file_full_path, + query_file_name=query_file_name, + output_file_name=output_file_name, + job_id=boa_job.id + ) return error_code logger.info('Successfully uploaded query output to OSF.') logger.debug('Task ends <<<<<<<<') - await sync_to_async(send_mail)( - to_addr=user.username, - mail=ADDONS_BOA_JOB_COMPLETE, - fullname=user.fullname, - query_file_name=query_file_name, - query_file_full_path=file_full_path, - output_file_name=output_file_name, - job_id=boa_job.id, - project_url=project_url, - boa_job_list_url=boa_settings.BOA_JOB_LIST_URL, - boa_support_email=boa_settings.BOA_SUPPORT_EMAIL, - osf_support_email=osf_settings.OSF_SUPPORT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.ADDONS_BOA_JOB_COMPLETE + ).emit( + user=user, + event_context={ + 'fullname': user.fullname, + 'query_file_name': query_file_name, + 'query_file_full_path': file_full_path, + 'output_file_name': output_file_name, + 'job_id': boa_job.id, + 'project_url': project_url, + 'boa_job_list_url': boa_settings.BOA_JOB_LIST_URL, + 'boa_support_email': boa_settings.BOA_SUPPORT_EMAIL, + 'osf_support_email': osf_settings.OSF_SUPPORT_EMAIL, + } ) return BoaErrorCode.NO_ERROR -def handle_boa_error(message, code, username, fullname, project_url, query_file_full_path, - query_file_name=None, file_size=None, output_file_name=None, job_id=None): +def handle_boa_error( + message, + code, + user, + project_url, + query_file_full_path, + query_file_name=None, + file_size=None, + output_file_name=None, + job_id=None +): """Handle Boa and WB API errors and send emails. 
""" logger.error(message) @@ -209,22 +301,25 @@ def handle_boa_error(message, code, username, fullname, project_url, query_file_ sentry.log_message(message, skip_session=True) except Exception: pass - send_mail( - to_addr=username, - mail=ADDONS_BOA_JOB_FAILURE, - fullname=fullname, - code=code, - message=message, - query_file_name=query_file_name, - file_size=file_size, - max_file_size=boa_settings.MAX_SUBMISSION_SIZE, - query_file_full_path=query_file_full_path, - output_file_name=output_file_name, - job_id=job_id, - max_job_wait_hours=boa_settings.MAX_JOB_WAITING_TIME / 3600, - project_url=project_url, - boa_job_list_url=boa_settings.BOA_JOB_LIST_URL, - boa_support_email=boa_settings.BOA_SUPPORT_EMAIL, - osf_support_email=osf_settings.OSF_SUPPORT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.ADDONS_BOA_JOB_FAILURE + ).emit( + user=user, + event_context={ + 'fullname': user.fullname, + 'code': code, + 'query_file_name': query_file_name, + 'file_size': file_size, + 'max_file_size': boa_settings.MAX_SUBMISSION_SIZE, + 'query_file_full_path': query_file_full_path, + 'output_file_name': output_file_name, + 'job_id': job_id, + 'max_job_wait_hours': boa_settings.MAX_JOB_WAITING_TIME / 3600, + 'project_url': project_url, + 'boa_job_list_url': boa_settings.BOA_JOB_LIST_URL, + 'boa_support_email': boa_settings.BOA_SUPPORT_EMAIL, + 'osf_support_email': osf_settings.OSF_SUPPORT_EMAIL, + + } ) return code diff --git a/addons/boa/tests/test_tasks.py b/addons/boa/tests/test_tasks.py index b2dcd6d86bc..f31185fa789 100644 --- a/addons/boa/tests/test_tasks.py +++ b/addons/boa/tests/test_tasks.py @@ -9,10 +9,11 @@ from addons.boa import settings as boa_settings from addons.boa.boa_error_code import BoaErrorCode from addons.boa.tasks import submit_to_boa, submit_to_boa_async, handle_boa_error +from osf.models import NotificationType from osf_tests.factories import AuthUserFactory, ProjectFactory from tests.base import OsfTestCase +from tests.utils import capture_notifications from website import settings as osf_settings -from website.mails import ADDONS_BOA_JOB_COMPLETE, ADDONS_BOA_JOB_FAILURE DEFAULT_REFRESH_JOB_INTERVAL = boa_settings.REFRESH_JOB_INTERVAL DEFAULT_MAX_JOB_WAITING_TIME = boa_settings.MAX_JOB_WAITING_TIME @@ -38,12 +39,6 @@ def setUp(self): self.output_file_name = 'fake_boa_script_results.txt' self.job_id = '1a2b3c4d5e6f7g8' - from conftest import start_mock_send_grid - self.mock_send_grid = start_mock_send_grid(self) - - def tearDown(self): - super().tearDown() - def test_boa_error_code(self): assert BoaErrorCode.NO_ERROR == -1 assert BoaErrorCode.UNKNOWN == 0 @@ -55,24 +50,25 @@ def test_boa_error_code(self): assert BoaErrorCode.FILE_TOO_LARGE_ERROR == 6 assert BoaErrorCode.JOB_TIME_OUT_ERROR == 7 - @mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) def test_handle_boa_error(self): with mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \ mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error: - return_value = handle_boa_error( - self.error_message, - BoaErrorCode.UNKNOWN, - self.user_username, - self.user_fullname, - self.project_url, - self.file_full_path, - query_file_name=self.query_file_name, - file_size=self.file_size, - output_file_name=self.output_file_name, - job_id=self.job_id - ) - self.mock_send_grid.assert_called() + with capture_notifications() as notifications: + return_value = handle_boa_error( + self.error_message, + 
BoaErrorCode.UNKNOWN,
+                    AuthUserFactory(username=self.user_username,
+                                    fullname=self.user_fullname),
+                    self.project_url,
+                    self.file_full_path,
+                    query_file_name=self.query_file_name,
+                    file_size=self.file_size,
+                    output_file_name=self.output_file_name,
+                    job_id=self.job_id
+                )
+            assert len(notifications) == 1
+            assert notifications[0]['type'] == NotificationType.Type.ADDONS_BOA_JOB_FAILURE
             mock_sentry_log_message.assert_called_with(self.error_message, skip_session=True)
             mock_logger_error.assert_called_with(self.error_message)
             assert return_value == BoaErrorCode.UNKNOWN
@@ -154,13 +150,6 @@ def setUp(self):
         boa_settings.REFRESH_JOB_INTERVAL = DEFAULT_REFRESH_JOB_INTERVAL
         boa_settings.MAX_JOB_WAITING_TIME = DEFAULT_MAX_JOB_WAITING_TIME
 
-        from conftest import start_mock_send_grid
-        self.mock_send_grid = start_mock_send_grid(self)
-
-    def tearDown(self):
-        super().tearDown()
-
-    @mock.patch('website.mails.settings.USE_EMAIL', True)
     @mock.patch('website.mails.settings.USE_CELERY', False)
     async def test_submit_success(self):
         with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
@@ -172,25 +161,27 @@ async def test_submit_success(self):
                 mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \
                 mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None) as mock_async_sleep, \
                 mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error:
-            return_value = await submit_to_boa_async(
-                self.host,
-                self.username,
-                self.password,
-                self.user_guid,
-                self.project_guid,
-                self.query_dataset,
-                self.query_file_name,
-                self.file_size,
-                self.file_full_path,
-                self.query_download_url,
-                self.output_upload_url,
-            )
+            with capture_notifications() as notifications:
+                return_value = await submit_to_boa_async(
+                    self.host,
+                    self.username,
+                    self.password,
+                    self.user_guid,
+                    self.project_guid,
+                    self.query_dataset,
+                    self.query_file_name,
+                    self.file_size,
+                    self.file_full_path,
+                    self.query_download_url,
+                    self.output_upload_url,
+                )
+            assert len(notifications) == 1
+            assert notifications[0]['type'] == NotificationType.Type.ADDONS_BOA_JOB_COMPLETE
             assert return_value == BoaErrorCode.NO_ERROR
             assert self.mock_job.is_running.call_count == 5
             assert self.mock_job.refresh.call_count == 4
             assert mock_async_sleep.call_count == 4
             mock_close.assert_called()
-            self.mock_send_grid.assert_called()
             mock_handle_boa_error.assert_not_called()
 
     async def test_download_error(self):
diff --git a/api_tests/crossref/views/test_crossref_email_response.py b/api_tests/crossref/views/test_crossref_email_response.py
index 775a0045c06..e2a2b705362 100644
--- a/api_tests/crossref/views/test_crossref_email_response.py
+++ b/api_tests/crossref/views/test_crossref_email_response.py
@@ -5,12 +5,13 @@
 
 from django.utils import timezone
 
+from osf.models import NotificationType
 from osf_tests import factories
+from tests.utils import capture_notifications
 from website import settings
 
 
 @pytest.mark.django_db
-@pytest.mark.usefixtures('mock_send_grid')
 class TestCrossRefEmailResponse:
 
     def make_mailgun_payload(self, crossref_response):
@@ -155,39 +156,40 @@ def test_wrong_request_context_raises_permission_error(self, app, url, error_xml
 
         assert response.status_code == 400
 
-    def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, error_xml, mock_send_grid):
+    def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, error_xml):
         assert not preprint.get_identifier_value('doi')
         context_data =
self.make_mailgun_payload(crossref_response=error_xml) - app.post(url, context_data) - assert mock_send_grid.called + with capture_notifications() as notifications: + app.post(url, context_data) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED assert not preprint.get_identifier_value('doi') - def test_success_response_sets_doi(self, app, url, preprint, success_xml, mock_send_grid): + def test_success_response_sets_doi(self, app, url, preprint, success_xml): assert not preprint.get_identifier_value('doi') context_data = self.make_mailgun_payload(crossref_response=success_xml) - mock_send_grid.reset_mock() - app.post(url, context_data) + with capture_notifications() as notifications: + app.post(url, context_data) + assert not notifications preprint.reload() - assert not mock_send_grid.called assert preprint.get_identifier_value('doi') assert preprint.preprint_doi_created - def test_update_success_response(self, app, preprint, url, mock_send_grid): + def test_update_success_response(self, app, preprint, url): initial_value = 'TempDOIValue' preprint.set_identifier_value(category='doi', value=initial_value) update_xml = self.update_success_xml(preprint) context_data = self.make_mailgun_payload(crossref_response=update_xml) - mock_send_grid.reset_mock() - app.post(url, context_data) - - assert not mock_send_grid.called + with capture_notifications() as notifications: + app.post(url, context_data) + assert not notifications assert preprint.get_identifier_value(category='doi') != initial_value - def test_update_success_does_not_set_preprint_doi_created(self, app, preprint, url, mock_send_grid): + def test_update_success_does_not_set_preprint_doi_created(self, app, preprint, url): preprint.set_identifier_value(category='doi', value='test') preprint.preprint_doi_created = timezone.now() preprint.save() @@ -212,14 +214,14 @@ def test_success_batch_response(self, app, url): for preprint in preprint_list: assert preprint.get_identifier_value('doi') == settings.DOI_FORMAT.format(prefix=provider.doi_prefix, guid=preprint._id) - def test_confirmation_marks_legacy_doi_as_deleted(self, app, url, preprint, mock_send_grid): - legacy_value = 'IAmALegacyDOI' - preprint.set_identifier_value(category='legacy_doi', value=legacy_value) - update_xml = self.update_success_xml(preprint) + def test_confirmation_marks_legacy_doi_as_deleted(self, app, url, preprint): + with capture_notifications() as notifications: + legacy_value = 'IAmALegacyDOI' + preprint.set_identifier_value(category='legacy_doi', value=legacy_value) + update_xml = self.update_success_xml(preprint) - context_data = self.make_mailgun_payload(crossref_response=update_xml) - mock_send_grid.reset_mock() - app.post(url, context_data) + context_data = self.make_mailgun_payload(crossref_response=update_xml) + app.post(url, context_data) - assert not mock_send_grid.called + assert not notifications assert preprint.identifiers.get(category='legacy_doi').deleted diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py index bf4d211a8d7..090993add28 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py @@ -265,8 +265,7 @@ def test_add_contributor_signal_if_default( assert res.json['errors'][0]['detail'] == 'default is not a valid email preference.' 
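
The test conversions above and below assert against notifications produced by the pattern introduced in addons/boa/tasks.py earlier in this patch: look up a NotificationType by its Type member and emit it with the recipient and a template context, instead of calling website.mails.send_mail. A condensed sketch of that call shape follows; the wrapper function name is hypothetical and the context is trimmed to a few of the keys the patch actually passes.

from osf.models import NotificationType
from website import settings as osf_settings


def notify_boa_job_complete(user, project_url, job_id):
    # Hypothetical wrapper; in the patch this logic lives inline in addons/boa/tasks.py.
    NotificationType.objects.get(
        name=NotificationType.Type.ADDONS_BOA_JOB_COMPLETE
    ).emit(
        user=user,  # recipient OSFUser, replacing to_addr=user.username / fullname=user.fullname
        event_context={  # template variables, replacing the former send_mail(**kwargs)
            'fullname': user.fullname,
            'project_url': project_url,
            'job_id': job_id,
            'osf_support_email': osf_settings.OSF_SUPPORT_EMAIL,
        },
    )
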
# Overrides TestNodeContributorCreateEmail - def test_add_unregistered_contributor_sends_email( - self, mock_send_grid, app, user, url_project_contribs): + def test_add_unregistered_contributor_sends_email(self, app, user, url_project_contribs): with capture_notifications() as notifications: res = app.post_json_api( f'{url_project_contribs}?send_email=draft_registration', @@ -305,8 +304,7 @@ def test_add_unregistered_contributor_signal_if_default(self, app, user, url_pro assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION # Overrides TestNodeContributorCreateEmail - def test_add_unregistered_contributor_without_email_no_email( - self, mock_send_grid, app, user, url_project_contribs): + def test_add_unregistered_contributor_without_email_no_email(self, app, user, url_project_contribs): url = f'{url_project_contribs}?send_email=draft_registration' payload = { 'data': { @@ -318,10 +316,11 @@ def test_add_unregistered_contributor_without_email_no_email( } with capture_signals() as mock_signal: - res = app.post_json_api(url, payload, auth=user.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=user.auth) + assert not notifications assert contributor_added in mock_signal.signals_sent() assert res.status_code == 201 - assert mock_send_grid.call_count == 0 class TestDraftContributorBulkCreate(DraftRegistrationCRUDTestCase, TestNodeContributorBulkCreate): diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index 85842f1e0a6..b90493825ee 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -158,7 +158,6 @@ def test_draft_with_deleted_registered_node_shows_up_in_draft_list( assert data[0]['attributes']['registration_metadata'] == {} -@pytest.mark.usefixtures('mock_send_grid') class TestDraftRegistrationCreateWithNode(AbstractDraftRegistrationTestCase): @pytest.fixture() @@ -337,11 +336,10 @@ def test_logged_in_non_contributor_cannot_create_draft( ) assert res.status_code == 403 - def test_create_project_based_draft_does_not_email_initiator(self, app, user, url_draft_registrations, payload, mock_send_grid): - mock_send_grid.reset_mock() - app.post_json_api(f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, auth=user.auth) - - assert not mock_send_grid.called + def test_create_project_based_draft_does_not_email_initiator(self, app, user, url_draft_registrations, payload): + with capture_notifications() as notifications: + app.post_json_api(f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, auth=user.auth) + assert not notifications def test_affiliated_institutions_are_copied_from_node_no_institutions(self, app, user, url_draft_registrations, payload): """ @@ -403,7 +401,6 @@ def test_affiliated_institutions_are_copied_from_user(self, app, user, url_draft assert list(draft_registration.affiliated_institutions.all()) == list(user.get_affiliated_institutions()) -@pytest.mark.usefixtures('mock_send_grid') class TestDraftRegistrationCreateWithoutNode(AbstractDraftRegistrationTestCase): @pytest.fixture() def url_draft_registrations(self): @@ -430,7 +427,7 @@ def test_admin_can_create_draft( assert draft.creator == user assert draft.has_permission(user, ADMIN) is True - def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, 
payload, mock_send_grid): + def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload): # Intercepting the send_mail call from website.project.views.contributor.notify_added_contributor with capture_notifications() as notifications: app.post_json_api( diff --git a/api_tests/institutions/views/test_institution_relationship_nodes.py b/api_tests/institutions/views/test_institution_relationship_nodes.py index c025407ab78..5acf8a39fd5 100644 --- a/api_tests/institutions/views/test_institution_relationship_nodes.py +++ b/api_tests/institutions/views/test_institution_relationship_nodes.py @@ -27,7 +27,6 @@ def make_registration_payload(*node_ids): @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestInstitutionRelationshipNodes: @pytest.fixture() diff --git a/api_tests/mailhog/test_mailhog.py b/api_tests/mailhog/test_mailhog.py index b911eea9b5c..997947f9588 100644 --- a/api_tests/mailhog/test_mailhog.py +++ b/api_tests/mailhog/test_mailhog.py @@ -1,6 +1,7 @@ import requests import pytest -from website.mails import send_mail, TEST +from django.core.mail import send_mail +from website.mails import TEST from waffle.testutils import override_switch from osf import features from website import settings @@ -22,10 +23,9 @@ @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestMailHog: - def test_mailhog_received_mail(self, mock_send_grid): + def test_mailhog_received_mail(self): with override_switch(features.ENABLE_MAILHOG, active=True): mailhog_v1 = f'{settings.MAILHOG_API_HOST}/api/v1/messages' mailhog_v2 = f'{settings.MAILHOG_API_HOST}/api/v2/messages' @@ -36,12 +36,10 @@ def test_mailhog_received_mail(self, mock_send_grid): assert res['count'] == 1 assert res['items'][0]['Content']['Headers']['To'][0] == 'to_addr@mail.com' assert res['items'][0]['Content']['Headers']['Subject'][0] == 'A test email to Mailhog' - mock_send_grid.assert_called() requests.delete(mailhog_v1) @pytest.mark.django_db -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.ENABLE_TEST_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthMailhog(OsfTestCase): diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index c4c7d63c7f5..4d29857676d 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -1209,7 +1209,6 @@ def test_add_contributor_validation( @pytest.mark.django_db @pytest.mark.enable_bookmark_creation @pytest.mark.enable_enqueue_task -@pytest.mark.usefixtures('mock_send_grid') class TestNodeContributorCreateEmail(NodeCRUDTestCase): @pytest.fixture() @@ -1217,7 +1216,7 @@ def url_project_contribs(self, project_public): return f'/{API_BASE}nodes/{project_public._id}/contributors/' def test_add_contributor_no_email_if_false( - self, mock_send_grid, app, user, url_project_contribs + self, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=false' payload = { @@ -1226,12 +1225,13 @@ def test_add_contributor_no_email_if_false( 'attributes': {'full_name': 'Kanye West', 'email': 'kanye@west.com'}, } } - res = app.post_json_api(url, payload, auth=user.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=user.auth) + assert not notifications assert res.status_code == 201 - assert mock_send_grid.call_count == 0 def test_add_contributor_sends_email( - self, 
mock_send_grid, app, user, user_two, url_project_contribs + self, app, user, user_two, url_project_contribs ): with capture_notifications() as notifications: res = app.post_json_api( @@ -1290,7 +1290,7 @@ def test_add_contributor_signal_preprint_email_disallowed( ) def test_add_unregistered_contributor_sends_email( - self, mock_send_grid, app, user, url_project_contribs + self, app, user, url_project_contribs ): with capture_notifications() as notifications: res = app.post_json_api( @@ -1347,7 +1347,7 @@ def test_add_unregistered_contributor_signal_preprint_email_disallowed( ) def test_add_contributor_invalid_send_email_param( - self, mock_send_grid, app, user, url_project_contribs + self, app, user, url_project_contribs ): url = f'{url_project_contribs}?send_email=true' payload = { @@ -1356,16 +1356,15 @@ def test_add_contributor_invalid_send_email_param( 'attributes': {'full_name': 'Kanye West', 'email': 'kanye@west.com'}, } } - res = app.post_json_api(url, payload, auth=user.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=user.auth, expect_errors=True) + assert not notifications assert res.status_code == 400 assert ( res.json['errors'][0]['detail'] == 'true is not a valid email preference.' ) - assert mock_send_grid.call_count == 0 - def test_add_unregistered_contributor_without_email_no_email( - self, mock_send_grid, app, user, url_project_contribs - ): + def test_add_unregistered_contributor_without_email_no_email(self, app, user, url_project_contribs): url = f'{url_project_contribs}?send_email=default' payload = { 'data': { @@ -1377,10 +1376,11 @@ def test_add_unregistered_contributor_without_email_no_email( } with capture_signals() as mock_signal: - res = app.post_json_api(url, payload, auth=user.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=user.auth) + assert not notifications assert contributor_added in mock_signal.signals_sent() assert res.status_code == 201 - assert mock_send_grid.call_count == 0 @pytest.mark.django_db diff --git a/api_tests/nodes/views/test_node_forks_list.py b/api_tests/nodes/views/test_node_forks_list.py index 632c178bb2e..a9031b105e8 100644 --- a/api_tests/nodes/views/test_node_forks_list.py +++ b/api_tests/nodes/views/test_node_forks_list.py @@ -205,7 +205,6 @@ def test_forks_list_does_not_show_registrations_of_forks( @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestNodeForkCreate: @pytest.fixture() @@ -419,9 +418,7 @@ def test_read_only_contributor_can_fork_private_registration( assert res.json['data']['attributes']['title'] == 'Fork of ' + \ private_project.title - def test_send_email_success( - self, app, user, public_project_url, - fork_data_with_title, public_project, mock_send_grid): + def test_send_email_success(self, app, user, public_project_url, fork_data_with_title, public_project): with capture_notifications() as notifications: res = app.post_json_api( @@ -437,13 +434,15 @@ def test_send_email_success( assert notifications[0]['type'] == NotificationType.Type.NODE_FORK_COMPLETED def test_send_email_failed( - self, app, user, public_project_url, - fork_data_with_title, public_project, mock_send_grid): + self, app, user, public_project_url, fork_data_with_title, public_project): with mock.patch.object(NodeForksSerializer, 'save', side_effect=Exception()): - with pytest.raises(Exception): - app.post_json_api( - public_project_url, - fork_data_with_title, - auth=user.auth) - assert 
mock_send_grid.called + with capture_notifications() as notifications: + with pytest.raises(Exception): + app.post_json_api( + public_project_url, + fork_data_with_title, + auth=user.auth + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_FORK_FAILED diff --git a/api_tests/nodes/views/test_node_relationship_institutions.py b/api_tests/nodes/views/test_node_relationship_institutions.py index c19c4e79d4b..179f357b987 100644 --- a/api_tests/nodes/views/test_node_relationship_institutions.py +++ b/api_tests/nodes/views/test_node_relationship_institutions.py @@ -115,7 +115,6 @@ def create_payload(self, institutions): ] } -@pytest.mark.usefixtures('mock_send_grid') class TestNodeRelationshipInstitutions(RelationshipInstitutionsTestMixin): def test_node_with_no_permissions(self, app, unauthorized_user_with_affiliation, institution_one, node_institutions_url): @@ -254,18 +253,18 @@ def test_remove_institutions_with_affiliated_user( assert res.status_code == 200 assert node.affiliated_institutions.count() == 0 - def test_using_post_making_no_changes_returns_201(self, app, user, institution_one, node, node_institutions_url, mock_send_grid): + def test_using_post_making_no_changes_returns_201(self, app, user, institution_one, node, node_institutions_url): node.affiliated_institutions.add(institution_one) node.save() assert institution_one in node.affiliated_institutions.all() - mock_send_grid.reset_mock() - res = app.post_json_api( - node_institutions_url, - self.create_payload([institution_one]), - auth=user.auth - ) - mock_send_grid.assert_not_called() + with capture_notifications() as notifications: + res = app.post_json_api( + node_institutions_url, + self.create_payload([institution_one]), + auth=user.auth + ) + assert not notifications assert res.status_code == 201 assert institution_one in node.affiliated_institutions.all() diff --git a/api_tests/preprints/views/test_preprint_contributors_list.py b/api_tests/preprints/views/test_preprint_contributors_list.py index ce96d8d308c..a719589563c 100644 --- a/api_tests/preprints/views/test_preprint_contributors_list.py +++ b/api_tests/preprints/views/test_preprint_contributors_list.py @@ -78,7 +78,6 @@ def contrib_id(preprint_id, user_id): return f'{preprint_id}-{user_id}' return contrib_id - @pytest.mark.django_db @pytest.mark.enable_implicit_clean class TestPreprintContributorList(NodeCRUDTestCase): @@ -1352,7 +1351,6 @@ def test_add_contributor_validation(self, preprint_published, validate_data): @pytest.mark.django_db @pytest.mark.enable_enqueue_task -@pytest.mark.usefixtures('mock_send_grid') class TestPreprintContributorCreateEmail(NodeCRUDTestCase): @pytest.fixture() @@ -1360,7 +1358,7 @@ def url_preprint_contribs(self, preprint_published): return f'/{API_BASE}preprints/{preprint_published._id}/contributors/' def test_add_contributor_no_email_if_false( - self, mock_send_grid, app, user, url_preprint_contribs): + self, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=false' payload = { 'data': { @@ -1371,14 +1369,12 @@ def test_add_contributor_no_email_if_false( } } } - mock_send_grid.reset_mock() - res = app.post_json_api(url, payload, auth=user.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=user.auth) + assert not notifications assert res.status_code == 201 - assert mock_send_grid.call_count == 0 - def test_add_contributor_needs_preprint_filter_to_send_email( - self, mock_send_grid, app, user, user_two, - 
url_preprint_contribs): + def test_add_contributor_needs_preprint_filter_to_send_email(self, app, user, user_two, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=default' payload = { 'data': { @@ -1395,12 +1391,11 @@ def test_add_contributor_needs_preprint_filter_to_send_email( } } } - - mock_send_grid.reset_mock() - res = app.post_json_api(url, payload, auth=user.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=user.auth, expect_errors=True) + assert not notifications assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'default is not a valid email preference.' - assert mock_send_grid.call_count == 0 def test_add_contributor_signal_if_preprint( self, app, user, user_two, url_preprint_contribs): @@ -1467,8 +1462,7 @@ def test_add_unregistered_contributor_signal_if_preprint(self, app, user, url_pr assert len(notifications) == 1 assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT - def test_add_contributor_invalid_send_email_param( - self, mock_send_grid, app, user, url_preprint_contribs): + def test_add_contributor_invalid_send_email_param(self, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=true' payload = { 'data': { @@ -1479,16 +1473,19 @@ def test_add_contributor_invalid_send_email_param( } } } - mock_send_grid.reset_mock() - res = app.post_json_api( - url, payload, auth=user.auth, - expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api( + url, + payload, + auth=user.auth, + expect_errors=True + ) + assert not notifications assert res.status_code == 400 assert res.json['errors'][0]['detail'] == 'true is not a valid email preference.' 
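
On the test side, the replacement for mock_send_grid assertions is the shape used repeatedly in this hunk: wrap the request in capture_notifications() and assert either that nothing was captured or that exactly one notification of the expected type was emitted. A condensed sketch, assuming pytest fixtures named app, user, url, and payload (illustrative names, not fixtures defined by this patch):

import pytest

from osf.models import NotificationType
from tests.utils import capture_notifications


@pytest.mark.django_db
def test_contributor_add_emits_single_notification(app, user, url, payload):
    # Happy path: exactly one notification of the expected type is emitted.
    with capture_notifications() as notifications:
        res = app.post_json_api(f'{url}?send_email=default', payload, auth=user.auth)
    assert res.status_code == 201
    assert len(notifications) == 1
    assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT


@pytest.mark.django_db
def test_invalid_send_email_param_emits_nothing(app, user, url, payload):
    # Error path: validation fails, so nothing should be captured.
    with capture_notifications() as notifications:
        res = app.post_json_api(f'{url}?send_email=true', payload, auth=user.auth, expect_errors=True)
    assert res.status_code == 400
    assert not notifications
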
- assert mock_send_grid.call_count == 0 def test_add_unregistered_contributor_without_email_no_email( - self, mock_send_grid, app, user, url_preprint_contribs): + self, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=preprint' payload = { 'data': { @@ -1499,16 +1496,16 @@ def test_add_unregistered_contributor_without_email_no_email( } } - mock_send_grid.reset_mock() with capture_signals() as mock_signal: - res = app.post_json_api(url, payload, auth=user.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=user.auth) + assert not notifications assert contributor_added in mock_signal.signals_sent() assert res.status_code == 201 - assert mock_send_grid.call_count == 0 @mock.patch('osf.models.preprint.update_or_enqueue_on_preprint_updated') def test_publishing_preprint_sends_emails_to_contributors( - self, mock_update, mock_send_grid, app, user, url_preprint_contribs, preprint_unpublished): + self, mock_update, app, user, url_preprint_contribs, preprint_unpublished): url = f'/{API_BASE}preprints/{preprint_unpublished._id}/' user_two = AuthUserFactory() preprint_unpublished.add_contributor(user_two, permissions=permissions.WRITE, save=True) @@ -1547,7 +1544,7 @@ def test_contributor_added_signal_not_specified(self, app, user, url_preprint_co assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT def test_contributor_added_not_sent_if_unpublished( - self, mock_send_grid, app, user, preprint_unpublished): + self, app, user, preprint_unpublished): url = f'/{API_BASE}preprints/{preprint_unpublished._id}/contributors/?send_email=preprint' payload = { 'data': { @@ -1558,10 +1555,10 @@ def test_contributor_added_not_sent_if_unpublished( } } } - mock_send_grid.reset_mock() - res = app.post_json_api(url, payload, auth=user.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=user.auth) + assert not notifications assert res.status_code == 201 - assert mock_send_grid.call_count == 0 @pytest.mark.django_db diff --git a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py index 5a7275158f2..bf1efa42e2b 100644 --- a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py +++ b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py @@ -91,27 +91,27 @@ def test_GET_admin_with_filter(self, app, url, nonmoderator, moderator, admin, p @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestPOSTCollectionsModeratorList: - def test_POST_unauthorized(self, mock_send_grid, app, url, nonmoderator, moderator, provider): + def test_POST_unauthorized(self, app, url, nonmoderator, moderator, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') - res = app.post(url, payload, expect_errors=True) + with capture_notifications() as notification: + res = app.post(url, payload, expect_errors=True) + assert not notification assert res.status_code == 401 - assert mock_send_grid.call_count == 0 - def test_POST_forbidden(self, mock_send_grid, app, url, nonmoderator, moderator, provider): + def test_POST_forbidden(self, app, url, nonmoderator, moderator, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') - res = app.post(url, payload, auth=nonmoderator.auth, expect_errors=True) - assert res.status_code == 403 - - res 
= app.post(url, payload, auth=moderator.auth, expect_errors=True) - assert res.status_code == 403 + with capture_notifications() as notification: + res = app.post(url, payload, auth=nonmoderator.auth, expect_errors=True) + assert res.status_code == 403 - assert mock_send_grid.call_count == 0 + res = app.post(url, payload, auth=moderator.auth, expect_errors=True) + assert res.status_code == 403 + assert not notification - def test_POST_admin_success_existing_user(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_success_existing_user(self, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='moderator') with capture_notifications() as notifications: @@ -122,11 +122,13 @@ def test_POST_admin_success_existing_user(self, mock_send_grid, app, url, nonmod assert res.json['data']['id'] == nonmoderator._id assert res.json['data']['attributes']['permission_group'] == 'moderator' - def test_POST_admin_failure_existing_moderator(self, mock_send_grid, app, url, moderator, admin, provider): + def test_POST_admin_failure_existing_moderator(self, app, url, moderator, admin, provider): payload = make_payload(user_id=moderator._id, permission_group='moderator') - res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + assert not notifications + assert res.status_code == 400 - assert mock_send_grid.call_count == 0 def test_POST_admin_failure_unreg_moderator(self, app, url, moderator, nonmoderator, admin, provider): unreg_user = {'full_name': 'Jalen Hurts', 'email': '1eagles@allbatman.org'} @@ -147,13 +149,14 @@ def test_POST_admin_failure_unreg_moderator(self, app, url, moderator, nonmodera assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONFIRM_EMAIL_MODERATION assert notifications[0]['kwargs']['user'].username == unreg_user['email'] - def test_POST_admin_failure_invalid_group(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_failure_invalid_group(self, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(user_id=nonmoderator._id, permission_group='citizen') - res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) + assert not notifications assert res.status_code == 400 - assert mock_send_grid.call_count == 0 - def test_POST_admin_success_email(self, mock_send_grid, app, url, nonmoderator, moderator, admin, provider): + def test_POST_admin_success_email(self, app, url, nonmoderator, moderator, admin, provider): payload = make_payload(email='somenewuser@gmail.com', full_name='Some User', permission_group='moderator') with capture_notifications() as notifications: res = app.post_json_api(url, payload, auth=admin.auth) diff --git a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py index ac075faddeb..50713497203 100644 --- a/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py +++ b/api_tests/providers/preprints/views/test_preprint_provider_moderator_list.py @@ -10,7 +10,6 @@ from tests.utils import capture_notifications -@pytest.mark.usefixtures('mock_send_grid') class 
ProviderModeratorListTestClass: @pytest.fixture() @@ -70,18 +69,18 @@ def test_list_get_admin_with_filter(self, app, url, nonmoderator, moderator, adm assert res.json['data'][0]['id'] == admin._id assert res.json['data'][0]['attributes']['permission_group'] == permissions.ADMIN - def test_list_post_unauthorized(self, mock_send_grid, app, url, nonmoderator, moderator, provider): + def test_list_post_unauthorized(self, app, url, nonmoderator, moderator, provider): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') - res = app.post(url, payload, expect_errors=True) - assert res.status_code == 401 - - res = app.post(url, payload, auth=nonmoderator.auth, expect_errors=True) - assert res.status_code == 403 + with capture_notifications() as notification: + res = app.post(url, payload, expect_errors=True) + assert res.status_code == 401 - res = app.post(url, payload, auth=moderator.auth, expect_errors=True) - assert res.status_code == 403 + res = app.post(url, payload, auth=nonmoderator.auth, expect_errors=True) + assert res.status_code == 403 - assert mock_send_grid.call_count == 0 + res = app.post(url, payload, auth=moderator.auth, expect_errors=True) + assert res.status_code == 403 + assert not notification def test_list_post_admin_success_existing_user(self, app, url, nonmoderator, moderator, admin): payload = self.create_payload(user_id=nonmoderator._id, permission_group='moderator') @@ -94,7 +93,7 @@ def test_list_post_admin_success_existing_user(self, app, url, nonmoderator, mod assert len(notifications) == 1 assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - def test_list_post_admin_failure_existing_moderator(self, mock_send_grid, app, url, moderator, admin): + def test_list_post_admin_failure_existing_moderator(self, app, url, moderator, admin): payload = self.create_payload(user_id=moderator._id, permission_group='moderator') with capture_notifications() as notifications: res = app.post_json_api(url, payload, auth=admin.auth, expect_errors=True) diff --git a/api_tests/providers/tasks/test_bulk_upload.py b/api_tests/providers/tasks/test_bulk_upload.py index 8caf27d89bf..a2863436bbd 100644 --- a/api_tests/providers/tasks/test_bulk_upload.py +++ b/api_tests/providers/tasks/test_bulk_upload.py @@ -65,7 +65,6 @@ def test_error_message_default(self): @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestBulkUploadTasks: @pytest.fixture() diff --git a/api_tests/registrations/views/test_registration_detail.py b/api_tests/registrations/views/test_registration_detail.py index 68222090042..1be2d14c3be 100644 --- a/api_tests/registrations/views/test_registration_detail.py +++ b/api_tests/registrations/views/test_registration_detail.py @@ -693,7 +693,6 @@ def test_read_write_contributor_can_edit_writeable_fields( @pytest.mark.django_db -@pytest.mark.usefixtures('mock_notification_send') class TestRegistrationWithdrawal(TestRegistrationUpdateTestCase): @pytest.fixture @@ -752,14 +751,16 @@ def test_initiate_withdraw_registration_fails( res = app.put_json_api(public_url, public_payload, auth=user.auth, expect_errors=True) assert res.status_code == 400 - def test_initiate_withdrawal_success(self, mock_notification_send, app, user, public_registration, public_url, public_payload): - res = app.put_json_api(public_url, public_payload, auth=user.auth) + def test_initiate_withdrawal_success(self, app, user, public_registration, public_url, public_payload): + with capture_notifications() as notifications: + res = 
app.put_json_api(public_url, public_payload, auth=user.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_REVIEWS assert res.status_code == 200 assert res.json['data']['attributes']['pending_withdrawal'] is True public_registration.refresh_from_db() assert public_registration.is_pending_retraction assert public_registration.registered_from.logs.first().action == 'retraction_initiated' - assert mock_notification_send.called @pytest.mark.usefixtures('mock_gravy_valet_get_verified_links') def test_initiate_withdrawal_with_embargo_ends_embargo( diff --git a/api_tests/requests/views/test_node_request_institutional_access.py b/api_tests/requests/views/test_node_request_institutional_access.py index 35e18042117..d41b7639f05 100644 --- a/api_tests/requests/views/test_node_request_institutional_access.py +++ b/api_tests/requests/views/test_node_request_institutional_access.py @@ -2,14 +2,15 @@ from api.base.settings.defaults import API_BASE from api_tests.requests.mixins import NodeRequestTestMixin +from osf.models import NotificationType from osf_tests.factories import NodeFactory, InstitutionFactory, AuthUserFactory from osf.utils.workflows import DefaultStates, NodeRequestTypes from framework.auth import Auth +from tests.utils import capture_notifications @pytest.mark.django_db -@pytest.mark.usefixtures('mock_notification_send') class TestNodeRequestListInstitutionalAccess(NodeRequestTestMixin): @pytest.fixture() @@ -206,37 +207,34 @@ def test_institutional_admin_unauth_institution(self, app, project, institution_ assert res.status_code == 403 assert 'Institutional request access is not enabled.' in res.json['errors'][0]['detail'] - def test_email_not_sent_without_recipient(self, mock_notification_send, app, project, institutional_admin, url, + def test_email_not_sent_without_recipient(self, app, project, institutional_admin, url, create_payload, institution): """ Test that an email is not sent when no recipient is listed when an institutional access request is made, but the request is still made anyway without email. """ del create_payload['data']['relationships']['message_recipient'] - mock_notification_send.reset_mock() - res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + # Check that an email is not sent + assert not notifications assert res.status_code == 201 - # Check that an email is sent - assert not mock_notification_send.called - - def test_email_not_sent_outside_institution(self, mock_notification_send, app, project, institutional_admin, url, + def test_email_not_sent_outside_institution(self, app, project, institutional_admin, url, create_payload, user_without_affiliation, institution): """ Test that you are prevented from requesting a user with the correct institutional affiliation. """ create_payload['data']['relationships']['message_recipient']['data']['id'] = user_without_affiliation._id - mock_notification_send.reset_mock() - res = app.post_json_api(url, create_payload, auth=institutional_admin.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.post_json_api(url, create_payload, auth=institutional_admin.auth, expect_errors=True) + # Check that an email is not sent + assert not notifications assert res.status_code == 403 assert f'User {user_without_affiliation._id} is not affiliated with the institution.' 
in res.json['errors'][0]['detail'] - # Check that an email is sent - assert not mock_notification_send.called - def test_email_sent_on_creation( self, - mock_notification_send, app, project, institutional_admin, @@ -248,15 +246,14 @@ def test_email_sent_on_creation( """ Test that an email is sent to the appropriate recipients when an institutional access request is made. """ - mock_notification_send.reset_mock() - res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED assert res.status_code == 201 - assert mock_notification_send.call_count == 1 - def test_bcc_institutional_admin( self, - mock_notification_send, app, project, institutional_admin, @@ -269,15 +266,14 @@ def test_bcc_institutional_admin( Ensure BCC option works as expected, sending messages to sender giving them a copy for themselves. """ create_payload['data']['attributes']['bcc_sender'] = True - mock_notification_send.reset_mock() - res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED assert res.status_code == 201 - assert mock_notification_send.call_count == 1 - def test_reply_to_institutional_admin( self, - mock_notification_send, app, project, institutional_admin, @@ -290,12 +286,12 @@ def test_reply_to_institutional_admin( Ensure reply-to option works as expected, allowing a reply to header be added to the email. """ create_payload['data']['attributes']['reply_to'] = True - mock_notification_send.reset_mock() - res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED assert res.status_code == 201 - assert mock_notification_send.call_count == 1 - def test_access_requests_disabled_raises_permission_denied( self, app, node_with_disabled_access_requests, user_with_affiliation, institutional_admin, create_payload ): @@ -313,7 +309,6 @@ def test_access_requests_disabled_raises_permission_denied( def test_placeholder_text_when_comment_is_empty( self, - mock_notification_send, app, project, institutional_admin, @@ -327,12 +322,12 @@ def test_placeholder_text_when_comment_is_empty( """ # Test with empty comment create_payload['data']['attributes']['comment'] = '' - mock_notification_send.reset_mock() - res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED assert res.status_code == 201 - mock_notification_send.assert_called() - def test_requester_can_resubmit(self, app, project, institutional_admin, url, create_payload): """ Test that a requester can submit another access request for the same node. 
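
These tests index the objects yielded by tests.utils.capture_notifications as dicts with 'type' and 'kwargs' keys (for example notifications[0]['kwargs']['user'].username). The helper's implementation is not part of this patch, so the following is only an assumed sketch consistent with that usage, patching NotificationType.emit as the single choke point.

# Assumed shape of tests.utils.capture_notifications; not the actual helper.
from contextlib import contextmanager
from unittest import mock

from osf.models import NotificationType


@contextmanager
def capture_notifications():
    captured = []

    def record_emit(self, user=None, **kwargs):
        # Record what the assertions rely on: the notification type and the emit kwargs.
        # Assumes NotificationType.name holds the Type value, so comparing the recorded
        # 'type' against NotificationType.Type members works.
        captured.append({'type': self.name, 'kwargs': {'user': user, **kwargs}})

    with mock.patch.object(NotificationType, 'emit', record_emit):
        yield captured
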
diff --git a/api_tests/requests/views/test_node_request_list.py b/api_tests/requests/views/test_node_request_list.py index 4e16d5ce1c2..1356727d2f7 100644 --- a/api_tests/requests/views/test_node_request_list.py +++ b/api_tests/requests/views/test_node_request_list.py @@ -10,7 +10,6 @@ @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestNodeRequestListCreate(NodeRequestTestMixin): @pytest.fixture() def url(self, project): diff --git a/api_tests/requests/views/test_preprint_request_list.py b/api_tests/requests/views/test_preprint_request_list.py index 72e16862f7a..2a859e33ef8 100644 --- a/api_tests/requests/views/test_preprint_request_list.py +++ b/api_tests/requests/views/test_preprint_request_list.py @@ -5,7 +5,6 @@ @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestPreprintRequestListCreate(PreprintRequestTestMixin): def url(self, preprint): return f'/{API_BASE}preprints/{preprint._id}/requests/' @@ -63,9 +62,3 @@ def test_requester_cannot_submit_again(self, app, admin, create_payload, pre_mod res = app.post_json_api(self.url(pre_mod_preprint), create_payload, auth=admin.auth, expect_errors=True) assert res.status_code == 409 assert res.json['errors'][0]['detail'] == 'Users may not have more than one withdrawal request per preprint.' - - @pytest.mark.skip('TODO: IN-284 -- add emails') - def test_email_sent_to_moderators_on_submit(self, mock_send_grid, app, admin, create_payload, moderator, post_mod_preprint): - res = app.post_json_api(self.url(post_mod_preprint), create_payload, auth=admin.auth) - assert res.status_code == 201 - assert mock_send_grid.call_count == 1 diff --git a/api_tests/requests/views/test_request_actions_create.py b/api_tests/requests/views/test_request_actions_create.py index 7396e1ec739..ff277ac0233 100644 --- a/api_tests/requests/views/test_request_actions_create.py +++ b/api_tests/requests/views/test_request_actions_create.py @@ -10,7 +10,6 @@ @pytest.mark.django_db @pytest.mark.enable_enqueue_task -@pytest.mark.usefixtures('mock_send_grid') class TestCreateNodeRequestAction(NodeRequestTestMixin): @pytest.fixture() def url(self, node_request): @@ -220,17 +219,17 @@ def test_email_sent_on_reject(self, app, admin, url, node_request): assert initial_state != node_request.machine_state assert node_request.creator not in node_request.target.contributors - def test_email_not_sent_on_reject(self, mock_send_grid, app, requester, url, node_request): - mock_send_grid.reset_mock() + def test_email_not_sent_on_reject(self, app, requester, url, node_request): initial_state = node_request.machine_state initial_comment = node_request.comment payload = self.create_payload(node_request._id, trigger='edit_comment', comment='ASDFG') - res = app.post_json_api(url, payload, auth=requester.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, payload, auth=requester.auth) + assert not notifications assert res.status_code == 201 node_request.reload() assert initial_state == node_request.machine_state assert initial_comment != node_request.comment - assert mock_send_grid.call_count == 0 def test_set_permissions_on_approve(self, app, admin, url, node_request): assert node_request.creator not in node_request.target.contributors @@ -261,7 +260,6 @@ def test_accept_request_defaults_to_read_and_visible(self, app, admin, url, node @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestCreatePreprintRequestAction(PreprintRequestTestMixin): @pytest.fixture() def url(self, pre_request, 
post_request, none_request): @@ -407,31 +405,5 @@ def test_email_sent_on_approve(self, app, moderator, url, pre_request, post_requ assert initial_state != request.machine_state assert request.target.is_retracted - @pytest.mark.skip('TODO: IN-331 -- add emails') - def test_email_sent_on_reject(self, mock_send_grid, app, moderator, url, pre_request, post_request): - for request in [pre_request, post_request]: - initial_state = request.machine_state - assert not request.target.is_retracted - payload = self.create_payload(request._id, trigger='reject') - res = app.post_json_api(url, payload, auth=moderator.auth) - assert res.status_code == 201 - request.reload() - assert initial_state != request.machine_state - assert not request.target.is_retracted - assert mock_send_grid.call_count == 2 - - @pytest.mark.skip('TODO: IN-284/331 -- add emails') - def test_email_not_sent_on_edit_comment(self, mock_send_grid, app, moderator, url, pre_request, post_request): - for request in [pre_request, post_request]: - initial_state = request.machine_state - assert not request.target.is_retracted - payload = self.create_payload(request._id, trigger='edit_comment', comment='ASDFG') - res = app.post_json_api(url, payload, auth=moderator.auth) - assert res.status_code == 201 - request.reload() - assert initial_state != request.machine_state - assert not request.target.is_retracted - assert mock_send_grid.call_count == 0 - def test_auto_approve(self, app, auto_withdrawable_pre_mod_preprint, auto_approved_pre_request): assert auto_withdrawable_pre_mod_preprint.is_retracted diff --git a/api_tests/users/views/test_user_confirm.py b/api_tests/users/views/test_user_confirm.py index d304fc456b5..bb2acee47c9 100644 --- a/api_tests/users/views/test_user_confirm.py +++ b/api_tests/users/views/test_user_confirm.py @@ -1,12 +1,12 @@ import pytest -from unittest import mock from api.base.settings.defaults import API_BASE +from osf.models import NotificationType from osf_tests.factories import AuthUserFactory +from tests.utils import capture_notifications @pytest.mark.django_db -@pytest.mark.usefixtures('mock_notification_send') class TestConfirmEmail: @pytest.fixture() @@ -114,26 +114,26 @@ def test_post_provider_mismatch(self, app, confirm_url, user_with_email_verifica assert res.status_code == 400 assert 'provider mismatch' in res.json['errors'][0]['detail'].lower() - @mock.patch('website.mails.send_mail') - def test_post_success_create(self, mock_send_mail, app, confirm_url, user_with_email_verification): + def test_post_success_create(self, app, confirm_url, user_with_email_verification): user, token, email = user_with_email_verification user.is_registered = False user.save() - res = app.post_json_api( - confirm_url, - { - 'data': { - 'attributes': { - 'uid': user._id, - 'token': token, - 'destination': 'doesnotmatter', + with capture_notifications() as notifications: + res = app.post_json_api( + confirm_url, + { + 'data': { + 'attributes': { + 'uid': user._id, + 'token': token, + 'destination': 'doesnotmatter', + } } - } - }, - expect_errors=True - ) + }, + expect_errors=True + ) assert res.status_code == 201 - assert not mock_send_mail.called + assert not notifications assert res.json == { 'redirect_url': f'http://localhost:80/v2/users/{user._id}/confirm/&new=true', 'meta': { @@ -148,62 +148,61 @@ def test_post_success_create(self, mock_send_mail, app, confirm_url, user_with_e assert user.external_identity == {'ORCID': {'0002-0001-0001-0001': 'VERIFIED'}} assert user.emails.filter(address=email.lower()).exists() - def 
test_post_success_link(self, mock_notification_send, app, confirm_url, user_with_email_verification): + def test_post_success_link(self, app, confirm_url, user_with_email_verification): user, token, email = user_with_email_verification user.external_identity['ORCID']['0000-0000-0000-0000'] = 'LINK' user.save() - res = app.post_json_api( - confirm_url, - { - 'data': { - 'attributes': { - 'uid': user._id, - 'token': token, - 'destination': 'doesnotmatter' + with capture_notifications() as notifications: + res = app.post_json_api( + confirm_url, + { + 'data': { + 'attributes': { + 'uid': user._id, + 'token': token, + 'destination': 'doesnotmatter' + } } - } - }, - expect_errors=True - ) - assert res.status_code == 201 + }, + expect_errors=True + ) + assert res.status_code == 201 - assert mock_notification_send.called + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL user.reload() assert user.external_identity['ORCID']['0000-0000-0000-0000'] == 'VERIFIED' - @mock.patch('website.mails.send_mail') def test_post_success_link_with_email_verification_none( - self, mock_send_mail, app, confirm_url, user_with_none_identity + self, app, confirm_url, user_with_none_identity ): user, token, email = user_with_none_identity user.save() - res = app.post_json_api( - confirm_url, - { - 'data': { - 'attributes': { - 'uid': user._id, - 'token': token, - 'destination': 'doesnotmatter' + with capture_notifications() as notifications: + res = app.post_json_api( + confirm_url, + { + 'data': { + 'attributes': { + 'uid': user._id, + 'token': token, + 'destination': 'doesnotmatter' + } } - } - }, - expect_errors=True - ) + }, + expect_errors=True + ) + assert not notifications # no orcid sso message assert res.status_code == 201 - assert not mock_send_mail.called # no orcid sso message - user.reload() assert not user.external_identity - @mock.patch('website.mails.send_mail') def test_post_success_link_with_email_already_exists( self, - mock_send_mail, app, confirm_url, user_with_email_verification diff --git a/api_tests/users/views/test_user_list.py b/api_tests/users/views/test_user_list.py index 32cc69758d4..28a913df2a7 100644 --- a/api_tests/users/views/test_user_list.py +++ b/api_tests/users/views/test_user_list.py @@ -10,7 +10,7 @@ from api.base.settings.defaults import API_BASE from framework.auth.cas import CasResponse -from osf.models import OSFUser, ApiOAuth2PersonalToken +from osf.models import OSFUser, ApiOAuth2PersonalToken, NotificationType from osf_tests.factories import ( AuthUserFactory, UserFactory, @@ -19,6 +19,7 @@ Auth, ) from osf.utils.permissions import CREATOR_PERMISSIONS +from tests.utils import capture_notifications from website import settings @@ -246,7 +247,6 @@ def test_users_list_filter_multiple_fields_with_bad_filter( @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestUsersCreate: @pytest.fixture() @@ -279,35 +279,37 @@ def tearDown(self, app): OSFUser.remove() def test_logged_in_user_with_basic_auth_cannot_create_other_user_or_send_mail( - self, mock_send_grid, app, user, email_unconfirmed, data, url_base): + self, app, user, email_unconfirmed, data, url_base): assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - res = app.post_json_api( - f'{url_base}?send_email=true', - data, - auth=user.auth, - expect_errors=True - ) + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_base}?send_email=true', + data, + auth=user.auth, + expect_errors=True + ) + 
assert not notifications assert res.status_code == 403 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - assert mock_send_grid.call_count == 0 def test_logged_out_user_cannot_create_other_user_or_send_mail( - self, mock_send_grid, app, email_unconfirmed, data, url_base): + self, app, email_unconfirmed, data, url_base): assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - res = app.post_json_api( - f'{url_base}?send_email=true', - data, - expect_errors=True - ) + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_base}?send_email=true', + data, + expect_errors=True + ) + assert not notifications assert res.status_code == 401 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - assert mock_send_grid.call_count == 0 @pytest.mark.skip # failing locally post converision def test_cookied_requests_can_create_and_email( - self, mock_send_grid, app, user, email_unconfirmed, data, url_base): + self, app, user, email_unconfirmed, data, url_base): # NOTE: skipped tests are not tested during session refactor, only updated to fix import session = SessionStore() session['auth_user_id'] = user._id @@ -316,13 +318,15 @@ def test_cookied_requests_can_create_and_email( app.set_cookie(settings.COOKIE_NAME, str(cookie)) assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - res = app.post_json_api( - f'{url_base}?send_email=true', - data - ) + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_base}?send_email=true', + data + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED assert res.status_code == 201 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_send_grid.call_count == 1 @pytest.mark.skip # failing locally post converision @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') @@ -331,7 +335,7 @@ def test_cookied_requests_can_create_and_email( not settings.DEV_MODE, 'DEV_MODE disabled, osf.users.create unavailable') def test_properly_scoped_token_can_create_and_send_email( - self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): + self, mock_auth, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Authorized Token', @@ -352,16 +356,18 @@ def test_properly_scoped_token_can_create_and_send_email( mock_auth.return_value = user, mock_cas_resp assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - res = app.post_json_api( - f'{url_base}?send_email=true', - data, - headers={'Authorization': f'Bearer {token.token_id}'} - ) + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_base}?send_email=true', + data, + headers={'Authorization': f'Bearer {token.token_id}'} + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED assert res.status_code == 201 assert res.json['data']['attributes']['username'] == email_unconfirmed assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_send_grid.call_count == 1 @pytest.mark.skip # failing locally post converision @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') @@ -370,7 +376,7 @@ def test_properly_scoped_token_can_create_and_send_email( not settings.DEV_MODE, 'DEV_MODE disabled, osf.users.create unavailable') def 
test_properly_scoped_token_does_not_send_email_without_kwarg( - self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): + self, mock_auth, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Authorized Token', @@ -393,16 +399,17 @@ def test_properly_scoped_token_does_not_send_email_without_kwarg( assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - res = app.post_json_api( - url_base, - data, - headers={'Authorization': f'Bearer {token.token_id}'} - ) + with capture_notifications() as notifications: + res = app.post_json_api( + url_base, + data, + headers={'Authorization': f'Bearer {token.token_id}'} + ) + assert not notifications assert res.status_code == 201 assert res.json['data']['attributes']['username'] == email_unconfirmed assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_send_grid.call_count == 0 @pytest.mark.skip # failing locally post converision @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') @@ -411,7 +418,7 @@ def test_properly_scoped_token_does_not_send_email_without_kwarg( not settings.DEV_MODE, 'DEV_MODE disabled, osf.users.create unavailable') def test_properly_scoped_token_can_create_without_username_but_not_send_email( - self, mock_auth, mock_send_grid, app, user, data, url_base): + self, mock_auth, app, user, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Authorized Token', @@ -434,11 +441,13 @@ def test_properly_scoped_token_can_create_without_username_but_not_send_email( data['data']['attributes'] = {'full_name': 'No Email'} assert OSFUser.objects.filter(fullname='No Email').count() == 0 - res = app.post_json_api( - f'{url_base}?send_email=true', - data, - headers={'Authorization': f'Bearer {token.token_id}'} - ) + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_base}?send_email=true', + data, + headers={'Authorization': f'Bearer {token.token_id}'} + ) + assert not notifications assert res.status_code == 201 username = res.json['data']['attributes']['username'] @@ -447,11 +456,10 @@ def test_properly_scoped_token_can_create_without_username_but_not_send_email( except ValueError: raise AssertionError('Username is not a valid UUID') assert OSFUser.objects.filter(fullname='No Email').count() == 1 - assert mock_send_grid.call_count == 0 @mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') def test_improperly_scoped_token_can_not_create_or_email( - self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): + self, mock_auth, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Unauthorized Token', @@ -474,16 +482,17 @@ def test_improperly_scoped_token_can_not_create_or_email( mock_auth.return_value = user, mock_cas_resp assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - res = app.post_json_api( - f'{url_base}?send_email=true', - data, - headers={'Authorization': f'Bearer {token.token_id}'}, - expect_errors=True - ) + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_base}?send_email=true', + data, + headers={'Authorization': f'Bearer {token.token_id}'}, + expect_errors=True + ) + assert not notifications assert res.status_code == 403 assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - assert mock_send_grid.call_count == 0 @pytest.mark.skip # failing locally post converision 
@mock.patch('api.base.authentication.drf.OSFCASAuthentication.authenticate') @@ -492,7 +501,7 @@ def test_improperly_scoped_token_can_not_create_or_email( not settings.DEV_MODE, 'DEV_MODE disabled, osf.admin unavailable') def test_admin_scoped_token_can_create_and_send_email( - self, mock_auth, mock_send_grid, app, user, email_unconfirmed, data, url_base): + self, mock_auth, app, user, email_unconfirmed, data, url_base): token = ApiOAuth2PersonalToken( owner=user, name='Admin Token', @@ -513,13 +522,15 @@ def test_admin_scoped_token_can_create_and_send_email( mock_auth.return_value = user, mock_cas_resp assert OSFUser.objects.filter(username=email_unconfirmed).count() == 0 - res = app.post_json_api( - f'{url_base}?send_email=true', - data, - headers={'Authorization': f'Bearer {token.token_id}'} - ) + with capture_notifications() as notifications: + res = app.post_json_api( + f'{url_base}?send_email=true', + data, + headers={'Authorization': f'Bearer {token.token_id}'} + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED assert res.status_code == 201 assert res.json['data']['attributes']['username'] == email_unconfirmed assert OSFUser.objects.filter(username=email_unconfirmed).count() == 1 - assert mock_send_grid.call_count == 1 diff --git a/api_tests/users/views/test_user_message_institutional_access.py b/api_tests/users/views/test_user_message_institutional_access.py index 2f60c4ae726..aac978abeb0 100644 --- a/api_tests/users/views/test_user_message_institutional_access.py +++ b/api_tests/users/views/test_user_message_institutional_access.py @@ -1,4 +1,6 @@ import pytest + +from osf.models.notification_type import NotificationType from osf.models.user_message import MessageTypes, UserMessage from api.base.settings.defaults import API_BASE from osf_tests.factories import ( @@ -7,9 +9,9 @@ ) from webtest import AppError +from tests.utils import capture_notifications @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestUserMessageInstitutionalAccess: """ Tests for `UserMessage`. @@ -84,31 +86,35 @@ def payload(self, institution, user): } } - def test_institutional_admin_can_create_message(self, mock_send_grid, app, institutional_admin, institution, url_with_affiliation, payload): + def test_institutional_admin_can_create_message(self, app, institutional_admin, institution, url_with_affiliation, payload): """ Ensure an institutional admin can create a `UserMessage` with a `message` and `institution`. 
""" - - res = app.post_json_api( - url_with_affiliation, - payload, - auth=institutional_admin.auth - ) + with capture_notifications() as notifications: + res = app.post_json_api( + url_with_affiliation, + payload, + auth=institutional_admin.auth + ) + assert len(notifications) == 1 + user_message = UserMessage.objects.get(sender=institutional_admin) + assert notifications[0]['kwargs']['user'].username == user_message.recipient.username assert res.status_code == 201 data = res.json['data'] - user_message = UserMessage.objects.get(sender=institutional_admin) - assert user_message.message_text == payload['data']['attributes']['message_text'] assert user_message.institution == institution - mock_send_grid.assert_called_once() - assert mock_send_grid.call_args[1]['to_addr'] == user_message.recipient.username assert user_message._id == data['id'] - def test_institutional_admin_can_not_create_message(self, mock_send_grid, app, institutional_admin_on_institution_without_access, - institution_without_access, url_with_affiliation_on_institution_without_access, - payload): + def test_institutional_admin_can_not_create_message( + self, + app, + institutional_admin_on_institution_without_access, + institution_without_access, + url_with_affiliation_on_institution_without_access, + payload + ): """ Ensure an institutional admin cannot create a `UserMessage` with a `message` and `institution` witch has 'institutional_request_access_enabled' as False """ @@ -193,7 +199,6 @@ def test_admin_cannot_message_user_outside_institution( def test_cc_institutional_admin( self, - mock_send_grid, app, institutional_admin, institution, @@ -208,42 +213,46 @@ def test_cc_institutional_admin( # Enable CC in the payload payload['data']['attributes']['bcc_sender'] = True - # Perform the API request - res = app.post_json_api( - url_with_affiliation, - payload, - auth=institutional_admin.auth, - ) + with capture_notifications() as notifications: + # Perform the API request + res = app.post_json_api( + url_with_affiliation, + payload, + auth=institutional_admin.auth, + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['kwargs']['user'].username == user_with_affiliation.username assert res.status_code == 201 user_message = UserMessage.objects.get() - assert user_message.is_sender_BCCed # Two emails are sent during the CC but this is how the mock works `send_email` is called once. - assert mock_send_grid.call_args[1]['to_addr'] == user_with_affiliation.username - def test_cc_field_defaults_to_false(self, mock_send_grid, app, institutional_admin, url_with_affiliation, user_with_affiliation, institution, payload): + def test_cc_field_defaults_to_false(self, app, institutional_admin, url_with_affiliation, user_with_affiliation, institution, payload): """ Ensure the `cc` field defaults to `false` when not provided in the payload. 
""" - res = app.post_json_api(url_with_affiliation, payload, auth=institutional_admin.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url_with_affiliation, payload, auth=institutional_admin.auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['kwargs']['user'].username == user_with_affiliation.username assert res.status_code == 201 user_message = UserMessage.objects.get(sender=institutional_admin) assert user_message.message_text == payload['data']['attributes']['message_text'] - assert mock_send_grid.call_args[1]['to_addr'] == user_with_affiliation.username - - def test_reply_to_header_set(self, mock_send_grid, app, institutional_admin, user_with_affiliation, institution, url_with_affiliation, payload): + def test_reply_to_header_set(self, app, institutional_admin, user_with_affiliation, institution, url_with_affiliation, payload): """ Ensure that the 'Reply-To' header is correctly set to the sender's email address. """ payload['data']['attributes']['reply_to'] = True - res = app.post_json_api( - url_with_affiliation, - payload, - auth=institutional_admin.auth, - ) + with capture_notifications() as notifications: + res = app.post_json_api( + url_with_affiliation, + payload, + auth=institutional_admin.auth, + ) assert res.status_code == 201 - - assert mock_send_grid.call_args[1]['to_addr'] == user_with_affiliation.username + assert notifications[0]['user'].username == user_with_affiliation.username diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index eac3bc9fc0c..847576d9913 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -7,8 +7,10 @@ AuthUserFactory, UserFactory, ) -from osf.models import Email, NotableDomain +from osf.models import Email, NotableDomain, NotificationType from framework.auth.views import auth_email_logout +from tests.utils import capture_notifications + @pytest.fixture() def user_one(): @@ -25,7 +27,6 @@ def unconfirmed_address(): @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestUserRequestExport: @pytest.fixture() @@ -41,11 +42,11 @@ def payload(self): } } - def test_get(self, app, user_one, url, mock_notification_send): + def test_get(self, app, user_one, url): res = app.get(url, auth=user_one.auth, expect_errors=True) assert res.status_code == 405 - def test_post(self, mock_send_grid, app, user_one, user_two, url, payload): + def test_post(self, app, user_one, user_two, url, payload): # Logged out res = app.post_json_api(url, payload, expect_errors=True) assert res.status_code == 401 @@ -56,20 +57,23 @@ def test_post(self, mock_send_grid, app, user_one, user_two, url, payload): # Logged in assert user_one.email_last_sent is None - res = app.post_json_api(url, payload, auth=user_one.auth) + with capture_notifications() as notification: + res = app.post_json_api(url, payload, auth=user_one.auth) + assert len(notification) == 1 + assert notification[0]['type'] == NotificationType.Type.USER_ACCOUNT_EXPORT_FORM assert res.status_code == 204 user_one.reload() assert user_one.email_last_sent is not None - assert mock_send_grid.call_count == 1 - def test_post_invalid_type(self, mock_send_grid, app, user_one, url, payload): + def test_post_invalid_type(self, app, user_one, url, payload): assert user_one.email_last_sent is None payload['data']['type'] = 'Invalid Type' - res = app.post_json_api(url, payload, 
auth=user_one.auth, expect_errors=True) + with capture_notifications() as notification: + res = app.post_json_api(url, payload, auth=user_one.auth, expect_errors=True) + assert not notification assert res.status_code == 409 user_one.reload() assert user_one.email_last_sent is None - assert mock_send_grid.call_count == 0 def test_exceed_throttle(self, app, user_one, url, payload): assert user_one.email_last_sent is None diff --git a/api_tests/users/views/test_user_settings_detail.py b/api_tests/users/views/test_user_settings_detail.py index cc02e6ae145..02fa4c2e646 100644 --- a/api_tests/users/views/test_user_settings_detail.py +++ b/api_tests/users/views/test_user_settings_detail.py @@ -4,6 +4,7 @@ from osf_tests.factories import ( AuthUserFactory, ) +from tests.utils import capture_notifications from website.settings import MAILCHIMP_GENERAL_LIST, OSF_HELP_LIST @@ -227,7 +228,6 @@ def test_unauthorized_patch_403(self, app, url, payload, user_two): @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestUpdateRequestedDeactivation: @pytest.fixture() @@ -271,14 +271,15 @@ def test_patch_requested_deactivation(self, app, user_one, user_two, url, payloa user_one.reload() assert user_one.requested_deactivation is False - def test_patch_invalid_type(self, mock_send_grid, app, user_one, url, payload): + def test_patch_invalid_type(self, app, user_one, url, payload): assert user_one.email_last_sent is None payload['data']['type'] = 'Invalid Type' - res = app.patch_json_api(url, payload, auth=user_one.auth, expect_errors=True) + with capture_notifications() as notifications: + res = app.patch_json_api(url, payload, auth=user_one.auth, expect_errors=True) + assert not notifications assert res.status_code == 409 user_one.reload() assert user_one.email_last_sent is None - assert mock_send_grid.call_count == 0 def test_exceed_throttle(self, app, user_one, url, payload): assert user_one.email_last_sent is None diff --git a/api_tests/users/views/test_user_settings_reset_password.py b/api_tests/users/views/test_user_settings_reset_password.py index 94730ec4fa9..0dbdbaec996 100644 --- a/api_tests/users/views/test_user_settings_reset_password.py +++ b/api_tests/users/views/test_user_settings_reset_password.py @@ -3,13 +3,15 @@ from api.base.settings.defaults import API_BASE from api.base.settings import CSRF_COOKIE_NAME +from osf.models import NotificationType from osf_tests.factories import ( UserFactory, ) from django.middleware import csrf -@pytest.mark.usefixtures('mock_send_grid') -@pytest.mark.usefixtures('mock_notification_send') +from tests.utils import capture_notifications + + class TestResetPassword: @pytest.fixture() @@ -28,20 +30,22 @@ def url(self): def csrf_token(self): return csrf._mask_cipher_secret(csrf._get_new_csrf_string()) - def test_get(self, mock_notification_send, app, url, user_one): + def test_get(self, app, url, user_one): encoded_email = urllib.parse.quote(user_one.email) url = f'{url}?email={encoded_email}' - res = app.get(url) + with capture_notifications() as notification: + res = app.get(url) + assert len(notification) == 1 + assert notification[0]['type'] == NotificationType.Type.RESET_PASSWORD_CONFIRMATION assert res.status_code == 200 - user_one.reload() - assert mock_notification_send.called - def test_get_invalid_email(self, mock_send_grid, app, url): + def test_get_invalid_email(self, app, url): url = f'{url}?email={'invalid_email'}' - res = app.get(url) + with capture_notifications() as notification: + res = app.get(url) + assert not notification 
assert res.status_code == 200 - assert not mock_send_grid.called def test_post(self, app, url, user_one, csrf_token): app.set_cookie(CSRF_COOKIE_NAME, csrf_token) diff --git a/conftest.py b/conftest.py index f7b7bf72b07..b30cb6271a1 100644 --- a/conftest.py +++ b/conftest.py @@ -363,22 +363,6 @@ def helpful_thing(self): yield from rolledback_transaction('function_transaction') -@pytest.fixture() -def mock_send_grid(): - with mock.patch.object(website_settings, 'USE_EMAIL', True): - with mock.patch.object(website_settings, 'USE_CELERY', False): - with mock.patch('framework.email.tasks.send_email') as mock_sendgrid: - mock_sendgrid.return_value = True - yield mock_sendgrid - - -def start_mock_send_grid(test_case): - patcher = mock.patch('framework.email.tasks.send_email') - mocked_send = patcher.start() - test_case.addCleanup(patcher.stop) - mocked_send.return_value = True - return mocked_send - @pytest.fixture def mock_gravy_valet_get_verified_links(): """This fixture is used to mock a GV request which is made during node's identifier update. More specifically, when @@ -394,23 +378,6 @@ def mock_gravy_valet_get_verified_links(): yield mock_get_verified_links -@pytest.fixture() -def mock_notification_send(): - with mock.patch.object(website_settings, 'USE_EMAIL', True): - with mock.patch.object(website_settings, 'USE_CELERY', False): - with mock.patch('osf.models.notification.Notification.send') as mock_emit: - mock_emit.return_value = None # Or True, if needed - yield mock_emit - - -def start_mock_notification_send(test_case): - patcher = mock.patch('osf.models.notification.Notification.send') - mocked_emit = patcher.start() - test_case.addCleanup(patcher.stop) - mocked_emit.return_value = None - return mocked_emit - - @pytest.fixture(autouse=True) def load_notification_types(db, *args, **kwargs): populate_notification_types(*args, **kwargs) diff --git a/docker-compose.yml b/docker-compose.yml index e9ba66bc37e..7c0f08992d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -67,9 +67,9 @@ services: - elasticsearch_data_vol:/usr/share/elasticsearch/data stdin_open: true - # Temporary: Remove when we've upgraded to ES6 elasticsearch6: - image: docker.elastic.co/elasticsearch/elasticsearch:6.3.1 + image: quay.io/centerforopenscience/elasticsearch:es6-arm-6.3.1 + platform: linux/arm64 ports: - 9201:9200 volumes: diff --git a/framework/email/__init__.py b/framework/email/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/framework/email/tasks.py b/framework/email/tasks.py deleted file mode 100644 index cf43395222e..00000000000 --- a/framework/email/tasks.py +++ /dev/null @@ -1,227 +0,0 @@ -import logging -import smtplib -from base64 import b64encode -from email.mime.text import MIMEText -from io import BytesIO - -from sendgrid import SendGridAPIClient -from sendgrid.helpers.mail import ( - Mail, - Bcc, - ReplyTo, - Category, - Attachment, - FileContent, - Email, - To, - Personalization, - Cc, - FileName, - Disposition, -) - - -from framework import sentry -from framework.celery_tasks import app -from website import settings - -logger = logging.getLogger(__name__) - - -@app.task -def send_email( - from_addr: str, - to_addr: str, - subject: str, - message: str, - reply_to: bool = False, - ttls: bool = True, - login: bool = True, - bcc_addr: [] = None, - username: str = None, - password: str = None, - categories=None, - attachment_name: str = None, - attachment_content: str | bytes | BytesIO = None, -): - """Send email to specified destination. 
- Email is sent from the email specified in FROM_EMAIL settings in the - settings module. - - Uses the Sendgrid API if ``settings.SENDGRID_API_KEY`` is set. - - :param from_addr: A string, the sender email - :param to_addr: A string, the recipient - :param subject: subject of email - :param message: body of message - :param categories: Categories to add to the email using SendGrid's - SMTPAPI. Used for email analytics. - See https://sendgrid.com/docs/User_Guide/Statistics/categories.html - This parameter is only respected if using the Sendgrid API. - ``settings.SENDGRID_API_KEY`` must be set. - - :return: True if successful - """ - if not settings.USE_EMAIL: - return - if settings.SENDGRID_API_KEY: - return _send_with_sendgrid( - from_addr=from_addr, - to_addr=to_addr, - subject=subject, - message=message, - categories=categories, - attachment_name=attachment_name, - attachment_content=attachment_content, - reply_to=reply_to, - bcc_addr=bcc_addr, - ) - else: - return _send_with_smtp( - from_addr=from_addr, - to_addr=to_addr, - subject=subject, - message=message, - ttls=ttls, - login=login, - username=username, - password=password, - reply_to=reply_to, - bcc_addr=bcc_addr, - ) - - -def _send_with_smtp( - from_addr, - to_addr, - subject, - message, - ttls=True, - login=True, - username=None, - password=None, - bcc_addr=None, - reply_to=None, -): - username = username or settings.MAIL_USERNAME - password = password or settings.MAIL_PASSWORD - - if login and (username is None or password is None): - logger.error('Mail username and password not set; skipping send.') - return False - - msg = MIMEText( - message, - 'html', - _charset='utf-8', - ) - msg['Subject'] = subject - msg['From'] = from_addr - msg['To'] = to_addr - - if reply_to: - msg['Reply-To'] = reply_to - - # Combine recipients for SMTP - recipients = [to_addr] + (bcc_addr or []) - - # Establish SMTP connection and send the email - with smtplib.SMTP(settings.MAIL_SERVER) as server: - server.ehlo() - if ttls: - server.starttls() - server.ehlo() - if login: - server.login(username, password) - server.sendmail( - from_addr=from_addr, - to_addrs=recipients, - msg=msg.as_string() - ) - return True - - -def _send_with_sendgrid( - from_addr: str, - to_addr: str, - subject: str, - message: str, - categories=None, - attachment_name: str = None, - attachment_content=None, - cc_addr=None, - bcc_addr=None, - reply_to=None, - client=None, -): - in_allowed_list = to_addr in settings.SENDGRID_EMAIL_WHITELIST - if settings.SENDGRID_WHITELIST_MODE and not in_allowed_list: - sentry.log_message( - f'SENDGRID_WHITELIST_MODE is True. Failed to send emails to non-whitelisted recipient {to_addr}.' 
- ) - return False - - client = client or SendGridAPIClient(settings.SENDGRID_API_KEY) - mail = Mail( - from_email=Email(from_addr), - html_content=message, - subject=subject, - ) - - # Personalization to handle To, CC, and BCC sendgrid client concept - personalization = Personalization() - - personalization.add_to(To(to_addr)) - - if cc_addr: - if isinstance(cc_addr, str): - cc_addr = [cc_addr] - for email in cc_addr: - personalization.add_cc(Cc(email)) - - if bcc_addr: - if isinstance(bcc_addr, str): - bcc_addr = [bcc_addr] - for email in bcc_addr: - personalization.add_bcc(Bcc(email)) - - if reply_to: - mail.reply_to = ReplyTo(reply_to) - - mail.add_personalization(personalization) - - if categories: - mail.add_category([Category(x) for x in categories]) - - if attachment_name and attachment_content: - attachment = Attachment( - file_content=FileContent(b64encode(attachment_content).decode()), - file_name=FileName(attachment_name), - disposition=Disposition('attachment') - ) - mail.add_attachment(attachment) - - response = client.send(mail) - if response.status_code not in (200, 201, 202): - sentry.log_message( - f'{response.status_code} error response from sendgrid.' - f'from_addr: {from_addr}\n' - f'to_addr: {to_addr}\n' - f'subject: {subject}\n' - 'mimetype: html\n' - f'message: {response.body[:30]}\n' - f'categories: {categories}\n' - f'attachment_name: {attachment_name}\n' - ) - else: - return True - -def _content_to_bytes(attachment_content: BytesIO | str | bytes) -> bytes: - if isinstance(attachment_content, bytes): - return attachment_content - elif isinstance(attachment_content, BytesIO): - return attachment_content.getvalue() - elif isinstance(attachment_content, str): - return attachment_content.encode() - else: - return str(attachment_content).encode() diff --git a/notifications.yaml b/notifications.yaml index 8b3e1fc7ea3..2e8b08ee6f6 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -326,10 +326,14 @@ notification_types: object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' #### DESK - - name: desk_archive_job_exceeded - __docs__: Archive job failed due to size exceeded. Sent to support desk. + - name: addon_boa_job_failure + __docs__: ... object_content_type_model_name: desk - template: 'website/templates/emails/new_pending_submissions.html.mako' + template: 'website/templates/emails/addon_boa_job_failure.html.mako' + - name: addon_boa_job_complete + __docs__: ... + object_content_type_model_name: desk + template: 'website/templates/emails/addon_boa_job_complete.html.mako' - name: desk_archive_job_copy_error __docs__: Archive job failed due to copy error. Sent to support desk. object_content_type_model_name: desk diff --git a/osf/email/__init__.py b/osf/email/__init__.py index 2d35db074c1..9ac0a16e0b4 100644 --- a/osf/email/__init__.py +++ b/osf/email/__init__.py @@ -3,7 +3,7 @@ from email.mime.text import MIMEText import waffle -from sendgrid import SendGridAPIClient +from sendgrid import SendGridAPIClient, Personalization, To, Cc, Category, ReplyTo, Bcc from sendgrid.helpers.mail import Mail from osf import features @@ -11,7 +11,7 @@ from django.core.mail import EmailMessage, get_connection -def send_email_over_smtp(to_addr, notification_type, context): +def send_email_over_smtp(to_addr, notification_type, context, email_context): """Send an email notification using SMTP. This is typically not used in productions as other 3rd party mail services are preferred. 
This is to be used for tests and on staging environments and special situations.
@@ -19,6 +19,7 @@ def send_email_over_smtp(to_addr, notification_type, context):
         to_addr (str): The recipient's email address.
         notification_type (str): The subject of the notification.
         context (dict): The email content context.
+        email_context (dict): The email context for sending, such as header changes for BCC or reply-to
     """
     if not settings.MAIL_SERVER:
         raise NotImplementedError('MAIL_SERVER is not set')
@@ -53,7 +54,7 @@
     )
 
 
-def send_email_with_send_grid(to_addr, notification_type, context):
+def send_email_with_send_grid(to_addr, notification_type, context, email_context):
     """Send an email notification using SendGrid.
 
     Args:
@@ -70,6 +71,39 @@
         subject=notification_type,
         html_content=context.get('message', '')
     )
+    in_allowed_list = to_addr in settings.SENDGRID_EMAIL_WHITELIST
+    if settings.SENDGRID_WHITELIST_MODE and not in_allowed_list:
+        from framework import sentry
+
+        sentry.log_message(
+            f'SENDGRID_WHITELIST_MODE is True. Failed to send emails to non-whitelisted recipient {to_addr}.'
+        )
+        return False
+
+    # Personalization to handle To, CC, and BCC sendgrid client concept
+    personalization = Personalization()
+
+    personalization.add_to(To(to_addr))
+
+    if cc_addr := email_context.get('cc_addr'):
+        if isinstance(cc_addr, str):
+            cc_addr = [cc_addr]
+        for email in cc_addr:
+            personalization.add_cc(Cc(email))
+
+    if bcc_addr := email_context.get('bcc_addr'):
+        if isinstance(bcc_addr, str):
+            bcc_addr = [bcc_addr]
+        for email in bcc_addr:
+            personalization.add_bcc(Bcc(email))
+
+    if reply_to := email_context.get('reply_to'):
+        message.reply_to = ReplyTo(reply_to)
+
+    message.add_personalization(personalization)
+
+    if email_categories := email_context.get('email_categories'):
+        message.add_category([Category(x) for x in email_categories])
     try:
         sg = SendGridAPIClient(settings.SENDGRID_API_KEY)
diff --git a/osf/models/notification.py b/osf/models/notification.py
index 4294eb797eb..1b749af2b9b 100644
--- a/osf/models/notification.py
+++ b/osf/models/notification.py
@@ -18,9 +18,15 @@ class Notification(models.Model):
     seen = models.DateTimeField(null=True, blank=True)
     created = models.DateTimeField(auto_now_add=True)
 
-    def send(self, protocol_type='email', destination_address=None):
-        if not settings.USE_EMAIL:
-            return
+    def send(
+        self,
+        protocol_type='email',
+        destination_address=None,
+        email_context=None,
+    ):
+        """
+        Send this notification over the requested protocol; currently only email is supported.
+        """
         if not protocol_type == 'email':
             raise NotImplementedError(f'Protocol type {protocol_type}. Email notifications are only implemented.')
@@ -30,7 +36,8 @@ def send(self, protocol_type='email', destination_address=None):
             email.send_email_over_smtp(
                 recipient_address,
                 self.subscription.notification_type,
-                self.event_context
+                self.event_context,
+                email_context
             )
         elif protocol_type == 'email' and settings.DEV_MODE:
             if not api_settings.CI_ENV:
@@ -39,12 +46,14 @@
                     f"\nto={recipient_address}"
                     f"\ntype={self.subscription.notification_type.name}"
                     f"\ncontext={self.event_context}"
+                    f"\nemail_context={email_context}"
                 )
         elif protocol_type == 'email':
             email.send_email_with_send_grid(
                 self.subscription.user,
                 self.subscription.notification_type,
-                self.event_context
+                self.event_context,
+                email_context
             )
         else:
             raise NotImplementedError(f'protocol `{protocol_type}` is not supported.')
diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py
index 7c651c511b5..66e58281db4 100644
--- a/osf/models/notification_type.py
+++ b/osf/models/notification_type.py
@@ -26,6 +26,9 @@ class NotificationType(models.Model):
 
     class Type(str, Enum):
         # Desk notifications
+        ADDONS_BOA_JOB_FAILURE = 'addon_boa_job_failure'
+        ADDONS_BOA_JOB_COMPLETE = 'addon_boa_job_complete'
+
         DESK_REQUEST_EXPORT = 'desk_request_export'
         DESK_REQUEST_DEACTIVATION = 'desk_request_deactivation'
         DESK_OSF_SUPPORT_EMAIL = 'desk_osf_support_email'
@@ -202,7 +205,8 @@ def emit(
         destination_address=None,
         subscribed_object=None,
         message_frequency='instantly',
-        event_context=None
+        event_context=None,
+        email_context=None,
     ):
         """Emit a notification to a user by creating Notification and NotificationSubscription objects.
 
@@ -212,6 +216,7 @@
             subscribed_object (optional): The object the subscription is related to.
             message_frequency (optional): Initializing message frequency.
             event_context (dict, optional): Context for rendering the notification template.
+            email_context (dict, optional): Context for additional email delivery options, such as BCC or reply-to headers.
         """
         from osf.models.notification_subscription import NotificationSubscription
         subscription, created = NotificationSubscription.objects.get_or_create(
@@ -225,7 +230,10 @@
         Notification.objects.create(
             subscription=subscription,
             event_context=event_context
-        ).send(destination_address=destination_address)
+        ).send(
+            destination_address=destination_address,
+            email_context=email_context
+        )
 
     def add_user_to_subscription(self, user, *args, **kwargs):
         """
diff --git a/osf/models/user_message.py b/osf/models/user_message.py
index ac77cefe629..126e4be5bd6 100644
--- a/osf/models/user_message.py
+++ b/osf/models/user_message.py
@@ -3,8 +3,8 @@
 from django.db.models.signals import post_save
 from django.dispatch import receiver
 
+from . import NotificationType
 from .base import BaseModel, ObjectIDMixin
-from website.mails import send_mail, USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST
 
 
 class MessageTypes(models.TextChoices):
@@ -31,7 +31,7 @@ def get_template(cls: Type['MessageTypes'], message_type: str) -> str:
             str: The email template string for the specified message type.
         """
         return {
-            cls.INSTITUTIONAL_REQUEST: USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST
+            cls.INSTITUTIONAL_REQUEST: NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST
         }[message_type]
 
 
@@ -84,18 +84,20 @@ def send_institution_request(self) -> None:
         """
         Sends an institutional access request email to the recipient of the message.
""" - send_mail( - mail=MessageTypes.get_template(self.message_type), - to_addr=self.recipient.username, - bcc_addr=[self.sender.username] if self.is_sender_BCCed else None, - reply_to=self.sender.username if self.reply_to else None, + NotificationType.objects.get( + name=MessageTypes.get_template(self.message_type) + ).emit( user=self.recipient, - **{ + event_context={ 'sender': self.sender, 'recipient': self.recipient, 'message_text': self.message_text, 'institution': self.institution, }, + email_context={ + 'bcc_addr': [self.sender.username] if self.is_sender_BCCed else None, + 'reply_to': self.sender.username if self.reply_to else None, + } ) diff --git a/osf_tests/management_commands/test_check_crossref_dois.py b/osf_tests/management_commands/test_check_crossref_dois.py index 993c7e6731e..802ce4fde0b 100644 --- a/osf_tests/management_commands/test_check_crossref_dois.py +++ b/osf_tests/management_commands/test_check_crossref_dois.py @@ -4,6 +4,10 @@ import json from datetime import timedelta import responses + +from osf.models import NotificationType +from tests.utils import capture_notifications + HERE = os.path.dirname(os.path.abspath(__file__)) @@ -14,7 +18,6 @@ @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestCheckCrossrefDOIs: @pytest.fixture() @@ -61,7 +64,9 @@ def test_check_crossref_dois(self, crossref_response, stuck_preprint, preprint): assert stuck_preprint.identifiers.count() == 1 assert stuck_preprint.identifiers.first().value == doi - def test_report_stuck_dois(self, mock_send_grid, stuck_preprint): - report_stuck_dois(dry_run=False) + def test_report_stuck_dois(self, stuck_preprint): + with capture_notifications() as notifications: + report_stuck_dois(dry_run=False) - mock_send_grid.assert_called() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_DENIED diff --git a/osf_tests/management_commands/test_email_all_users.py b/osf_tests/management_commands/test_email_all_users.py index 14df656ee52..9141e6b50d4 100644 --- a/osf_tests/management_commands/test_email_all_users.py +++ b/osf_tests/management_commands/test_email_all_users.py @@ -2,11 +2,13 @@ from django.utils import timezone +from osf.models import NotificationType from osf_tests.factories import UserFactory from osf.management.commands.email_all_users import email_all_users +from tests.utils import capture_notifications + -@pytest.mark.usefixtures('mock_send_grid') class TestEmailAllUsers: @pytest.fixture() @@ -41,25 +43,29 @@ def unregistered_user(self): return UserFactory(is_registered=False) @pytest.mark.django_db - def test_email_all_users_dry(self, mock_send_grid, superuser): - email_all_users('TOU_NOTIF', dry_run=True) - - mock_send_grid.assert_called() + def test_email_all_users_dry(self, superuser): + with capture_notifications() as notifications: + email_all_users('TOU_NOTIF', dry_run=True) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED @pytest.mark.django_db def test_dont_email_inactive_users( - self, mock_send_grid, deleted_user, inactive_user, unconfirmed_user, unregistered_user): - - email_all_users('TOU_NOTIF') + self, deleted_user, inactive_user, unconfirmed_user, unregistered_user): - mock_send_grid.assert_not_called() + with capture_notifications() as notifications: + email_all_users('TOU_NOTIF') + assert not notifications @pytest.mark.django_db - def test_email_all_users_offset(self, mock_send_grid, user, user2): - 
email_all_users('TOU_NOTIF', offset=1, start_id=0) + def test_email_all_users_offset(self, user, user2): + with capture_notifications() as notifications: + email_all_users('TOU_NOTIF', offset=1, start_id=0) - email_all_users('TOU_NOTIF', offset=1, start_id=1) + email_all_users('TOU_NOTIF', offset=1, start_id=1) - email_all_users('TOU_NOTIF', offset=1, start_id=2) + email_all_users('TOU_NOTIF', offset=1, start_id=2) - assert mock_send_grid.call_count == 2 + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[1]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index 65ebc719789..f653b20ea25 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -22,7 +22,7 @@ from website.archiver import listeners from website.archiver.tasks import * # noqa: F403 -from osf.models import Guid, RegistrationSchema, Registration +from osf.models import Guid, RegistrationSchema, Registration, NotificationType from osf.models.archive import ArchiveTarget, ArchiveJob from osf.models.base import generate_object_id from osf.utils.migrations import map_schema_to_schemablocks @@ -32,8 +32,7 @@ from osf_tests import factories from tests.base import OsfTestCase, fake from tests import utils as test_utils -from tests.utils import unique as _unique -from conftest import start_mock_send_grid +from tests.utils import unique as _unique, capture_notifications pytestmark = pytest.mark.django_db @@ -721,45 +720,49 @@ def test_archive_success_same_file_in_component(self): assert child_reg._id in question['extra'][0]['viewUrl'] -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverUtils(ArchiverTestCase): - def setUp(self): - super().setUp() - self.mock_send_grid = start_mock_send_grid(self) - def test_handle_archive_fail(self): - archiver_utils.handle_archive_fail( - ARCHIVER_NETWORK_ERROR, - self.src, - self.dst, - self.user, - {} - ) - assert self.mock_send_grid.call_count == 2 + with capture_notifications() as notifications: + archiver_utils.handle_archive_fail( + ARCHIVER_NETWORK_ERROR, + self.src, + self.dst, + self.user, + {} + ) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[1]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED self.dst.reload() assert self.dst.is_deleted def test_handle_archive_fail_copy(self): - archiver_utils.handle_archive_fail( - ARCHIVER_NETWORK_ERROR, - self.src, - self.dst, - self.user, - {} - ) - assert self.mock_send_grid.call_count == 2 + with capture_notifications() as notifications: + archiver_utils.handle_archive_fail( + ARCHIVER_NETWORK_ERROR, + self.src, + self.dst, + self.user, + {} + ) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[1]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED def test_handle_archive_fail_size(self): - archiver_utils.handle_archive_fail( - ARCHIVER_SIZE_EXCEEDED, - self.src, - self.dst, - self.user, - {} - ) - assert self.mock_send_grid.call_count == 2 + with capture_notifications() as notifications: + archiver_utils.handle_archive_fail( + ARCHIVER_SIZE_EXCEEDED, + self.src, + self.dst, + self.user, + {} + ) + assert len(notifications) == 2 + assert notifications[0]['type'] == 
NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[1]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED def test_aggregate_file_tree_metadata(self): a_stat_result = archiver_utils.aggregate_file_tree_metadata('dropbox', FILE_TREE, self.user) @@ -846,14 +849,9 @@ def test_get_file_map_memoization(self): archiver_utils.get_file_map(node) assert mock_get_file_tree.call_count == call_count -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverListeners(ArchiverTestCase): - def setUp(self): - super().setUp() - self.mock_send_grid = start_mock_send_grid(self) - @mock.patch('website.archiver.tasks.archive') @mock.patch('website.archiver.utils.before_archive') def test_after_register(self, mock_before_archive, mock_archive): @@ -905,8 +903,9 @@ def test_archive_callback_pending(self, mock_delay): ) self.dst.archive_job.save() with mock.patch('website.archiver.utils.handle_archive_fail') as mock_fail: - listeners.archive_callback(self.dst) - assert not self.mock_send_grid.called + with capture_notifications() as notifications: + listeners.archive_callback(self.dst) + assert not notifications assert not mock_fail.called assert mock_delay.called @@ -914,8 +913,9 @@ def test_archive_callback_pending(self, mock_delay): def test_archive_callback_done_success(self, mock_archive_success): self.dst.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) self.dst.archive_job.save() - listeners.archive_callback(self.dst) - assert self.mock_send_grid.call_count == 0 + with capture_notifications() as notifications: + listeners.archive_callback(self.dst) + assert not notifications @mock.patch('website.archiver.tasks.archive_success.delay') def test_archive_callback_done_embargoed(self, mock_archive_success): @@ -929,8 +929,9 @@ def test_archive_callback_done_embargoed(self, mock_archive_success): self.dst.embargo_registration(self.user, end_date) self.dst.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) self.dst.save() - listeners.archive_callback(self.dst) - assert self.mock_send_grid.call_count == 0 + with capture_notifications() as notifications: + listeners.archive_callback(self.dst) + assert not notifications def test_archive_callback_done_errors(self): self.dst.archive_job.update_target('osfstorage', ARCHIVER_FAILURE) @@ -1021,16 +1022,19 @@ def test_archive_callback_on_tree_sends_only_one_email(self, mock_arhive_success node.archive_job.update_target('osfstorage', ARCHIVER_INITIATED) rchild.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) rchild.save() - listeners.archive_callback(rchild) - assert not self.mock_send_grid.called + with capture_notifications() as notifications: + listeners.archive_callback(rchild) + assert not notifications reg.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) reg.save() - listeners.archive_callback(reg) - assert not self.mock_send_grid.called + with capture_notifications() as notifications: + listeners.archive_callback(reg) + assert not notifications rchild2.archive_job.update_target('osfstorage', ARCHIVER_SUCCESS) rchild2.save() - listeners.archive_callback(rchild2) - assert not self.mock_send_grid.called + with capture_notifications() as notifications: + listeners.archive_callback(rchild2) + assert not notifications class TestArchiverScripts(ArchiverTestCase): @@ -1078,14 +1082,9 @@ def test_find_failed_registrations(self): assert pk not in failed -@mock.patch('website.mails.settings.USE_EMAIL', True) 
@mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverBehavior(OsfTestCase): - def setUp(self): - super().setUp() - self.mock_send_grid = start_mock_send_grid(self) - @mock.patch('osf.models.AbstractNode.update_search') def test_archiving_registrations_not_added_to_search_before_archival(self, mock_update_search): proj = factories.ProjectFactory() diff --git a/osf_tests/test_collection.py b/osf_tests/test_collection.py index c28dea3eb99..0e39c011f65 100644 --- a/osf_tests/test_collection.py +++ b/osf_tests/test_collection.py @@ -5,8 +5,9 @@ from framework.auth import Auth -from osf.models import Collection +from osf.models import Collection, NotificationType from osf.exceptions import NodeStateError +from tests.utils import capture_notifications from website.views import find_bookmark_collection from .factories import ( UserFactory, @@ -71,7 +72,6 @@ def test_can_remove_root_folder_structure_without_cascading(self, user, auth): @pytest.mark.enable_bookmark_creation -@pytest.mark.usefixtures('mock_send_grid') class TestImplicitRemoval: @pytest.fixture @@ -126,22 +126,23 @@ def test_node_removed_from_collection_on_privacy_change(self, auth, collected_no assert associated_collections.filter(collection=bookmark_collection).exists() @mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits - def test_node_removed_from_collection_on_privacy_change_notify(self, auth, provider_collected_node, bookmark_collection, mock_send_grid): + def test_node_removed_from_collection_on_privacy_change_notify(self, auth, provider_collected_node, bookmark_collection): associated_collections = provider_collected_node.guids.first().collectionsubmission_set assert associated_collections.count() == 3 - mock_send_grid.reset_mock() - provider_collected_node.set_privacy('private', auth=auth) - assert mock_send_grid.called - assert len(mock_send_grid.call_args_list) == 1 + with capture_notifications() as notifications: + provider_collected_node.set_privacy('private', auth=auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED @mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits - def test_node_removed_from_collection_on_privacy_change_no_provider(self, auth, collected_node, bookmark_collection, mock_send_grid): + def test_node_removed_from_collection_on_privacy_change_no_provider(self, auth, collected_node, bookmark_collection): associated_collections = collected_node.guids.first().collectionsubmission_set assert associated_collections.count() == 3 - collected_node.set_privacy('private', auth=auth) - assert not mock_send_grid.called + with capture_notifications() as notifications: + collected_node.set_privacy('private', auth=auth) + assert not notifications def test_node_removed_from_collection_on_delete(self, collected_node, bookmark_collection, auth): associated_collections = collected_node.guids.first().collectionsubmission_set diff --git a/osf_tests/test_collection_submission.py b/osf_tests/test_collection_submission.py index 76baa2de752..d2dd906b692 100644 --- a/osf_tests/test_collection_submission.py +++ b/osf_tests/test_collection_submission.py @@ -303,7 +303,6 @@ def test_cancel_succeeds(self, node, moderated_collection_submission): @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestUnmoderatedCollectionSubmission: def test_moderated_submit(self, unmoderated_collection_submission): @@ 
-386,7 +385,6 @@ def test_cancel_succeeds(self, node, unmoderated_collection_submission): @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestHybridModeratedCollectionSubmission: @pytest.mark.parametrize('user_role', UserRoles.excluding(UserRoles.MODERATOR)) diff --git a/osf_tests/test_institution.py b/osf_tests/test_institution.py index eca6737b6e5..d4442ad8590 100644 --- a/osf_tests/test_institution.py +++ b/osf_tests/test_institution.py @@ -4,7 +4,7 @@ import pytest from addons.osfstorage.models import Region -from osf.models import Institution, InstitutionStorageRegion +from osf.models import Institution, InstitutionStorageRegion, NotificationType from osf_tests.factories import ( AuthUserFactory, InstitutionFactory, @@ -12,6 +12,7 @@ RegionFactory, UserFactory, ) +from tests.utils import capture_notifications @pytest.mark.django_db @@ -109,7 +110,6 @@ def test_non_group_member_doesnt_have_perms(self, institution, user): @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestInstitutionManager: def test_deactivated_institution_not_in_default_queryset(self): @@ -146,7 +146,7 @@ def test_reactivate_institution(self): institution.reactivate() assert institution.deactivated is None - def test_send_deactivation_email_call_count(self, mock_send_grid): + def test_send_deactivation_email_call_count(self): institution = InstitutionFactory() user_1 = UserFactory() user_1.add_or_update_affiliated_institution(institution) @@ -154,16 +154,21 @@ def test_send_deactivation_email_call_count(self, mock_send_grid): user_2 = UserFactory() user_2.add_or_update_affiliated_institution(institution) user_2.save() - institution._send_deactivation_email() - assert mock_send_grid.call_count == 2 + with capture_notifications() as notifications: + institution._send_deactivation_email() + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_DENIED + assert notifications[1]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_DENIED - def test_send_deactivation_email_call_args(self, mock_send_grid): + def test_send_deactivation_email_call_args(self): institution = InstitutionFactory() user = UserFactory() user.add_or_update_affiliated_institution(institution) user.save() - institution._send_deactivation_email() - mock_send_grid.assert_called() + with capture_notifications() as notifications: + institution._send_deactivation_email() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_DENIED def test_deactivate_inactive_institution_noop(self): institution = InstitutionFactory() diff --git a/osf_tests/test_merging_users.py b/osf_tests/test_merging_users.py index ee13c7bc107..2a7400bd40d 100644 --- a/osf_tests/test_merging_users.py +++ b/osf_tests/test_merging_users.py @@ -21,17 +21,15 @@ from importlib import import_module from django.conf import settings as django_conf_settings from osf.models import UserSessionMap -from tests.utils import run_celery_tasks +from tests.utils import run_celery_tasks, capture_notifications from waffle.testutils import override_flag from osf.features import ENABLE_GV -from conftest import start_mock_send_grid SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore @pytest.mark.enable_implicit_clean @pytest.mark.enable_bookmark_creation -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class TestUserMerging(OsfTestCase): def setUp(self): @@ -39,7 
+37,6 @@ def setUp(self): self.user = UserFactory() with self.context: handlers.celery_before_request() - self.mock_send_grid = start_mock_send_grid(self) def _add_unconfirmed_user(self): self.unconfirmed = UnconfirmedUserFactory() @@ -294,7 +291,8 @@ def test_merge_doesnt_send_signal(self): #Explictly reconnect signal as it is disconnected by default for test contributor_added.connect(notify_added_contributor) other_user = UserFactory() - with override_flag(ENABLE_GV, active=True): - self.user.merge_user(other_user) + with capture_notifications() as notifications: + with override_flag(ENABLE_GV, active=True): + self.user.merge_user(other_user) + assert not notifications assert other_user.merged_by._id == self.user._id - assert self.mock_send_grid.called is False diff --git a/osf_tests/test_queued_mail.py b/osf_tests/test_queued_mail.py deleted file mode 100644 index 395b770a61d..00000000000 --- a/osf_tests/test_queued_mail.py +++ /dev/null @@ -1,155 +0,0 @@ -# Ported from tests.test_mails -import datetime as dt - - -import pytest -from django.utils import timezone -from waffle.testutils import override_switch - -from .factories import UserFactory, NodeFactory - -from osf.features import DISABLE_ENGAGEMENT_EMAILS -from osf.models.queued_mail import ( - queue_mail, WELCOME_OSF4M, - NO_LOGIN, NO_ADDON, NEW_PUBLIC_PROJECT -) -from website.mails import mails -from website.settings import DOMAIN - -@pytest.fixture() -def user(): - return UserFactory(is_registered=True) - -@pytest.mark.django_db -class TestQueuedMail: - - def queue_mail(self, mail, user, send_at=None, **kwargs): - mail = queue_mail( - to_addr=user.username if user else user.username, - send_at=send_at or timezone.now(), - user=user, - mail=mail, - fullname=user.fullname if user else user.username, - **kwargs - ) - return mail - - def test_no_login_presend_for_active_user(self, user): - mail = self.queue_mail(mail=NO_LOGIN, user=user) - user.date_last_login = timezone.now() + dt.timedelta(seconds=10) - user.save() - assert mail.send_mail() is False - - def test_no_login_presend_for_inactive_user(self, user): - mail = self.queue_mail(mail=NO_LOGIN, user=user) - user.date_last_login = timezone.now() - dt.timedelta(weeks=10) - user.save() - assert timezone.now() - dt.timedelta(days=1) > user.date_last_login - assert bool(mail.send_mail()) is True - - def test_no_addon_presend(self, user): - mail = self.queue_mail(mail=NO_ADDON, user=user) - assert mail.send_mail() is True - - def test_new_public_project_presend_for_no_project(self, user): - mail = self.queue_mail( - mail=NEW_PUBLIC_PROJECT, - user=user, - project_title='Oh noes', - nid='', - ) - assert bool(mail.send_mail()) is False - - def test_new_public_project_presend_success(self, user): - node = NodeFactory(is_public=True) - mail = self.queue_mail( - mail=NEW_PUBLIC_PROJECT, - user=user, - project_title='Oh yass', - nid=node._id - ) - assert bool(mail.send_mail()) is True - - def test_welcome_osf4m_presend(self, user): - user.date_last_login = timezone.now() - dt.timedelta(days=13) - user.save() - mail = self.queue_mail( - mail=WELCOME_OSF4M, - user=user, - conference='Buttjamz conference', - fid='', - domain=DOMAIN - ) - assert bool(mail.send_mail()) is True - assert mail.data['downloads'] == 0 - - def test_finding_other_emails_sent_to_user(self, user): - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert len(mail.find_sent_of_same_type_and_user()) == 0 - mail.send_mail() - assert len(mail.find_sent_of_same_type_and_user()) == 1 - - def 
test_user_is_active(self, user): - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert bool(mail.send_mail()) is True - - def test_user_is_not_active_no_password(self): - user = UserFactory.build() - user.set_unusable_password() - user.save() - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - def test_user_is_not_active_not_registered(self): - user = UserFactory(is_registered=False) - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - def test_user_is_not_active_is_merged(self): - other_user = UserFactory() - user = UserFactory(merged_by=other_user) - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - def test_user_is_not_active_is_disabled(self): - user = UserFactory(date_disabled=timezone.now()) - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - def test_user_is_not_active_is_not_confirmed(self): - user = UserFactory(date_confirmed=None) - mail = self.queue_mail( - user=user, - mail=NO_ADDON, - ) - assert mail.send_mail() is False - - def test_disabled_queued_emails_not_sent_if_switch_active(self, user): - with override_switch(DISABLE_ENGAGEMENT_EMAILS, active=True): - assert self.queue_mail(mail=NO_ADDON, user=user) is False - assert self.queue_mail(mail=NO_LOGIN, user=user) is False - assert self.queue_mail(mail=WELCOME_OSF4M, user=user) is False - assert self.queue_mail(mail=NEW_PUBLIC_PROJECT, user=user) is False - - def test_disabled_triggered_emails_not_sent_if_switch_active(self): - with override_switch(DISABLE_ENGAGEMENT_EMAILS, active=True): - assert mails.send_mail(to_addr='', mail=mails.WELCOME) is False - assert mails.send_mail(to_addr='', mail=mails.WELCOME_OSF4I) is False diff --git a/osf_tests/test_sanctions.py b/osf_tests/test_sanctions.py index de4161ced4a..54530dc1324 100644 --- a/osf_tests/test_sanctions.py +++ b/osf_tests/test_sanctions.py @@ -135,7 +135,6 @@ def registration(self, request, contributor): registration.save() return registration - @mock.patch('website.mails.settings.USE_EMAIL', False) @pytest.mark.parametrize('reviews_workflow', [None, 'pre-moderation']) @pytest.mark.parametrize('branched_from_node', [True, False]) def test_render_admin_emails(self, registration, reviews_workflow, branched_from_node): @@ -149,7 +148,6 @@ def test_render_admin_emails(self, registration, reviews_workflow, branched_from registration.sanction.ask([(registration.creator, registration)]) assert True # mail rendered successfully - @mock.patch('website.mails.settings.USE_EMAIL', False) @pytest.mark.parametrize('reviews_workflow', [None, 'pre-moderation']) @pytest.mark.parametrize('branched_from_node', [True, False]) def test_render_non_admin_emails( diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index 40965c7cf31..3b3af1458cf 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -4,11 +4,12 @@ from api.providers.workflows import Workflows from framework.exceptions import PermissionsError from osf.exceptions import PreviousSchemaResponseError, SchemaResponseStateError, SchemaResponseUpdateError -from osf.models import RegistrationSchema, RegistrationSchemaBlock, SchemaResponseBlock +from osf.models import RegistrationSchema, RegistrationSchemaBlock, SchemaResponseBlock, NotificationType from osf.models import schema_response # import module for mocking purposes from osf.utils.workflows import 
ApprovalStates, SchemaResponseTriggers from osf_tests.factories import AuthUserFactory, ProjectFactory, RegistrationFactory, RegistrationProviderFactory from osf_tests.utils import get_default_test_schema, _ensure_subscriptions +from tests.utils import capture_notifications from website.notifications import emails @@ -95,7 +96,6 @@ def revised_response(initial_response): @pytest.mark.enable_bookmark_creation @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestCreateSchemaResponse(): def test_create_initial_response_sets_attributes(self, registration, schema): @@ -142,11 +142,12 @@ def test_create_initial_response_assigns_default_values(self, registration): for block in response.response_blocks.all(): assert block.response == DEFAULT_SCHEMA_RESPONSE_VALUES[block.schema_key] - def test_create_initial_response_does_not_notify(self, registration, admin_user, mock_send_grid): - schema_response.SchemaResponse.create_initial_response( - parent=registration, initiator=admin_user - ) - assert not mock_send_grid.called + def test_create_initial_response_does_not_notify(self, registration, admin_user): + with capture_notifications() as notifications: + schema_response.SchemaResponse.create_initial_response( + parent=registration, initiator=admin_user + ) + assert not notifications def test_create_initial_response_fails_if_no_schema_and_no_parent_schema(self, registration): registration.registered_schema.clear() @@ -252,13 +253,14 @@ def test_create_from_previous_response(self, registration, initial_response): assert set(revised_response.response_blocks.all()) == set(initial_response.response_blocks.all()) def test_create_from_previous_response_notification( - self, initial_response, admin_user, notification_recipients, mock_send_grid): - - schema_response.SchemaResponse.create_from_previous_response( - previous_response=initial_response, initiator=admin_user - ) + self, initial_response, admin_user, notification_recipients): - assert mock_send_grid.called + with capture_notifications() as notifications: + schema_response.SchemaResponse.create_from_previous_response( + previous_response=initial_response, initiator=admin_user + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED @pytest.mark.parametrize( 'invalid_response_state', @@ -542,7 +544,6 @@ def test_delete_fails_if_state_is_invalid(self, invalid_response_state, initial_ @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestUnmoderatedSchemaResponseApprovalFlows(): def test_submit_response_adds_pending_approvers( @@ -574,23 +575,25 @@ def test_submit_response_writes_schema_response_action(self, initial_response, a assert new_action.trigger == SchemaResponseTriggers.SUBMIT.db_name def test_submit_response_notification( - self, revised_response, admin_user, notification_recipients, mock_send_grid): + self, revised_response, admin_user, notification_recipients): revised_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) revised_response.update_responses({'q1': 'must change one response or can\'t submit'}) revised_response.revision_justification = 'has for valid revision_justification for submission' revised_response.save() - revised_response.submit(user=admin_user, required_approvers=[admin_user]) - - assert mock_send_grid.called + with capture_notifications() as notifications: + revised_response.submit(user=admin_user, required_approvers=[admin_user]) + assert len(notifications) == 1 + assert 
notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - def test_no_submit_notification_on_initial_response(self, initial_response, admin_user, mock_send_grid): + def test_no_submit_notification_on_initial_response(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) initial_response.update_responses({'q1': 'must change one response or can\'t submit'}) initial_response.revision_justification = 'has for valid revision_justification for submission' initial_response.save() - initial_response.submit(user=admin_user, required_approvers=[admin_user]) - assert not mock_send_grid.called + with capture_notifications() as notifications: + initial_response.submit(user=admin_user, required_approvers=[admin_user]) + assert not notifications def test_submit_response_requires_user(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) @@ -672,23 +675,26 @@ def test_approve_response_writes_schema_response_action( ).count() == 2 def test_approve_response_notification( - self, revised_response, admin_user, alternate_user, notification_recipients, mock_send_grid): + self, revised_response, admin_user, alternate_user, notification_recipients): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user, alternate_user) - mock_send_grid.reset_mock() - revised_response.approve(user=admin_user) - assert not mock_send_grid.called # Should only send email on final approval - revised_response.approve(user=alternate_user) - assert mock_send_grid.called + with capture_notifications() as notifications: + revised_response.approve(user=admin_user) + assert not notifications # Should only send email on final approval + with capture_notifications() as notifications: + revised_response.approve(user=alternate_user) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - def test_no_approve_notification_on_initial_response(self, initial_response, admin_user, mock_send_grid): + def test_no_approve_notification_on_initial_response(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) initial_response.save() initial_response.pending_approvers.add(admin_user) - initial_response.approve(user=admin_user) - assert not mock_send_grid.called + with capture_notifications() as notifications: + initial_response.approve(user=admin_user) + assert not notifications def test_approve_response_requires_user(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -739,22 +745,24 @@ def test_reject_response_writes_schema_response_action(self, initial_response, a assert new_action.trigger == SchemaResponseTriggers.ADMIN_REJECT.db_name def test_reject_response_notification( - self, revised_response, admin_user, notification_recipients, mock_send_grid): + self, revised_response, admin_user, notification_recipients): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user) - revised_response.reject(user=admin_user) - - assert mock_send_grid.called + with capture_notifications() as notifications: + revised_response.reject(user=admin_user) + assert len(notifications) == 1 + assert notifications[0]['type'] == 
NotificationType.Type.PROVIDER_MODERATOR_ADDED - def test_no_reject_notification_on_initial_response(self, initial_response, admin_user, mock_send_grid): + def test_no_reject_notification_on_initial_response(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) initial_response.save() initial_response.pending_approvers.add(admin_user) - initial_response.reject(user=admin_user) - assert not mock_send_grid.called + with capture_notifications() as notifications: + initial_response.reject(user=admin_user) + assert not notifications def test_reject_response_requires_user(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -801,7 +809,6 @@ def test_internal_accept_clears_pending_approvers(self, initial_response, admin_ @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestModeratedSchemaResponseApprovalFlows(): @pytest.fixture @@ -848,13 +855,15 @@ def test_schema_response_action_to_state_following_moderated_approve_is_pending_ assert new_action.to_state == ApprovalStates.PENDING_MODERATION.db_name assert new_action.trigger == SchemaResponseTriggers.APPROVE.db_name - def test_accept_notification_sent_on_admin_approval(self, revised_response, admin_user, mock_send_grid): + def test_accept_notification_sent_on_admin_approval(self, revised_response, admin_user): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user) - revised_response.approve(user=admin_user) - assert mock_send_grid.called + with capture_notifications() as notifications: + revised_response.approve(user=admin_user) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED def test_moderators_notified_on_admin_approval(self, revised_response, admin_user, moderator): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -900,21 +909,23 @@ def test_moderator_accept_writes_schema_response_action(self, initial_response, assert new_action.trigger == SchemaResponseTriggers.ACCEPT.db_name def test_moderator_accept_notification( - self, revised_response, moderator, notification_recipients, mock_send_grid): + self, revised_response, moderator, notification_recipients): revised_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) revised_response.save() - revised_response.accept(user=moderator) - - assert mock_send_grid.called + with capture_notifications() as notifications: + revised_response.accept(user=moderator) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED def test_no_moderator_accept_notification_on_initial_response( - self, initial_response, moderator, mock_send_grid): + self, initial_response, moderator): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) initial_response.save() - initial_response.accept(user=moderator) - assert not mock_send_grid.called + with capture_notifications() as notifications: + initial_response.accept(user=moderator) + assert not notifications def test_moderator_reject(self, initial_response, admin_user, moderator): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) @@ -938,21 +949,23 @@ def test_moderator_reject_writes_schema_response_action( assert new_action.trigger == SchemaResponseTriggers.MODERATOR_REJECT.db_name 
def test_moderator_reject_notification( - self, revised_response, moderator, notification_recipients, mock_send_grid): + self, revised_response, moderator, notification_recipients): revised_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) revised_response.save() - revised_response.reject(user=moderator) - - assert mock_send_grid.called + with capture_notifications() as notifications: + revised_response.reject(user=moderator) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED def test_no_moderator_reject_notification_on_initial_response( - self, initial_response, moderator, mock_send_grid): + self, initial_response, moderator): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) initial_response.save() - initial_response.reject(user=moderator) - assert not mock_send_grid.called + with capture_notifications() as notifications: + initial_response.reject(user=moderator) + assert not notifications def test_moderator_cannot_submit(self, initial_response, moderator): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index 70d3a7ceb17..8a8a6f29d72 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -18,6 +18,7 @@ from framework.auth.signals import user_account_merged from framework.analytics import get_total_activity_count from framework.exceptions import PermissionsError +from tests.utils import capture_notifications from website import settings from website import filters from website.views import find_bookmark_collection @@ -32,7 +33,7 @@ DraftRegistrationContributor, DraftRegistration, DraftNode, - UserSessionMap, + UserSessionMap, NotificationType, ) from osf.models.institution_affiliation import get_user_by_institution_identity from addons.github.tests.factories import GitHubAccountFactory @@ -885,8 +886,6 @@ def test_get_user_by_cookie_no_session(self): assert OSFUser.from_cookie(cookie) is None -@pytest.mark.usefixtures('mock_send_grid') -@pytest.mark.usefixtures('mock_notification_send') class TestChangePassword: def test_change_password(self, user): @@ -898,19 +897,23 @@ def test_change_password(self, user): user.change_password(old_password, new_password, confirm_password) assert bool(user.check_password(new_password)) is True - def test_set_password_notify_default(self, mock_notification_send, user): + def test_set_password_notify_default(self, user): old_password = 'password' - user.set_password(old_password) - user.save() - assert mock_notification_send.called is True + with capture_notifications() as notifications: + user.set_password(old_password) + user.save() - def test_set_password_no_notify(self, mock_notification_send, user): + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PASSWORD_CHANGED + + def test_set_password_no_notify(self, user): old_password = 'password' - user.set_password(old_password, notify=False) - user.save() - assert mock_notification_send.called is False + with capture_notifications() as notifications: + user.set_password(old_password, notify=False) + user.save() + assert not notifications - def test_check_password_upgrade_hasher_no_notify(self, mock_notification_send, user, settings): + def test_check_password_upgrade_hasher_no_notify(self, user, settings): # NOTE: settings fixture comes from pytest-django. 
# changes get reverted after tests run settings.PASSWORD_HASHERS = ( @@ -919,9 +922,10 @@ def test_check_password_upgrade_hasher_no_notify(self, mock_notification_send, u ) raw_password = 'password' user.password = 'sha1$lNb72DKWDv6P$e6ae16dada9303ae0084e14fc96659da4332bb05' - user.check_password(raw_password) + with capture_notifications() as notifications: + user.check_password(raw_password) + assert not notifications assert user.password.startswith('md5$') - assert mock_notification_send.called is False def test_change_password_invalid(self, old_password=None, new_password=None, confirm_password=None, error_message='Old password is invalid'): diff --git a/scripts/tests/test_deactivate_requested_accounts.py b/scripts/tests/test_deactivate_requested_accounts.py index 07e43f74bf2..1b0da9a89b5 100644 --- a/scripts/tests/test_deactivate_requested_accounts.py +++ b/scripts/tests/test_deactivate_requested_accounts.py @@ -1,12 +1,13 @@ import pytest +from osf.models import NotificationType from osf_tests.factories import ProjectFactory, AuthUserFactory from osf.management.commands.deactivate_requested_accounts import deactivate_requested_accounts +from tests.utils import capture_notifications @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') class TestDeactivateRequestedAccount: @pytest.fixture() @@ -24,21 +25,25 @@ def user_requested_deactivation_with_node(self): user.save() return user - def test_deactivate_user_with_no_content(self, mock_send_grid, user_requested_deactivation): + def test_deactivate_user_with_no_content(self, user_requested_deactivation): - deactivate_requested_accounts(dry_run=False) + with capture_notifications() as notifications: + deactivate_requested_accounts(dry_run=False) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.DESK_REQUEST_DEACTIVATION user_requested_deactivation.reload() assert user_requested_deactivation.requested_deactivation assert user_requested_deactivation.contacted_deactivation assert user_requested_deactivation.is_disabled - mock_send_grid.assert_called() - def test_deactivate_user_with_content(self, mock_send_grid, user_requested_deactivation_with_node): + def test_deactivate_user_with_content(self, user_requested_deactivation_with_node): - deactivate_requested_accounts(dry_run=False) + with capture_notifications() as notifications: + deactivate_requested_accounts(dry_run=False) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.DESK_REQUEST_DEACTIVATION user_requested_deactivation_with_node.reload() assert user_requested_deactivation_with_node.requested_deactivation assert not user_requested_deactivation_with_node.is_disabled - mock_send_grid.assert_called() diff --git a/scripts/tests/test_send_queued_mails.py b/scripts/tests/test_send_queued_mails.py deleted file mode 100644 index 2815b85f5d9..00000000000 --- a/scripts/tests/test_send_queued_mails.py +++ /dev/null @@ -1,84 +0,0 @@ -from unittest import mock -from datetime import timedelta - -from django.utils import timezone - -from tests.base import OsfTestCase -from osf_tests.factories import UserFactory -from osf.models.queued_mail import QueuedMail, queue_mail, NO_ADDON, NO_LOGIN_TYPE - -from scripts.send_queued_mails import main, pop_and_verify_mails_for_each_user, find_queued_mails_ready_to_be_sent -from website import settings - -@mock.patch('website.mails.settings.USE_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) -class TestSendQueuedMails(OsfTestCase): - - def 
setUp(self): - super().setUp() - self.user = UserFactory() - self.user.date_last_login = timezone.now() - self.user.osf_mailing_lists[settings.OSF_HELP_LIST] = True - self.user.save() - - from conftest import start_mock_send_grid - self.mock_send_grid = start_mock_send_grid(self) - - - def queue_mail(self, mail_type=NO_ADDON, user=None, send_at=None): - return queue_mail( - to_addr=user.username if user else self.user.username, - mail=mail_type, - send_at=send_at or timezone.now(), - user=user if user else self.user, - fullname=user.fullname if user else self.user.fullname, - ) - - def test_queue_addon_mail(self): - self.queue_mail() - main(dry_run=False) - assert self.mock_send_grid.called - - def test_no_two_emails_to_same_person(self): - user = UserFactory() - user.osf_mailing_lists[settings.OSF_HELP_LIST] = True - user.save() - self.queue_mail(user=user) - self.queue_mail(user=user) - main(dry_run=False) - assert self.mock_send_grid.call_count == 1 - - def test_pop_and_verify_mails_for_each_user(self): - user_with_email_sent = UserFactory() - user_with_multiple_emails = UserFactory() - user_with_no_emails_sent = UserFactory() - time = timezone.now() - timedelta(days=1) - mail_sent = QueuedMail( - user=user_with_email_sent, - send_at=time, - to_addr=user_with_email_sent.username, - email_type=NO_LOGIN_TYPE - ) - mail_sent.save() - mail1 = self.queue_mail(user=user_with_email_sent) - mail2 = self.queue_mail(user=user_with_multiple_emails) - mail3 = self.queue_mail(user=user_with_multiple_emails) - mail4 = self.queue_mail(user=user_with_no_emails_sent) - user_queue = { - user_with_email_sent._id: [mail1], - user_with_multiple_emails._id: [mail2, mail3], - user_with_no_emails_sent._id: [mail4] - } - mails_ = list(pop_and_verify_mails_for_each_user(user_queue)) - assert len(mails_) == 2 - user_mails = [mail.user for mail in mails_] - assert not (user_with_email_sent in user_mails) - assert user_with_multiple_emails in user_mails - assert user_with_no_emails_sent in user_mails - - def test_find_queued_mails_ready_to_be_sent(self): - mail1 = self.queue_mail() - mail2 = self.queue_mail(send_at=timezone.now()+timedelta(days=1)) - mail3 = self.queue_mail(send_at=timezone.now()) - mails = find_queued_mails_ready_to_be_sent() - assert mails.count() == 2 diff --git a/tests/base.py b/tests/base.py index e1024f8e266..b308b9dca17 100644 --- a/tests/base.py +++ b/tests/base.py @@ -175,9 +175,6 @@ class ApiTestCase(DbTestCase, ApiAppTestCase, SearchTestCase): API application. Note: superclasses must call `super` in order for all setup and teardown methods to be called correctly. """ - def setUp(self): - super().setUp() - settings.USE_EMAIL = False class ApiAddonTestCase(ApiTestCase): """Base `TestCase` for tests that require interaction with addons. 
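The converted tests throughout this patch import `capture_notifications` from `tests.utils`, but the helper itself never appears in this excerpt. Below is a minimal sketch of what such a context manager could look like, assuming notifications are dispatched through a `NotificationType.emit(...)` hook and that each captured record is a dict with 'type' and 'kwargs' keys, which is the shape the assertions in the converted tests rely on; the real implementation in tests/utils.py may differ.

# Illustrative sketch only -- not part of this patch. The hook being patched
# (NotificationType.emit) and the enum lookup (NotificationType.Type) are
# assumptions inferred from how the converted tests use the helper.
from contextlib import contextmanager
from unittest import mock

from osf.models import NotificationType


@contextmanager
def capture_notifications():
    """Collect emitted notifications as dicts instead of sending them."""
    captured = []

    def _record(notification_type, user=None, **kwargs):
        # Record the pieces the tests assert on: the notification type and
        # the keyword arguments (e.g. the recipient user).
        captured.append({
            'type': NotificationType.Type(notification_type.name),
            'kwargs': {'user': user, **kwargs},
        })

    # Assumed hook: replace the emit entry point so nothing is actually sent.
    with mock.patch.object(NotificationType, 'emit', _record):
        yield captured

With a helper along these lines, assertions such as `notifications[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD` or `notifications[0]['kwargs']['user'] == admin` run against the captured list rather than a SendGrid mock, which is the pattern this patch migrates the test suite to.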
diff --git a/tests/framework_tests/test_email.py b/tests/framework_tests/test_email.py deleted file mode 100644 index c19596b7ed8..00000000000 --- a/tests/framework_tests/test_email.py +++ /dev/null @@ -1,108 +0,0 @@ -import unittest -import smtplib - -from unittest import mock -from unittest.mock import MagicMock - -import sendgrid -from sendgrid import SendGridAPIClient -from sendgrid.helpers.mail import Mail, Email, To, Category - -from framework.email.tasks import send_email, _send_with_sendgrid -from website import settings -from tests.base import fake -from osf_tests.factories import fake_email - -# Check if local mail server is running -SERVER_RUNNING = True -try: - s = smtplib.SMTP(settings.MAIL_SERVER) - s.quit() -except Exception as err: - SERVER_RUNNING = False - - -class TestEmail(unittest.TestCase): - - @unittest.skipIf(not SERVER_RUNNING, - "Mailserver isn't running. Run \"invoke mailserver\".") - @unittest.skipIf(not settings.USE_EMAIL, - 'settings.USE_EMAIL is False') - def test_sending_email(self): - assert send_email('foo@bar.com', 'baz@quux.com', subject='no subject', - message='

<h1>Greetings!</h1>
    ', ttls=False, login=False) - - def setUp(self): - settings.SENDGRID_WHITELIST_MODE = False - - def tearDown(self): - settings.SENDGRID_WHITELIST_MODE = True - - @mock.patch(f'{_send_with_sendgrid.__module__}.Mail', autospec=True) - def test_send_with_sendgrid_success(self, mock_mail: MagicMock): - mock_client = mock.MagicMock(autospec=SendGridAPIClient) - mock_client.send.return_value = mock.Mock(status_code=200, body='success') - from_addr, to_addr = fake_email(), fake_email() - category1, category2 = fake.word(), fake.word() - subject = fake.bs() - message = fake.text() - ret = _send_with_sendgrid( - from_addr=from_addr, - to_addr=to_addr, - subject=subject, - message=message, - client=mock_client, - categories=(category1, category2) - ) - assert ret - - # Check Mail object arguments - mock_mail.assert_called_once() - kwargs = mock_mail.call_args.kwargs - assert kwargs['from_email'].email == from_addr - assert kwargs['subject'] == subject - assert kwargs['html_content'] == message - - mock_mail.return_value.add_personalization.assert_called_once() - - # Capture the categories added via add_category - mock_mail.return_value.add_category.assert_called_once() - added_categories = mock_mail.return_value.add_category.call_args.args[0] - assert len(added_categories) == 2 - assert isinstance(added_categories[0], Category) - assert isinstance(added_categories[1], Category) - assert added_categories[0].get() == category1 - assert added_categories[1].get() == category2 - - mock_client.send.assert_called_once_with(mock_mail.return_value) - - @mock.patch(f'{_send_with_sendgrid.__module__}.sentry.log_message', autospec=True) - @mock.patch(f'{_send_with_sendgrid.__module__}.Mail', autospec=True) - def test_send_with_sendgrid_failure_returns_false(self, mock_mail, sentry_mock): - mock_client = mock.MagicMock() - mock_client.send.return_value = mock.Mock(status_code=400, body='failed') - from_addr, to_addr = fake_email(), fake_email() - subject = fake.bs() - message = fake.text() - ret = _send_with_sendgrid( - from_addr=from_addr, - to_addr=to_addr, - subject=subject, - message=message, - client=mock_client - ) - assert not ret - sentry_mock.assert_called_once() - - # Check Mail object arguments - mock_mail.assert_called_once() - kwargs = mock_mail.call_args.kwargs - assert kwargs['from_email'].email == from_addr - assert kwargs['subject'] == subject - assert kwargs['html_content'] == message - - mock_client.send.assert_called_once_with(mock_mail.return_value) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_auth.py b/tests/test_auth.py index 52156529d92..4e6ebf2265c 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -24,8 +24,9 @@ from framework.auth import Auth from framework.auth.decorators import must_be_logged_in from framework.sessions import get_session -from osf.models import OSFUser +from osf.models import OSFUser, NotificationType from osf.utils import permissions +from tests.utils import capture_notifications from website import mails from website import settings from website.project.decorators import ( @@ -36,21 +37,17 @@ must_have_addon, must_be_addon_authorizer, ) from website.util import api_url_for -from conftest import start_mock_send_grid, start_mock_notification_send from tests.test_cas_authentication import generate_external_user_with_resp logger = logging.getLogger(__name__) -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthUtils(OsfTestCase): def setUp(self): 
super().setUp() - self.mock_send_grid = start_mock_send_grid(self) - self.start_mock_notification_send = start_mock_notification_send(self) def test_citation_with_only_fullname(self): user = UserFactory() @@ -91,24 +88,25 @@ def test_confirm_email(self): user.reload() token = user.get_confirmation_token(user.username) - res = self.app.get(f'/confirm/{user._id}/{token}') - res = self.app.resolve_redirect(res) + with capture_notifications() as notifications: + res = self.app.get(f'/confirm/{user._id}/{token}') + res = self.app.resolve_redirect(res) + assert not notifications assert res.status_code == 302 assert 'login?service=' in res.location user.reload() - self.mock_send_grid.assert_not_called() + with capture_notifications() as notifications: + self.app.set_cookie(settings.COOKIE_NAME, user.get_or_create_cookie().decode()) + res = self.app.get(f'/confirm/{user._id}/{token}') - self.app.set_cookie(settings.COOKIE_NAME, user.get_or_create_cookie().decode()) - res = self.app.get(f'/confirm/{user._id}/{token}') - - res = self.app.resolve_redirect(res) + res = self.app.resolve_redirect(res) assert res.status_code == 302 assert '/' == urlparse(res.location).path - assert len(self.mock_send_grid.call_args_list) == 0 + assert not notifications assert len(get_session()['status']) == 1 def test_get_user_by_id(self): @@ -172,9 +170,11 @@ def test_successful_external_first_login_without_attributes(self, mock_service_v def test_password_change_sends_email(self): user = UserFactory() - user.set_password('killerqueen') - user.save() - assert len(self.start_mock_notification_send.call_args_list) == 1 + with capture_notifications() as notifications: + user.set_password('killerqueen') + user.save() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD @mock.patch('framework.auth.utils.requests.post') def test_validate_recaptcha_success(self, req_post): @@ -216,11 +216,15 @@ def test_sign_up_twice_sends_two_confirmation_emails_only(self): 'password': 'brutusisajerk' } - self.app.post(url, json=sign_up_data) - assert len(self.mock_send_grid.call_args_list) == 1 + with capture_notifications() as notifications: + self.app.post(url, json=sign_up_data) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD - self.app.post(url, json=sign_up_data) - assert len(self.mock_send_grid.call_args_list) == 2 + with capture_notifications() as notifications: + self.app.post(url, json=sign_up_data) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD class TestAuthObject(OsfTestCase): diff --git a/tests/test_auth_views.py b/tests/test_auth_views.py index 4d385b68dd6..7f2b4c4136a 100644 --- a/tests/test_auth_views.py +++ b/tests/test_auth_views.py @@ -40,11 +40,9 @@ ) from website import mails, settings from website.util import api_url_for, web_url_for -from conftest import start_mock_send_grid pytestmark = pytest.mark.django_db -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthViews(OsfTestCase): @@ -53,8 +51,6 @@ def setUp(self): self.user = AuthUserFactory() self.auth = self.user.auth - self.mock_send_grid = start_mock_send_grid(self) - def test_register_ok(self): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' diff --git a/tests/test_misc_views.py b/tests/test_misc_views.py index 27c2a3e383c..d9c735b97dd 100644 --- 
a/tests/test_misc_views.py +++ b/tests/test_misc_views.py @@ -49,7 +49,6 @@ from website.project.views.node import _should_show_wiki_widget from website.util import web_url_for from website.util import rubeus -from conftest import start_mock_send_grid from tests.utils import capture_notifications pytestmark = pytest.mark.django_db @@ -362,7 +361,6 @@ def test_explore(self): assert res.status_code == 200 -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class TestExternalAuthViews(OsfTestCase): @@ -385,8 +383,6 @@ def setUp(self): self.user.save() self.auth = (self.user.username, password) - self.mock_send_grid = start_mock_send_grid(self) - def test_external_login_email_get_with_invalid_session(self): url = web_url_for('external_login_email_get') resp = self.app.get(url) @@ -410,13 +406,13 @@ def test_external_login_confirm_email_get_create(self): # TODO: check in qa url encoding assert not self.user.is_registered url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') - res = self.app.get(url) + with capture_notifications() as notifications: + res = self.app.get(url) + assert not notifications assert res.status_code == 302, 'redirects to cas login' assert '/login?service=' in res.location assert quote_plus('new=true') in res.location - assert self.mock_send_grid.call_count == 0 - self.user.reload() assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' assert self.user.is_registered @@ -436,7 +432,6 @@ def test_external_login_confirm_email_get_link(self): assert '/login?service=' in res.location assert 'new=true' not in parse.unquote(res.location) - self.user.reload() assert self.user.external_identity['orcid'][self.provider_id] == 'VERIFIED' assert self.user.is_registered @@ -446,13 +441,13 @@ def test_external_login_confirm_email_get_duped_id(self): dupe_user = UserFactory(external_identity={'orcid': {self.provider_id: 'CREATE'}}) assert dupe_user.external_identity == self.user.external_identity url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') - res = self.app.get(url) + with capture_notifications() as notifications: + res = self.app.get(url) + assert not notifications assert res.status_code == 302, 'redirects to cas login' assert 'You should be redirected automatically' in str(res.html) assert '/login?service=' in res.location - assert self.mock_send_grid.call_count == 0 - self.user.reload() dupe_user.reload() @@ -462,11 +457,11 @@ def test_external_login_confirm_email_get_duped_id(self): def test_external_login_confirm_email_get_duping_id(self): dupe_user = UserFactory(external_identity={'orcid': {self.provider_id: 'VERIFIED'}}) url = self.user.get_confirmation_url(self.user.username, external_id_provider='orcid', destination='dashboard') - res = self.app.get(url) + with capture_notifications() as notifications: + res = self.app.get(url) + assert not notifications assert res.status_code == 403, 'only allows one user to link an id' - assert self.mock_send_grid.call_count == 0 - self.user.reload() dupe_user.reload() diff --git a/tests/test_preprints.py b/tests/test_preprints.py index df1be915bab..9f16edc1e58 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -53,8 +53,6 @@ update_or_enqueue_on_preprint_updated, should_update_preprint_identifiers ) -from conftest import start_mock_send_grid - SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore 
@@ -971,7 +969,7 @@ def test_confirm_ham_on_public_preprint_stays_public(self, preprint, user): @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) @pytest.mark.skip('Technically still true, but skipping because mocking is outdated') - def test_check_spam_on_private_preprint_bans_new_spam_user(self, mock_send_mail, preprint, user): + def test_check_spam_on_private_preprint_bans_new_spam_user(self, preprint, user): preprint.is_public = False preprint.save() with mock.patch('osf.models.Preprint._get_spam_content', mock.Mock(return_value='some content!')): @@ -1001,7 +999,7 @@ def test_check_spam_on_private_preprint_bans_new_spam_user(self, mock_send_mail, @mock.patch('website.mailchimp_utils.unsubscribe_mailchimp') @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) - def test_check_spam_on_private_preprint_does_not_ban_existing_user(self, mock_send_mail, preprint, user): + def test_check_spam_on_private_preprint_does_not_ban_existing_user(self, preprint, user): preprint.is_public = False preprint.save() with mock.patch('osf.models.Preprint._get_spam_content', mock.Mock(return_value='some content!')): @@ -1985,7 +1983,6 @@ def test_update_or_enqueue_on_preprint_doi_created(self): assert should_update_preprint_identifiers(self.private_preprint, {}) -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class TestPreprintConfirmationEmails(OsfTestCase): def setUp(self): @@ -1996,7 +1993,6 @@ def setUp(self): self.preprint = PreprintFactory(creator=self.user, project=self.project, provider=PreprintProviderFactory(_id='osf'), is_published=False) self.preprint.add_contributor(self.write_contrib, permissions=WRITE) self.preprint_branded = PreprintFactory(creator=self.user, is_published=False) - self.mock_send_grid = start_mock_send_grid(self) def test_creator_gets_email(self): with capture_notifications() as notifications: diff --git a/tests/test_registrations/test_embargoes.py b/tests/test_registrations/test_embargoes.py index 992a968f224..7b06887c86b 100644 --- a/tests/test_registrations/test_embargoes.py +++ b/tests/test_registrations/test_embargoes.py @@ -29,7 +29,7 @@ from osf.models.sanctions import SanctionCallbackMixin, Embargo from osf.utils import permissions from osf.models import Registration, Contributor, OSFUser, SpamStatus -from conftest import start_mock_notification_send +from tests.utils import capture_notifications DUMMY_TOKEN = tokens.encode({ 'dummy': 'token' @@ -1060,7 +1060,6 @@ def test_GET_from_authorized_user_with_registration_rej_token_deleted_node(self) @pytest.mark.enable_bookmark_creation -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class RegistrationEmbargoViewsTestCase(OsfTestCase): def setUp(self): @@ -1101,9 +1100,6 @@ def setUp(self): } }) - self.start_mock_notification_send = start_mock_notification_send(self) - - @mock.patch('osf.models.sanctions.EmailApprovableSanction.ask') def test_embargoed_registration_set_privacy_requests_embargo_termination(self, mock_ask): # Initiate and approve embargo @@ -1154,13 +1150,14 @@ def test_embargoed_registration_set_privacy_sends_mail(self): self.registration.embargo.approve_embargo(OSFUser.load(user_id), approval_token) self.registration.refresh_from_db() - self.registration.set_privacy('public', Auth(self.registration.creator)) + with 
capture_notifications() as notifications: + self.registration.set_privacy('public', Auth(self.registration.creator)) admin_contributors = [] for contributor in self.registration.contributors: if Contributor.objects.get(user_id=contributor.id, node_id=self.registration.id).permission == permissions.ADMIN: admin_contributors.append(contributor) for admin in admin_contributors: - assert any([each[1]['to_addr'] == admin.username for each in self.start_mock_notification_send.call_args_list]) + assert any([each['kwargs']['user'] == admin for each in notifications]) @mock.patch('osf.models.sanctions.EmailApprovableSanction.ask') def test_make_child_embargoed_registration_public_asks_all_admins_in_tree(self, mock_ask): diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index d3f8cb72abf..5874fad6fa6 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -22,10 +22,9 @@ InvalidSanctionApprovalToken, InvalidSanctionRejectionToken, NodeStateError, ) -from osf.models import Contributor, Retraction +from osf.models import Contributor, Retraction, NotificationType from osf.utils import permissions -from conftest import start_mock_notification_send - +from tests.utils import capture_notifications @pytest.mark.enable_bookmark_creation @@ -753,7 +752,6 @@ def test_POST_retraction_to_subproject_component_returns_HTTPError_BAD_REQUEST(s @pytest.mark.enable_bookmark_creation @pytest.mark.usefixtures('mock_gravy_valet_get_verified_links') -@mock.patch('website.mails.settings.USE_EMAIL', True) @mock.patch('website.mails.settings.USE_CELERY', False) class RegistrationRetractionViewsTestCase(OsfTestCase): def setUp(self): @@ -767,8 +765,6 @@ def setUp(self): self.retraction_get_url = self.registration.web_url_for('node_registration_retraction_get') self.justification = fake.sentence() - self.start_mock_notification_send = start_mock_notification_send(self) - def test_GET_retraction_page_when_pending_retraction_returns_HTTPError_BAD_REQUEST(self): self.registration.retract_registration(self.user) self.registration.save() @@ -802,12 +798,14 @@ def test_POST_retraction_does_not_send_email_to_unregistered_admins(self): existing_user=unreg ) self.registration.save() - self.app.post( - self.retraction_post_url, - json={'justification': ''}, - auth=self.user.auth, - ) - assert self.start_mock_notification_send.call_count == 1 + with capture_notifications() as notifications: + self.app.post( + self.retraction_post_url, + json={'justification': ''}, + auth=self.user.auth, + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED def test_POST_pending_embargo_returns_HTTPError_HTTPOK(self): self.registration.embargo_registration( @@ -892,12 +890,14 @@ def test_valid_POST_retraction_when_pending_retraction_raises_400(self): assert res.status_code == 400 def test_valid_POST_calls_send_mail_with_username(self): - self.app.post( - self.retraction_post_url, - json={'justification': ''}, - auth=self.user.auth, - ) - assert self.start_mock_notification_send.called + with capture_notifications() as notifications: + self.app.post( + self.retraction_post_url, + json={'justification': ''}, + auth=self.user.auth, + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED def test_non_contributor_GET_approval_returns_HTTPError_FORBIDDEN(self): non_contributor = AuthUserFactory() diff --git 
a/tests/test_spam_mixin.py b/tests/test_spam_mixin.py index 0713d0b4c54..af509272425 100644 --- a/tests/test_spam_mixin.py +++ b/tests/test_spam_mixin.py @@ -10,22 +10,24 @@ from tests.base import DbTestCase from osf_tests.factories import UserFactory, CommentFactory, ProjectFactory, PreprintFactory, RegistrationFactory, AuthUserFactory -from osf.models import NotableDomain, SpamStatus +from osf.models import NotableDomain, SpamStatus, NotificationType +from tests.utils import capture_notifications from website import settings, mails @pytest.mark.django_db -@pytest.mark.usefixtures('mock_send_grid') -def test_throttled_autoban(mock_send_grid): +def test_throttled_autoban(): settings.SPAM_THROTTLE_AUTOBAN = True user = AuthUserFactory() projects = [] - for _ in range(7): - proj = ProjectFactory(creator=user) - proj.flag_spam() - proj.save() - projects.append(proj) - mock_send_grid.assert_called() + with capture_notifications() as notifications: + for _ in range(7): + proj = ProjectFactory(creator=user) + proj.flag_spam() + proj.save() + projects.append(proj) + assert len(notifications) == 7 + assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL user.reload() assert user.is_disabled for project in projects: diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index 20095abfba1..49ff6076d34 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -27,7 +27,6 @@ from website import mailchimp_utils from website.settings import MAILCHIMP_GENERAL_LIST from website.util import api_url_for, web_url_for -from conftest import start_mock_send_grid @pytest.mark.enable_enqueue_task @@ -515,8 +514,6 @@ def setUp(self): self.user.auth = (self.user.username, 'password') self.user.save() - self.mock_send_grid = start_mock_send_grid(self) - def test_password_change_valid(self, old_password='password', new_password='Pa$$w0rd', diff --git a/website/mails/mails.py b/website/mails/mails.py index b98b7c37b87..84e82d4632a 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -176,17 +176,16 @@ def send_mail( ) logger.debug('Preparing to send...') - if settings.USE_EMAIL: - if settings.USE_CELERY and celery: - logger.debug('Sending via celery...') - return mailer.apply_async(kwargs=kwargs, link=callback) - else: - logger.debug('Sending without celery') - ret = mailer(**kwargs) - if callback: - callback() - - return ret + if settings.USE_CELERY and celery: + logger.debug('Sending via celery...') + return mailer.apply_async(kwargs=kwargs, link=callback) + else: + logger.debug('Sending without celery') + ret = mailer(**kwargs) + if callback: + callback() + + return ret def get_english_article(word): diff --git a/website/notifications/tasks.py b/website/notifications/tasks.py deleted file mode 100644 index 6b7353ccdc0..00000000000 --- a/website/notifications/tasks.py +++ /dev/null @@ -1,227 +0,0 @@ -""" -Tasks for making even transactional emails consolidated. 
-""" -import itertools - -from django.db import connection - -from framework.celery_tasks import app as celery_app -from framework.sentry import log_message -from osf.models import ( - OSFUser, - AbstractNode, - AbstractProvider, - RegistrationProvider, - CollectionProvider, - NotificationDigest, -) -from osf.registrations.utils import get_registration_provider_submissions_url -from osf.utils.permissions import ADMIN -from website import mails, settings -from website.notifications.utils import NotificationsDict - - -@celery_app.task(name='website.notifications.tasks.send_users_email', max_retries=0) -def send_users_email(send_type): - """Send pending emails. - - :param send_type - :return: - """ - _send_global_and_node_emails(send_type) - _send_reviews_moderator_emails(send_type) - - -def _send_global_and_node_emails(send_type): - """ - Called by `send_users_email`. Send all global and node-related notification emails. - """ - grouped_emails = get_users_emails(send_type) - for group in grouped_emails: - user = OSFUser.load(group['user_id']) - if not user: - log_message(f"User with id={group['user_id']} not found") - continue - info = group['info'] - notification_ids = [message['_id'] for message in info] - sorted_messages = group_by_node(info) - if sorted_messages: - if not user.is_disabled: - # If there's only one node in digest we can show it's preferences link in the template. - notification_nodes = list(sorted_messages['children'].keys()) - node = AbstractNode.load(notification_nodes[0]) if len( - notification_nodes) == 1 else None - mails.send_mail( - to_addr=user.username, - can_change_node_preferences=bool(node), - node=node, - mail=mails.DIGEST, - name=user.fullname, - message=sorted_messages, - ) - remove_notifications(email_notification_ids=notification_ids) - - -def _send_reviews_moderator_emails(send_type): - """ - Called by `send_users_email`. Send all reviews triggered emails. 
- """ - grouped_emails = get_moderators_emails(send_type) - for group in grouped_emails: - user = OSFUser.load(group['user_id']) - info = group['info'] - notification_ids = [message['_id'] for message in info] - provider = AbstractProvider.objects.get(id=group['provider_id']) - additional_context = dict() - if isinstance(provider, RegistrationProvider): - provider_type = 'registration' - submissions_url = get_registration_provider_submissions_url(provider) - withdrawals_url = f'{submissions_url}?state=pending_withdraw' - notification_settings_url = f'{settings.DOMAIN}registries/{provider._id}/moderation/notifications' - if provider.brand: - additional_context = { - 'logo_url': provider.brand.hero_logo_image, - 'top_bar_color': provider.brand.primary_color - } - elif isinstance(provider, CollectionProvider): - provider_type = 'collection' - submissions_url = f'{settings.DOMAIN}collections/{provider._id}/moderation/' - notification_settings_url = f'{settings.DOMAIN}registries/{provider._id}/moderation/notifications' - if provider.brand: - additional_context = { - 'logo_url': provider.brand.hero_logo_image, - 'top_bar_color': provider.brand.primary_color - } - withdrawals_url = '' - else: - provider_type = 'preprint' - submissions_url = f'{settings.DOMAIN}reviews/preprints/{provider._id}', - withdrawals_url = '' - notification_settings_url = f'{settings.DOMAIN}reviews/{provider_type}s/{provider._id}/notifications' - - if not user.is_disabled: - mails.send_mail( - to_addr=user.username, - mail=mails.DIGEST_REVIEWS_MODERATORS, - name=user.fullname, - message=info, - provider_name=provider.name, - reviews_submissions_url=submissions_url, - notification_settings_url=notification_settings_url, - reviews_withdrawal_url=withdrawals_url, - is_reviews_moderator_notification=True, - is_admin=provider.get_group(ADMIN).user_set.filter(id=user.id).exists(), - provider_type=provider_type, - **additional_context - ) - remove_notifications(email_notification_ids=notification_ids) - - -def get_moderators_emails(send_type): - """Get all emails for reviews moderators that need to be sent, grouped by users AND providers. - :param send_type: from NOTIFICATION_TYPES, could be "email_digest" or "email_transactional" - :return Iterable of dicts of the form: - [ - 'user_id': 'se8ea', - 'provider_id': '1', - 'info': [ - { - 'message': 'Hana Xie submitted Gravity', - '_id': NotificationDigest._id, - } - ], - ] - """ - sql = """ - SELECT json_build_object( - 'user_id', osf_guid._id, - 'provider_id', nd.provider_id, - 'info', json_agg( - json_build_object( - 'message', nd.message, - '_id', nd._id - ) - ) - ) - FROM osf_notificationdigest AS nd - LEFT JOIN osf_guid ON nd.user_id = osf_guid.object_id - WHERE send_type = %s AND (event = 'new_pending_submissions' OR event = 'new_pending_withdraw_requests') - AND osf_guid.content_type_id = (SELECT id FROM django_content_type WHERE model = 'osfuser') - GROUP BY osf_guid.id, nd.provider_id - ORDER BY osf_guid.id ASC - """ - - with connection.cursor() as cursor: - cursor.execute(sql, [send_type, ]) - return itertools.chain.from_iterable(cursor.fetchall()) - - -def get_users_emails(send_type): - """Get all emails that need to be sent. - NOTE: These do not include reviews triggered emails for moderators. 
- - :param send_type: from NOTIFICATION_TYPES - :return: Iterable of dicts of the form: - { - 'user_id': 'se8ea', - 'info': [{ - 'message': { - 'message': 'Freddie commented on your project Open Science', - 'timestamp': datetime object - }, - 'node_lineage': ['parent._id', 'node._id'], - '_id': NotificationDigest._id - }, ... - }] - { - 'user_id': ... - } - } - """ - - sql = """ - SELECT json_build_object( - 'user_id', osf_guid._id, - 'info', json_agg( - json_build_object( - 'message', nd.message, - 'node_lineage', nd.node_lineage, - '_id', nd._id - ) - ) - ) - FROM osf_notificationdigest AS nd - LEFT JOIN osf_guid ON nd.user_id = osf_guid.object_id - WHERE send_type = %s - AND event != 'new_pending_submissions' - AND event != 'new_pending_withdraw_requests' - AND osf_guid.content_type_id = (SELECT id FROM django_content_type WHERE model = 'osfuser') - GROUP BY osf_guid.id - ORDER BY osf_guid.id ASC - """ - - with connection.cursor() as cursor: - cursor.execute(sql, [send_type, ]) - return itertools.chain.from_iterable(cursor.fetchall()) - - -def group_by_node(notifications, limit=15): - """Take list of notifications and group by node. - - :param notifications: List of stored email notifications - :return: - """ - emails = NotificationsDict() - for notification in notifications[:15]: - emails.add_message(notification['node_lineage'], notification['message']) - return emails - - -def remove_notifications(email_notification_ids=None): - """Remove sent emails. - - :param email_notification_ids: - :return: - """ - if email_notification_ids: - NotificationDigest.objects.filter(_id__in=email_notification_ids).delete() diff --git a/website/settings/defaults.py b/website/settings/defaults.py index badafc32862..5d39c01ab90 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -12,6 +12,8 @@ from collections import OrderedDict import enum +from celery.schedules import crontab + os_env = os.environ @@ -140,7 +142,6 @@ def parent_dir(path): # External services USE_CDN_FOR_CLIENT_LIBS = True -USE_EMAIL = True FROM_EMAIL = 'openscienceframework-noreply@osf.io' ENABLE_TEST_EMAIL = False # support email @@ -550,7 +551,6 @@ class CeleryConfig: # Modules to import when celery launches imports = ( 'framework.celery_tasks', - 'framework.email.tasks', 'osf.external.chronos.tasks', 'osf.management.commands.data_storage_usage', 'osf.management.commands.registration_schema_metrics', @@ -598,149 +598,104 @@ class CeleryConfig: # 'scripts.analytics.upload', # ) - # celery.schedule will not be installed when running invoke requirements the first time. - try: - from celery.schedules import crontab - except ImportError: - pass - else: - # Setting up a scheduler, essentially replaces an independent cron job - # Note: these times must be in UTC - beat_schedule = { - '5-minute-emails': { - 'task': 'website.notifications.tasks.send_users_email', - 'schedule': crontab(minute='*/5'), - 'args': ('email_transactional',), - }, - 'daily-emails': { - 'task': 'website.notifications.tasks.send_users_email', - 'schedule': crontab(minute=0, hour=5), # Daily at 12 a.m. 
EST - 'args': ('email_digest',), - }, - # 'refresh_addons': { # Handled by GravyValet now - # 'task': 'scripts.refresh_addon_tokens', - # 'schedule': crontab(minute=0, hour=7), # Daily 2:00 a.m - # 'kwargs': {'dry_run': False, 'addons': { - # 'box': 60, # https://docs.box.com/docs/oauth-20#section-6-using-the-access-and-refresh-tokens - # 'googledrive': 14, # https://developers.google.com/identity/protocols/OAuth2#expiration - # 'mendeley': 14 # http://dev.mendeley.com/reference/topics/authorization_overview.html - # }}, - # }, - 'retract_registrations': { - 'task': 'scripts.retract_registrations', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False}, - }, - 'embargo_registrations': { - 'task': 'scripts.embargo_registrations', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False}, - }, - 'add_missing_identifiers_to_preprints': { - 'task': 'scripts.add_missing_identifiers_to_preprints', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False}, - }, - 'approve_registrations': { - 'task': 'scripts.approve_registrations', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False}, - }, - 'approve_embargo_terminations': { - 'task': 'scripts.approve_embargo_terminations', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False}, - }, - 'triggered_mails': { - 'task': 'scripts.triggered_mails', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False}, - }, - 'clear_expired_sessions': { - 'task': 'osf.management.commands.clear_expired_sessions', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False}, - }, - 'send_queued_mails': { - 'task': 'scripts.send_queued_mails', - 'schedule': crontab(minute=0, hour=17), # Daily 12 p.m. - 'kwargs': {'dry_run': False}, - }, - 'new-and-noteworthy': { - 'task': 'scripts.populate_new_and_noteworthy_projects', - 'schedule': crontab(minute=0, hour=7, day_of_week=6), # Saturday 2:00 a.m. - 'kwargs': {'dry_run': False} - }, - 'registration_schema_metrics': { - 'task': 'management.commands.registration_schema_metrics', - 'schedule': crontab(minute=45, hour=7, day_of_month=3), # Third day of month 2:45 a.m. - 'kwargs': {'dry_run': False} - }, - 'daily_reporters_go': { - 'task': 'management.commands.daily_reporters_go', - 'schedule': crontab(minute=0, hour=6), # Daily 1:00 a.m. - }, - 'monthly_reporters_go': { - 'task': 'management.commands.monthly_reporters_go', - 'schedule': crontab(minute=30, hour=6, day_of_month=2), # Second day of month 1:30 a.m. - }, - # 'data_storage_usage': { - # 'task': 'management.commands.data_storage_usage', - # 'schedule': crontab(day_of_month=1, minute=30, hour=4), # Last of the month at 11:30 p.m. - # }, - # 'migrate_pagecounter_data': { - # 'task': 'management.commands.migrate_pagecounter_data', - # 'schedule': crontab(minute=0, hour=7), # Daily 2:00 a.m. - # }, - # 'migrate_deleted_date': { - # 'task': 'management.commands.migrate_deleted_date', - # 'schedule': crontab(minute=0, hour=3), - # 'addon_deleted_date': { - # 'task': 'management.commands.addon_deleted_date', - # 'schedule': crontab(minute=0, hour=3), # Daily 11:00 p.m. - # }, - 'generate_sitemap': { - 'task': 'scripts.generate_sitemap', - 'schedule': crontab(minute=0, hour=5), # Daily 12:00 a.m. - }, - 'deactivate_requested_accounts': { - 'task': 'management.commands.deactivate_requested_accounts', - 'schedule': crontab(minute=0, hour=5), # Daily 12:00 a.m. 
- }, - 'check_crossref_doi': { - 'task': 'management.commands.check_crossref_dois', - 'schedule': crontab(minute=0, hour=4), # Daily 11:00 p.m. - }, - 'update_institution_project_counts': { - 'task': 'management.commands.update_institution_project_counts', - 'schedule': crontab(minute=0, hour=9), # Daily 05:00 a.m. EDT - }, -# 'archive_registrations_on_IA': { -# 'task': 'osf.management.commands.archive_registrations_on_IA', -# 'schedule': crontab(minute=0, hour=5), # Daily 4:00 a.m. -# 'kwargs': {'dry_run': False} -# }, - 'delete_withdrawn_or_failed_registration_files': { - 'task': 'management.commands.delete_withdrawn_or_failed_registration_files', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': { - 'dry_run': False, - 'batch_size_withdrawn': 10, - 'batch_size_stuck': 10 - } - }, - 'monitor_registration_bulk_upload_jobs': { - 'task': 'api.providers.tasks.monitor_registration_bulk_upload_jobs', - # 'schedule': crontab(hour='*/3'), # Every 3 hours - 'schedule': crontab(minute='*/5'), # Every 5 minutes for staging server QA test - 'kwargs': {'dry_run': False} - }, - 'approve_registration_updates': { - 'task': 'osf.management.commands.approve_pending_schema_responses', - 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m - 'kwargs': {'dry_run': False}, - }, - } + # Setting up a scheduler, essentially replaces an independent cron job + # Note: these times must be in UTC + beat_schedule = { + 'retract_registrations': { + 'task': 'scripts.retract_registrations', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, + 'embargo_registrations': { + 'task': 'scripts.embargo_registrations', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, + 'add_missing_identifiers_to_preprints': { + 'task': 'scripts.add_missing_identifiers_to_preprints', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, + 'approve_registrations': { + 'task': 'scripts.approve_registrations', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, + 'approve_embargo_terminations': { + 'task': 'scripts.approve_embargo_terminations', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, + 'triggered_mails': { + 'task': 'scripts.triggered_mails', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, + 'clear_expired_sessions': { + 'task': 'osf.management.commands.clear_expired_sessions', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, + 'send_queued_mails': { + 'task': 'scripts.send_queued_mails', + 'schedule': crontab(minute=0, hour=17), # Daily 12 p.m. + 'kwargs': {'dry_run': False}, + }, + 'new-and-noteworthy': { + 'task': 'scripts.populate_new_and_noteworthy_projects', + 'schedule': crontab(minute=0, hour=7, day_of_week=6), # Saturday 2:00 a.m. + 'kwargs': {'dry_run': False} + }, + 'registration_schema_metrics': { + 'task': 'management.commands.registration_schema_metrics', + 'schedule': crontab(minute=45, hour=7, day_of_month=3), # Third day of month 2:45 a.m. + 'kwargs': {'dry_run': False} + }, + 'daily_reporters_go': { + 'task': 'management.commands.daily_reporters_go', + 'schedule': crontab(minute=0, hour=6), # Daily 1:00 a.m. + }, + 'monthly_reporters_go': { + 'task': 'management.commands.monthly_reporters_go', + 'schedule': crontab(minute=30, hour=6, day_of_month=2), # Second day of month 1:30 a.m. 
+ }, + 'generate_sitemap': { + 'task': 'scripts.generate_sitemap', + 'schedule': crontab(minute=0, hour=5), # Daily 12:00 a.m. + }, + 'deactivate_requested_accounts': { + 'task': 'management.commands.deactivate_requested_accounts', + 'schedule': crontab(minute=0, hour=5), # Daily 12:00 a.m. + }, + 'check_crossref_doi': { + 'task': 'management.commands.check_crossref_dois', + 'schedule': crontab(minute=0, hour=4), # Daily 11:00 p.m. + }, + 'update_institution_project_counts': { + 'task': 'management.commands.update_institution_project_counts', + 'schedule': crontab(minute=0, hour=9), # Daily 05:00 a.m. EDT + }, + 'delete_withdrawn_or_failed_registration_files': { + 'task': 'management.commands.delete_withdrawn_or_failed_registration_files', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': { + 'dry_run': False, + 'batch_size_withdrawn': 10, + 'batch_size_stuck': 10 + } + }, + 'monitor_registration_bulk_upload_jobs': { + 'task': 'api.providers.tasks.monitor_registration_bulk_upload_jobs', + # 'schedule': crontab(hour='*/3'), # Every 3 hours + 'schedule': crontab(minute='*/5'), # Every 5 minutes for staging server QA test + 'kwargs': {'dry_run': False} + }, + 'approve_registration_updates': { + 'task': 'osf.management.commands.approve_pending_schema_responses', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, + } # Tasks that need metrics and release requirements # beat_schedule.update({ diff --git a/website/settings/local-ci.py b/website/settings/local-ci.py index c63fce5a86a..2cab1ca4252 100644 --- a/website/settings/local-ci.py +++ b/website/settings/local-ci.py @@ -44,7 +44,6 @@ SEARCH_ENGINE = 'elastic' -USE_EMAIL = False USE_CELERY = False # Email diff --git a/website/settings/local-dist.py b/website/settings/local-dist.py index 212b9926f7e..4124d621450 100644 --- a/website/settings/local-dist.py +++ b/website/settings/local-dist.py @@ -57,7 +57,6 @@ ELASTIC_TIMEOUT = 10 # Email -USE_EMAIL = False MAIL_SERVER = 'localhost:1025' # For local testing MAIL_USERNAME = 'osf-smtp' MAIL_PASSWORD = 'CHANGEME' From 8c6785a7900ef39b4103ecc095b2b957bd096a2c Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Sun, 27 Jul 2025 17:16:15 -0400 Subject: [PATCH 124/176] update tasks for queued_email system --- .../views/test_draft_registration_list.py | 1 - framework/auth/campaigns.py | 9 - notifications.yaml | 27 ++- .../commands/check_crossref_dois.py | 3 +- osf/management/commands/email_all_users.py | 4 +- osf/management/commands/find_spammy_files.py | 3 +- osf/migrations/0033_delete_queuedmail.py | 16 ++ osf/models/__init__.py | 1 - osf/models/institution.py | 3 +- osf/models/notification_type.py | 1 + osf/models/queued_mail.py | 162 --------------- scripts/send_queued_mails.py | 66 ------ scripts/stuck_registration_audit.py | 19 +- scripts/tests/test_triggered_mails.py | 56 ----- scripts/triggered_mails.py | 50 ----- website/app.py | 1 - website/archiver/utils.py | 9 +- website/conferences/views.py | 74 +------ website/mails/listeners.py | 44 ---- website/mails/mails.py | 193 ------------------ website/mails/presends.py | 55 ----- website/notifications/constants.py | 13 +- website/notifications/listeners.py | 24 +-- website/notifications/utils.py | 18 +- website/reviews/listeners.py | 9 +- 25 files changed, 89 insertions(+), 772 deletions(-) create mode 100644 osf/migrations/0033_delete_queuedmail.py delete mode 100644 osf/models/queued_mail.py delete mode 100644 scripts/send_queued_mails.py delete mode 100644 
scripts/tests/test_triggered_mails.py delete mode 100644 scripts/triggered_mails.py delete mode 100644 website/mails/listeners.py delete mode 100644 website/mails/presends.py diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index b90493825ee..cc409555e10 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -428,7 +428,6 @@ def test_admin_can_create_draft( assert draft.has_permission(user, ADMIN) is True def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload): - # Intercepting the send_mail call from website.project.views.contributor.notify_added_contributor with capture_notifications() as notifications: app.post_json_api( f'{url_draft_registrations}?embed=branched_from&embed=initiator', diff --git a/framework/auth/campaigns.py b/framework/auth/campaigns.py index a47b3cf637b..74445e6c259 100644 --- a/framework/auth/campaigns.py +++ b/framework/auth/campaigns.py @@ -91,15 +91,6 @@ def get_campaigns(): } }) - newest_campaigns.update({ - 'agu_conference_2023': { - 'system_tag': CampaignSourceTags.AguConference2023.value, - 'redirect_url': furl(DOMAIN).add(path='dashboard/').url, - 'confirmation_email_template': mails.CONFIRM_EMAIL_AGU_CONFERENCE_2023, - 'login_type': 'native', - } - }) - newest_campaigns.update({ 'agu_conference': { 'system_tag': CampaignSourceTags.AguConference.value, diff --git a/notifications.yaml b/notifications.yaml index 2e8b08ee6f6..fe5186bb8fd 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -4,6 +4,7 @@ notification_types: #### User Notifications - name: user_pending_verification_registered + subject: 'Received request to be a contributor' __docs__: This email is sent when a user requests access to a node and has confirm their identity, `referrer` is sent an email to forward the confirmation link. object_content_type_model_name: osfuser @@ -37,10 +38,12 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_preprint_node_from_osf.html.mako' - name: user_external_login_link_success + subject: 'OSF Verification Success' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/external_confirm_success.html.mako' - name: user_confirm_email + subject: 'Add a new email to your OSF account' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/confirm.html.mako' @@ -73,25 +76,28 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/pending_invite.html.mako' - name: user_forward_invite_registered + subject: 'Please forward to ${fullname}' __docs__: ... object_content_type_model_name: osfuser - template: 'website/templates/emails/forward_invite.html.mako' + template: 'website/templates/emails/forward_invite_registered.html.mako' - name: user_forward_invite + subject: 'Please forward to ${fullname}' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forward_invite.html.mako' - name: user_initial_confirm_email - __docs__: ... + subject: 'OSF Account Verification' + __docs__: 'Sign up confirmation emails for OSF, native campaigns and branded campaigns' object_content_type_model_name: osfuser template: 'website/templates/emails/initial_confirm.html.mako' - name: user_export_data_request __docs__: ... 
object_content_type_model_name: osfuser - template: 'website/templates/emails/initial_confirm.html.mako' + template: 'website/templates/emails/.html.mako' - name: user_request_deactivation __docs__: ... object_content_type_model_name: osfuser - template: 'website/templates/emails/initial_confirm.html.mako' + template: 'website/templates/emails/.html.mako' - name: user_storage_cap_exceeded_announcement __docs__: ... object_content_type_model_name: osfuser @@ -100,19 +106,17 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/duplicate_accounts_sso_osf4i.html.mako' - - name: user_external_confirm_success_lik - __docs__: ... - object_content_type_model_name: osfuser - template: 'website/templates/emails/external_confirm_success.html.mako' - name: user_duplicate_accounts_osf4i __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/duplicate_accounts_sso_osf4i.html.mako' - name: user_forgot_password + subject: 'Reset Password' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forgot_password.html.mako' - name: user_forgot_password_institution + subject: 'Set Password' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forgot_password_institution.html.mako' @@ -133,7 +137,8 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/registration_bulk_upload_failure_duplicates.html.mako' - name: user_external_login_email_confirm_link - __docs__: ... + subject: 'OSF Account Verification' + __docs__: 'Emails for first-time login through external identity providers.' object_content_type_model_name: osfuser template: 'website/templates/emails/external_confirm_link.html.mako' - name: user_external_login_confirm_email_create @@ -253,12 +258,14 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/access_request_submitted.html.mako' - name: node_fork_failed + subject: 'Your fork has failed' __docs__: This email is sent when a fork fails to be created, this could be due to addons or network outages or technical errors. 
object_content_type_model_name: abstractnode template: 'website/templates/emails/fork_failed.html.mako' - name: node_fork_completed - __docs__: This email is sent when a fork is successfully created, + subject: 'Your fork has completed' + __docs__: 'This email is sent when a fork is successfully created,' object_content_type_model_name: abstractnode template: 'website/templates/emails/fork_completed.html.mako' - name: node_schema_response_initiated diff --git a/osf/management/commands/check_crossref_dois.py b/osf/management/commands/check_crossref_dois.py index bee66856747..bff7ca7e07f 100644 --- a/osf/management/commands/check_crossref_dois.py +++ b/osf/management/commands/check_crossref_dois.py @@ -3,6 +3,7 @@ import requests import django +from django.core.mail import send_mail from django.core.management.base import BaseCommand from django.utils import timezone django.setup() @@ -123,7 +124,7 @@ def report_stuck_dois(dry_run=True): if preprints_with_pending_dois: guids = ', '.join(preprints_with_pending_dois.values_list('guids___id', flat=True)) if not dry_run: - mails.send_mail( + send_mail( to_addr=settings.OSF_SUPPORT_EMAIL, mail=mails.CROSSREF_DOIS_PENDING, pending_doi_count=preprints_with_pending_dois.count(), diff --git a/osf/management/commands/email_all_users.py b/osf/management/commands/email_all_users.py index f5cbd677fb7..774f8b5af2d 100644 --- a/osf/management/commands/email_all_users.py +++ b/osf/management/commands/email_all_users.py @@ -6,6 +6,8 @@ import django +from django.core.mail import send_mail + django.setup() from django.core.management.base import BaseCommand @@ -44,7 +46,7 @@ def email_all_users(email_template, dry_run=False, ids=None, start_id=0, offset= for user in active_users.iterator(): logger.info(f'Sending email to {user.id}') try: - mails.send_mail( + send_mail( to_addr=user.email, mail=template, given_name=user.given_name or user.fullname, diff --git a/osf/management/commands/find_spammy_files.py b/osf/management/commands/find_spammy_files.py index 33d25366ea1..7feeab508fa 100644 --- a/osf/management/commands/find_spammy_files.py +++ b/osf/management/commands/find_spammy_files.py @@ -3,6 +3,7 @@ from datetime import timedelta import logging +from django.core.mail import send_mail from django.core.management.base import BaseCommand from django.utils import timezone @@ -52,7 +53,7 @@ def find_spammy_files(sniff_r=None, n=None, t=None, to_addrs=None): if ct: if to_addrs: for addr in to_addrs: - mails.send_mail( + send_mail( mail=mails.SPAM_FILES_DETECTED, to_addr=addr, ct=ct, diff --git a/osf/migrations/0033_delete_queuedmail.py b/osf/migrations/0033_delete_queuedmail.py new file mode 100644 index 00000000000..febe0843df5 --- /dev/null +++ b/osf/migrations/0033_delete_queuedmail.py @@ -0,0 +1,16 @@ +# Generated by Django 4.2.13 on 2025-07-27 21:30 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0032_alter_notificationsubscription_options_and_more'), + ] + + operations = [ + migrations.DeleteModel( + name='QueuedMail', + ), + ] diff --git a/osf/models/__init__.py b/osf/models/__init__.py index d09e350adfe..669059d9c4c 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -84,7 +84,6 @@ RegistrationProvider, WhitelistedSHAREPreprintProvider, ) -from .queued_mail import QueuedMail from .registrations import ( DraftRegistration, DraftRegistrationLog, diff --git a/osf/models/institution.py b/osf/models/institution.py index 5dce3c1df36..737233ca7b8 100644 --- 
a/osf/models/institution.py +++ b/osf/models/institution.py @@ -7,6 +7,7 @@ from django.conf import settings as django_conf_settings from django.contrib.postgres import fields +from django.core.mail import send_mail from django.db import models from django.db.models.signals import post_save from django.dispatch import receiver @@ -221,7 +222,7 @@ def _send_deactivation_email(self): for user in self.get_institution_users(): try: attempts += 1 - mails.send_mail( + send_mail( to_addr=user.username, mail=mails.INSTITUTION_DEACTIVATION, user=user, diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 66e58281db4..34aee20a357 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -29,6 +29,7 @@ class Type(str, Enum): ADDONS_BOA_JOB_FAILURE = 'addon_boa_job_failure' ADDONS_BOA_JOB_COMPLETE = 'addon_boa_job_complete' + DESK_ARCHIVE_REGISTRATION_STUCK = 'desk_archive_registration_stuck' DESK_REQUEST_EXPORT = 'desk_request_export' DESK_REQUEST_DEACTIVATION = 'desk_request_deactivation' DESK_OSF_SUPPORT_EMAIL = 'desk_osf_support_email' diff --git a/osf/models/queued_mail.py b/osf/models/queued_mail.py deleted file mode 100644 index 844465d5193..00000000000 --- a/osf/models/queued_mail.py +++ /dev/null @@ -1,162 +0,0 @@ -import waffle - -from django.db import models -from django.utils import timezone - -from osf.utils.fields import NonNaiveDateTimeField -from website.mails import Mail, send_mail -from website.mails import presends -from website import settings as osf_settings - -from osf import features -from .base import BaseModel, ObjectIDMixin -from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField - - -class QueuedMail(ObjectIDMixin, BaseModel): - user = models.ForeignKey('OSFUser', db_index=True, null=True, on_delete=models.CASCADE) - to_addr = models.CharField(max_length=255) - send_at = NonNaiveDateTimeField(db_index=True, null=False) - - # string denoting the template, presend to be used. Has to be an index of queue_mail types - email_type = models.CharField(max_length=255, db_index=True, null=False) - - # dictionary with variables used to populate mako template and store information used in presends - # Example: - # self.data = { - # 'nid' : 'ShIpTo', - # 'fullname': 'Florence Welch', - #} - data = DateTimeAwareJSONField(default=dict, blank=True) - sent_at = NonNaiveDateTimeField(db_index=True, null=True, blank=True) - - def __repr__(self): - if self.sent_at is not None: - return ''.format( - self._id, self.email_type, self.to_addr, self.sent_at - ) - return ''.format( - self._id, self.email_type, self.to_addr, self.send_at - ) - - def send_mail(self): - """ - Grabs the data from this email, checks for user subscription to help mails, - - constructs the mail object and checks presend. Then attempts to send the email - through send_mail() - :return: boolean based on whether email was sent. 
- """ - mail_struct = queue_mail_types[self.email_type] - presend = mail_struct['presend'](self) - mail = Mail( - mail_struct['template'], - subject=mail_struct['subject'], - categories=mail_struct.get('categories', None) - ) - self.data['osf_url'] = osf_settings.DOMAIN - if presend and self.user.is_active and self.user.osf_mailing_lists.get(osf_settings.OSF_HELP_LIST): - send_mail(self.to_addr or self.user.username, mail, **(self.data or {})) - self.sent_at = timezone.now() - self.save() - return True - else: - self.__class__.delete(self) - return False - - def find_sent_of_same_type_and_user(self): - """ - Queries up for all emails of the same type as self, sent to the same user as self. - Does not look for queue-up emails. - :return: a list of those emails - """ - return self.__class__.objects.filter(email_type=self.email_type, user=self.user).exclude(sent_at=None) - - -def queue_mail(to_addr, mail, send_at, user, **context): - """ - Queue an email to be sent using send_mail after a specified amount - of time and if the presend returns True. The presend is attached to - the template under mail. - - :param to_addr: the address email is to be sent to - :param mail: the type of mail. Struct following template: - { 'presend': function(), - 'template': mako template name, - 'subject': mail subject } - :param send_at: datetime object of when to send mail - :param user: user object attached to mail - :param context: IMPORTANT kwargs to be attached to template. - Sending mail will fail if needed for template kwargs are - not parameters. - :return: the QueuedMail object created - """ - if waffle.switch_is_active(features.DISABLE_ENGAGEMENT_EMAILS) and mail.get('engagement', False): - return False - new_mail = QueuedMail( - user=user, - to_addr=to_addr, - send_at=send_at, - email_type=mail['template'], - data=context - ) - new_mail.save() - return new_mail - - -# Predefined email templates. Structure: -#EMAIL_TYPE = { -# 'template': the mako template used for email_type, -# 'subject': subject used for the actual email, -# 'categories': categories to attach to the email using Sendgrid's SMTPAPI. -# 'engagement': Whether this is an engagement email that can be disabled with the disable_engagement_emails waffle flag -# 'presend': predicate function that determines whether an email should be sent. May also -# modify mail.data. -#} - -NO_ADDON = { - 'template': 'no_addon', - 'subject': 'Link an add-on to your OSF project', - 'presend': presends.no_addon, - 'categories': ['engagement', 'engagement-no-addon'], - 'engagement': True -} - -NO_LOGIN = { - 'template': 'no_login', - 'subject': 'What you\'re missing on the OSF', - 'presend': presends.no_login, - 'categories': ['engagement', 'engagement-no-login'], - 'engagement': True -} - -NEW_PUBLIC_PROJECT = { - 'template': 'new_public_project', - 'subject': 'Now, public. Next, impact.', - 'presend': presends.new_public_project, - 'categories': ['engagement', 'engagement-new-public-project'], - 'engagement': True -} - - -WELCOME_OSF4M = { - 'template': 'welcome_osf4m', - 'subject': 'The benefits of sharing your presentation', - 'presend': presends.welcome_osf4m, - 'categories': ['engagement', 'engagement-welcome-osf4m'], - 'engagement': True -} - -NO_ADDON_TYPE = 'no_addon' -NO_LOGIN_TYPE = 'no_login' -NEW_PUBLIC_PROJECT_TYPE = 'new_public_project' -WELCOME_OSF4M_TYPE = 'welcome_osf4m' - - -# Used to keep relationship from stored string 'email_type' to the predefined queued_email objects. 
-queue_mail_types = { - NO_ADDON_TYPE: NO_ADDON, - NO_LOGIN_TYPE: NO_LOGIN, - NEW_PUBLIC_PROJECT_TYPE: NEW_PUBLIC_PROJECT, - WELCOME_OSF4M_TYPE: WELCOME_OSF4M -} diff --git a/scripts/send_queued_mails.py b/scripts/send_queued_mails.py deleted file mode 100644 index 7c70c7685a0..00000000000 --- a/scripts/send_queued_mails.py +++ /dev/null @@ -1,66 +0,0 @@ -import logging - -import django -from django.db import transaction -from django.utils import timezone -django.setup() - -from framework.celery_tasks import app as celery_app - -from osf.models.queued_mail import QueuedMail -from website.app import init_app -from website import settings - -from scripts.utils import add_file_logger - - -logger = logging.getLogger(__name__) -logging.basicConfig(level=logging.INFO) - - -def main(dry_run=True): - # find all emails to be sent, pops the top one for each user(to obey the once - # a week requirement), checks to see if one has been sent this week, and if - # not send the email, otherwise leave it in the queue - - user_queue = {} - for email in find_queued_mails_ready_to_be_sent(): - user_queue.setdefault(email.user._id, []).append(email) - - emails_to_be_sent = pop_and_verify_mails_for_each_user(user_queue) - - logger.info(f'Emails being sent at {timezone.now().isoformat()}') - - for mail in emails_to_be_sent: - if not dry_run: - with transaction.atomic(): - try: - sent_ = mail.send_mail() - message = f'Email of type {mail.email_type} sent to {mail.to_addr}' if sent_ else \ - f'Email of type {mail.email_type} failed to be sent to {mail.to_addr}' - logger.info(message) - except Exception as error: - logger.error(f'Email of type {mail.email_type} to be sent to {mail.to_addr} caused an ERROR') - logger.exception(error) - pass - else: - logger.info(f'Email of type {mail.email_type} will be sent to {mail.to_addr}') - - -def find_queued_mails_ready_to_be_sent(): - return QueuedMail.objects.filter(send_at__lt=timezone.now(), sent_at__isnull=True) - -def pop_and_verify_mails_for_each_user(user_queue): - for user_emails in user_queue.values(): - mail = user_emails[0] - mails_past_week = mail.user.queuedmail_set.filter(sent_at__gt=timezone.now() - settings.WAIT_BETWEEN_MAILS) - if not mails_past_week.count(): - yield mail - - -@celery_app.task(name='scripts.send_queued_mails') -def run_main(dry_run=True): - init_app(routes=False) - if not dry_run: - add_file_logger(logger, __file__) - main(dry_run=dry_run) diff --git a/scripts/stuck_registration_audit.py b/scripts/stuck_registration_audit.py index b5445873faf..07a5d9a68c6 100644 --- a/scripts/stuck_registration_audit.py +++ b/scripts/stuck_registration_audit.py @@ -15,7 +15,7 @@ from framework.auth import Auth from framework.celery_tasks import app as celery_app from osf.management.commands import force_archive as fa -from osf.models import ArchiveJob, Registration +from osf.models import ArchiveJob, Registration, NotificationType from website.archiver import ARCHIVER_INITIATED from website.settings import ARCHIVE_TIMEOUT_TIMEDELTA, ADDONS_REQUESTED @@ -97,13 +97,16 @@ def main(): dict_writer.writeheader() dict_writer.writerows(broken_registrations) - mails.send_mail( - mail=mails.ARCHIVE_REGISTRATION_STUCK_DESK, - to_addr=settings.OSF_SUPPORT_EMAIL, - broken_registrations=broken_registrations, - attachment_name=filename, - attachment_content=output.getvalue(), - can_change_preferences=False, + NotificationType.objects.get( + name=NotificationType.Type.DESK_ARCHIVE_REGISTRATION_STUCK + ).emit( + destination_address=settings.OSF_SUPPORT_EMAIL, + 
event_context={
+                'broken_registrations': broken_registrations,
+                'attachment_name': filename,
+                'attachment_content': output.getvalue(),
+                'can_change_preferences': False
+            }
         )
     logger.info(f'{len(broken_registrations)} broken registrations found')
diff --git a/scripts/tests/test_triggered_mails.py b/scripts/tests/test_triggered_mails.py
deleted file mode 100644
index b0b94a7f7c5..00000000000
--- a/scripts/tests/test_triggered_mails.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from unittest import mock
-from datetime import timedelta
-
-from django.utils import timezone
-
-from tests.base import OsfTestCase
-from osf_tests.factories import UserFactory
-
-from scripts.triggered_mails import main, find_inactive_users_with_no_inactivity_email_sent_or_queued
-from website import mails
-
-
-class TestTriggeredMails(OsfTestCase):
-
-    def setUp(self):
-        super().setUp()
-        self.user = UserFactory()
-        self.user.date_last_login = timezone.now()
-        self.user.save()
-
-    @mock.patch('website.mails.queue_mail')
-    def test_dont_trigger_no_login_mail(self, mock_queue):
-        self.user.date_last_login = timezone.now() - timedelta(seconds=6)
-        self.user.save()
-        main(dry_run=False)
-        assert not mock_queue.called
-
-    @mock.patch('website.mails.queue_mail')
-    def test_trigger_no_login_mail(self, mock_queue):
-        self.user.date_last_login = timezone.now() - timedelta(weeks=6)
-        self.user.save()
-        main(dry_run=False)
-        mock_queue.assert_called_with(
-            user=mock.ANY,
-            fullname=self.user.fullname,
-            to_addr=self.user.username,
-            mail={'callback': mock.ANY, 'template': 'no_login', 'subject': mock.ANY},
-            send_at=mock.ANY,
-        )
-
-    def test_find_inactive_users_with_no_inactivity_email_sent_or_queued(self):
-        user_active = UserFactory(fullname='Spot')
-        user_inactive = UserFactory(fullname='Nucha')
-        user_already_received_mail = UserFactory(fullname='Pep')
-        user_active.date_last_login = timezone.now() - timedelta(seconds=6)
-        user_inactive.date_last_login = timezone.now() - timedelta(weeks=6)
-        user_already_received_mail.date_last_login = timezone.now() - timedelta(weeks=6)
-        user_active.save()
-        user_inactive.save()
-        user_already_received_mail.save()
-        mails.queue_mail(to_addr=user_already_received_mail.username,
-                         send_at=timezone.now(),
-                         user=user_already_received_mail,
-                         mail=mails.NO_LOGIN)
-        users = find_inactive_users_with_no_inactivity_email_sent_or_queued()
-        assert len(users) == 1
diff --git a/scripts/triggered_mails.py b/scripts/triggered_mails.py
deleted file mode 100644
index 3e0c4fea73a..00000000000
--- a/scripts/triggered_mails.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import logging
-
-from django.db import transaction
-from django.db.models import Q
-from django.utils import timezone
-
-from framework.celery_tasks import app as celery_app
-from osf.models import OSFUser
-from osf.models.queued_mail import NO_LOGIN_TYPE, NO_LOGIN, QueuedMail, queue_mail
-from website.app import init_app
-from website import settings
-
-from scripts.utils import add_file_logger
-
-logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
-
-
-def main(dry_run=True):
-    for user in find_inactive_users_with_no_inactivity_email_sent_or_queued():
-        if dry_run:
-            logger.warning('Dry run mode')
-        logger.warning(f'Email of type no_login queued to {user.username}')
-        if not dry_run:
-            with transaction.atomic():
-                queue_mail(
-                    to_addr=user.username,
-                    mail=NO_LOGIN,
-                    send_at=timezone.now(),
-                    user=user,
-                    fullname=user.fullname,
-                    osf_support_email=settings.OSF_SUPPORT_EMAIL,
-                )
-
-
-def 
find_inactive_users_with_no_inactivity_email_sent_or_queued(): - users_sent_ids = QueuedMail.objects.filter(email_type=NO_LOGIN_TYPE).values_list('user__guids___id') - return (OSFUser.objects - .filter( - (Q(date_last_login__lt=timezone.now() - settings.NO_LOGIN_WAIT_TIME) & ~Q(tags__name='osf4m')) | - Q(date_last_login__lt=timezone.now() - settings.NO_LOGIN_OSF4M_WAIT_TIME, tags__name='osf4m'), - is_active=True) - .exclude(guids___id__in=users_sent_ids)) - -@celery_app.task(name='scripts.triggered_mails') -def run_main(dry_run=True): - init_app(routes=False) - if not dry_run: - add_file_logger(logger, __file__) - main(dry_run=dry_run) diff --git a/website/app.py b/website/app.py index 5db655a2164..7d9842348e4 100644 --- a/website/app.py +++ b/website/app.py @@ -19,7 +19,6 @@ from framework.transactions import handlers as transaction_handlers # Imports necessary to connect signals from website.archiver import listeners # noqa -from website.mails import listeners # noqa from website.notifications import listeners # noqa from website.identifiers import listeners # noqa from website.reviews import listeners # noqa diff --git a/website/archiver/utils.py b/website/archiver/utils.py index 72bffee47f8..9768c43a894 100644 --- a/website/archiver/utils.py +++ b/website/archiver/utils.py @@ -5,7 +5,6 @@ from django.db.models import CharField, OuterRef, Subquery from framework.auth import Auth from framework.utils import sanitize_html -from osf.models.notification_type import NotificationType from website import settings from website.archiver import ( @@ -27,6 +26,8 @@ def normalize_unicode_filenames(filename): def send_archiver_size_exceeded_mails(src, user, stat_result, url): + from osf.models.notification_type import NotificationType + NotificationType.objects.get( name=NotificationType.Type.DESK_ARCHIVE_JOB_EXCEEDED ).emit( @@ -51,6 +52,8 @@ def send_archiver_size_exceeded_mails(src, user, stat_result, url): def send_archiver_copy_error_mails(src, user, results, url): + from osf.models.notification_type import NotificationType + NotificationType.objects.get( name=NotificationType.Type.DESK_ARCHIVE_JOB_COPY_ERROR ).emit( @@ -76,6 +79,8 @@ def send_archiver_copy_error_mails(src, user, results, url): ) def send_archiver_file_not_found_mails(src, user, results, url): + from osf.models.notification_type import NotificationType + NotificationType.objects.get( name=NotificationType.Type.DESK_ARCHIVE_JOB_FILE_NOT_FOUND ).emit( @@ -100,6 +105,8 @@ def send_archiver_file_not_found_mails(src, user, results, url): ) def send_archiver_uncaught_error_mails(src, user, results, url): + from osf.models.notification_type import NotificationType + NotificationType.objects.get( name=NotificationType.Type.DESK_ARCHIVE_JOB_UNCAUGHT_ERROR ).emit( diff --git a/website/conferences/views.py b/website/conferences/views.py index cf7dbfd6d3b..1460d4dd78e 100644 --- a/website/conferences/views.py +++ b/website/conferences/views.py @@ -1,13 +1,11 @@ from rest_framework import status as http_status import logging -from flask import request -import waffle from django.db import transaction, connection from django.contrib.contenttypes.models import ContentType from framework.auth import get_or_create_user -from framework.exceptions import HTTPError, ServiceDiscontinuedError +from framework.exceptions import HTTPError from framework.flask import redirect from framework.transactions.handlers import no_auto_transaction from osf import features @@ -16,8 +14,6 @@ from website.conferences import utils from website.conferences.message 
import ConferenceMessage, ConferenceError from website.ember_osf_web.decorators import ember_flag_is_active -from website.mails import CONFERENCE_SUBMITTED, CONFERENCE_INACTIVE, CONFERENCE_FAILED, CONFERENCE_DEPRECATION -from website.mails import send_mail from website.util import web_url_for from website.util.metrics import CampaignSourceTags @@ -30,17 +26,6 @@ def meeting_hook(): """ message = ConferenceMessage() - if waffle.flag_is_active(request, features.DISABLE_MEETINGS): - send_mail( - message.sender_email, - CONFERENCE_DEPRECATION, - fullname=message.sender_display, - support_email=settings.OSF_SUPPORT_EMAIL, - can_change_preferences=False, - logo=settings.OSF_MEETINGS_LOGO, - ) - raise ServiceDiscontinuedError() - try: message.verify() except ConferenceError as error: @@ -54,14 +39,6 @@ def meeting_hook(): raise HTTPError(http_status.HTTP_406_NOT_ACCEPTABLE) if not conference.active: - send_mail( - message.sender_email, - CONFERENCE_INACTIVE, - fullname=message.sender_display, - presentations_url=web_url_for('conference_view', _absolute=True), - can_change_preferences=False, - logo=settings.OSF_MEETINGS_LOGO, - ) raise HTTPError(http_status.HTTP_406_NOT_ACCEPTABLE) add_poster_by_email(conference=conference, message=message) @@ -72,16 +49,6 @@ def add_poster_by_email(conference, message): :param Conference conference: :param ConferenceMessage message: """ - # Fail if no attachments - if not message.attachments: - return send_mail( - message.sender_email, - CONFERENCE_FAILED, - fullname=message.sender_display, - can_change_preferences=False, - logo=settings.OSF_MEETINGS_LOGO - ) - with transaction.atomic(): user, user_created = get_or_create_user( message.sender_display, @@ -97,16 +64,6 @@ def add_poster_by_email(conference, message): user.update_date_last_login() user.save() - # must save the user first before accessing user._id - set_password_url = web_url_for( - 'reset_password_get', - uid=user._id, - token=user.verification_key_v2['token'], - _absolute=True, - ) - else: - set_password_url = None - # Always create a new meeting node node = Node.objects.create( title=message.subject, @@ -125,35 +82,6 @@ def add_poster_by_email(conference, message): utils.upload_attachments(user, node, message.attachments) - download_url = node.web_url_for( - 'addon_view_or_download_file', - path=message.attachments[0].filename, - provider='osfstorage', - action='download', - _absolute=True, - ) - - # Send confirmation email - send_mail( - message.sender_email, - CONFERENCE_SUBMITTED, - conf_full_name=conference.name, - conf_view_url=web_url_for( - 'conference_results', - meeting=message.conference_name, - _absolute=True, - ), - fullname=message.sender_display, - user_created=user_created, - set_password_url=set_password_url, - profile_url=user.absolute_url, - node_url=node.absolute_url, - file_url=download_url, - presentation_type=message.conference_category.lower(), - is_spam=message.is_spam, - can_change_preferences=False, - logo=settings.OSF_MEETINGS_LOGO - ) def conference_data(meeting): try: diff --git a/website/mails/listeners.py b/website/mails/listeners.py deleted file mode 100644 index 3f411d52f87..00000000000 --- a/website/mails/listeners.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Functions that listen for event signals and queue up emails. -All triggered emails live here. 
-""" - -from django.utils import timezone - -from website import settings -from framework.auth import signals as auth_signals -from website.project import signals as project_signals - - -@auth_signals.unconfirmed_user_created.connect -def queue_no_addon_email(user): - """Queue an email for user who has not connected an addon after - `settings.NO_ADDON_WAIT_TIME` months of signing up for the OSF. - """ - from osf.models.queued_mail import queue_mail, NO_ADDON - queue_mail( - to_addr=user.username, - mail=NO_ADDON, - send_at=timezone.now() + settings.NO_ADDON_WAIT_TIME, - user=user, - fullname=user.fullname - ) - -@project_signals.privacy_set_public.connect -def queue_first_public_project_email(user, node, meeting_creation): - """Queue and email after user has made their first - non-OSF4M project public. - """ - from osf.models.queued_mail import queue_mail, QueuedMail, NEW_PUBLIC_PROJECT_TYPE, NEW_PUBLIC_PROJECT - if not meeting_creation: - sent_mail = QueuedMail.objects.filter(user=user, email_type=NEW_PUBLIC_PROJECT_TYPE) - if not sent_mail.exists(): - queue_mail( - to_addr=user.username, - mail=NEW_PUBLIC_PROJECT, - send_at=timezone.now() + settings.NEW_PUBLIC_PROJECT_WAIT_TIME, - user=user, - nid=node._id, - fullname=user.fullname, - project_title=node.title, - osf_support_email=settings.OSF_SUPPORT_EMAIL, - ) diff --git a/website/mails/mails.py b/website/mails/mails.py index 84e82d4632a..83ab3afc613 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -19,14 +19,10 @@ """ import os import logging -import waffle from mako.lookup import TemplateLookup, Template -from framework.email import tasks -from osf import features from website import settings -from django.core.mail import EmailMessage, get_connection logger = logging.getLogger(__name__) @@ -38,11 +34,6 @@ HTML_EXT = '.html.mako' -DISABLED_MAILS = [ - 'welcome', - 'welcome_osf4i' -] - class Mail: """An email object. @@ -75,119 +66,6 @@ def render_message(tpl_name, **context): tpl = _tpl_lookup.get_template(tpl_name) return tpl.render(**context) - -def send_to_mailhog(subject, message, from_email, to_email, attachment_name=None, attachment_content=None): - email = EmailMessage( - subject=subject, - body=message, - from_email=from_email, - to=[to_email], - connection=get_connection( - backend='django.core.mail.backends.smtp.EmailBackend', - host=settings.MAILHOG_HOST, - port=settings.MAILHOG_PORT, - username='', - password='', - use_tls=False, - use_ssl=False, - ) - ) - email.content_subtype = 'html' - - if attachment_name and attachment_content: - email.attach(attachment_name, attachment_content) - - try: - email.send() - except ConnectionRefusedError: - logger.debug('Mailhog is not running. Please start it to send emails.') - return - - -def send_mail( - to_addr, - mail, - from_addr=None, - bcc_addr=None, - reply_to=None, - mailer=None, - celery=True, - username=None, - password=None, - callback=None, - attachment_name=None, - attachment_content=None, - **context): - """ - Send an email from the OSF. 
- Example: - from website import mails - - mails.send_email('foo@bar.com', mails.TEST, name="Foo") - - :param str to_addr: The recipient's email address - :param str bcc_addr: The BCC senders's email address (or list of addresses) - :param str reply_to: The sender's email address will appear in the reply-to header - :param Mail mail: The mail object - :param str mimetype: Either 'plain' or 'html' - :param function callback: celery task to execute after send_mail completes - :param **context: Context vars for the message template - - .. note: - Uses celery if available - """ - if waffle.switch_is_active(features.DISABLE_ENGAGEMENT_EMAILS) and mail.engagement: - return False - - from_addr = from_addr or settings.FROM_EMAIL - mailer = mailer or tasks.send_email - subject = mail.subject(**context) - message = mail.html(**context) - # Don't use ttls and login in DEBUG_MODE - ttls = login = not settings.DEBUG_MODE - logger.debug('Sending email...') - logger.debug(f'To: {to_addr}\nFrom: {from_addr}\nSubject: {subject}\nMessage: {message}') - - if waffle.switch_is_active(features.ENABLE_MAILHOG): - logger.debug('Intercepting email: sending via MailHog') - send_to_mailhog( - subject=subject, - message=message, - from_email=from_addr, - to_email=to_addr, - attachment_name=attachment_name, - attachment_content=attachment_content - ) - - kwargs = dict( - from_addr=from_addr, - to_addr=to_addr, - subject=subject, - message=message, - ttls=ttls, - login=login, - username=username, - password=password, - categories=mail.categories, - attachment_name=attachment_name, - attachment_content=attachment_content, - bcc_addr=bcc_addr, - reply_to=reply_to, - ) - - logger.debug('Preparing to send...') - if settings.USE_CELERY and celery: - logger.debug('Sending via celery...') - return mailer.apply_async(kwargs=kwargs, link=callback) - else: - logger.debug('Sending without celery') - ret = mailer(**kwargs) - if callback: - callback() - - return ret - - def get_english_article(word): """ Decide whether to use 'a' or 'an' for a given English word. @@ -199,51 +77,10 @@ def get_english_article(word): # Predefined Emails - -TEST = Mail('test', subject='A test email to ${name}', categories=['test']) - -# Emails for first-time login through external identity providers. 
-EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE = Mail( - 'external_confirm_create', - subject='OSF Account Verification' -) - -FORK_COMPLETED = Mail( - 'fork_completed', - subject='Your fork has completed' -) - -FORK_FAILED = Mail( - 'fork_failed', - subject='Your fork has failed' -) - -EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK = Mail( - 'external_confirm_link', - subject='OSF Account Verification' -) -EXTERNAL_LOGIN_LINK_SUCCESS = Mail( - 'external_confirm_success', - subject='OSF Account Verification Success' -) - -# Sign up confirmation emails for OSF, native campaigns and branded campaigns -INITIAL_CONFIRM_EMAIL = Mail( - 'initial_confirm', - subject='OSF Account Verification' -) -CONFIRM_EMAIL = Mail( - 'confirm', - subject='Add a new email to your OSF account' -) CONFIRM_EMAIL_ERPC = Mail( 'confirm_erpc', subject='OSF Account Verification, Election Research Preacceptance Competition' ) -CONFIRM_EMAIL_AGU_CONFERENCE_2023 = Mail( - 'confirm_agu_conference_2023', - subject='OSF Account Verification, from the American Geophysical Union Conference' -) CONFIRM_EMAIL_AGU_CONFERENCE = Mail( 'confirm_agu_conference', subject='OSF Account Verification, from the American Geophysical Union Conference' @@ -340,15 +177,6 @@ def get_english_article(word): 'contributor_added_access_request', subject='Your access request to an OSF project has been approved' ) -FORWARD_INVITE = Mail('forward_invite', subject='Please forward to ${fullname}') -FORWARD_INVITE_REGISTERED = Mail('forward_invite_registered', subject='Please forward to ${fullname}') - -FORGOT_PASSWORD = Mail('forgot_password', subject='Reset Password') -FORGOT_PASSWORD_INSTITUTION = Mail('forgot_password_institution', subject='Set Password') -PASSWORD_RESET = Mail('password_reset', subject='Your OSF password has been reset') -PENDING_VERIFICATION = Mail('pending_invite', subject='Your account is almost ready!') -PENDING_VERIFICATION_REGISTERED = Mail('pending_registered', subject='Received request to be a contributor') - REQUEST_EXPORT = Mail('support_request', subject='[via OSF] Export Request') REQUEST_DEACTIVATION = Mail('support_request', subject='[via OSF] Deactivation Request') @@ -360,22 +188,6 @@ def get_english_article(word): subject='[auto] Spam files audit' ) -CONFERENCE_SUBMITTED = Mail( - 'conference_submitted', - subject='Project created on OSF', -) -CONFERENCE_INACTIVE = Mail( - 'conference_inactive', - subject='OSF Error: Conference inactive', -) -CONFERENCE_FAILED = Mail( - 'conference_failed', - subject='OSF Error: No files attached', -) -CONFERENCE_DEPRECATION = Mail( - 'conference_deprecation', - subject='Meeting Service Discontinued', -) DIGEST = Mail( 'digest', subject='OSF Notifications', @@ -387,11 +199,6 @@ def get_english_article(word): subject='Recent submissions to ${provider_name}', ) -TRANSACTIONAL = Mail( - 'transactional', subject='OSF: ${subject}', - categories=['notifications', 'notifications-transactional'] -) - # Retraction related Mail objects PENDING_RETRACTION_ADMIN = Mail( 'pending_retraction_admin', diff --git a/website/mails/presends.py b/website/mails/presends.py deleted file mode 100644 index 3a3175c99ee..00000000000 --- a/website/mails/presends.py +++ /dev/null @@ -1,55 +0,0 @@ -from django.utils import timezone - -from website import settings - -def no_addon(email): - return len([addon for addon in email.user.get_addons() if addon.config.short_name != 'osfstorage']) == 0 - -def no_login(email): - from osf.models.queued_mail import QueuedMail, NO_LOGIN_TYPE - sent = QueuedMail.objects.filter(user=email.user, 
email_type=NO_LOGIN_TYPE).exclude(_id=email._id) - if sent.exists(): - return False - return email.user.date_last_login < timezone.now() - settings.NO_LOGIN_WAIT_TIME - -def new_public_project(email): - """ Will check to make sure the project that triggered this presend is still public - before sending the email. It also checks to make sure this is the first (and only) - new public project email to be sent - - :param email: QueuedMail object, with 'nid' in its data field - :return: boolean based on whether the email should be sent - """ - - # In line import to prevent circular importing - from osf.models import AbstractNode - - node = AbstractNode.load(email.data['nid']) - - if not node: - return False - public = email.find_sent_of_same_type_and_user() - return node.is_public and not len(public) - - -def welcome_osf4m(email): - """ presend has two functions. First is to make sure that the user has not - converted to a regular OSF user by logging in. Second is to populate the - data field with downloads by finding the file/project (node_settings) and - counting downloads of all files within that project - - :param email: QueuedMail object with data field including fid - :return: boolean based on whether the email should be sent - """ - # In line import to prevent circular importing - from addons.osfstorage.models import OsfStorageFileNode - if email.user.date_last_login: - if email.user.date_last_login > timezone.now() - settings.WELCOME_OSF4M_WAIT_TIME_GRACE: - return False - upload = OsfStorageFileNode.load(email.data['fid']) - if upload: - email.data['downloads'] = upload.get_download_count() - else: - email.data['downloads'] = 0 - email.save() - return True diff --git a/website/notifications/constants.py b/website/notifications/constants.py index 66bb575b765..35e3559d252 100644 --- a/website/notifications/constants.py +++ b/website/notifications/constants.py @@ -1,17 +1,14 @@ -NODE_SUBSCRIPTIONS_AVAILABLE = { - 'node_file_updated': 'Files updated' -} # Note: if the subscription starts with 'global_', it will be treated like a default # subscription. If no notification type has been assigned, the user subscription # will default to 'email_transactional'. -USER_SUBSCRIPTIONS_AVAILABLE = { - 'global_file_updated': 'Files updated', - 'global_reviews': 'Preprint submissions updated' -} +USER_SUBSCRIPTIONS_AVAILABLE = [ + 'user_file_updated', + 'user_reviews' +] PROVIDER_SUBSCRIPTIONS_AVAILABLE = { - 'new_pending_submissions': 'New preprint submissions for moderators to review.' + 'provider_new_pending_submissions': 'New preprint submissions for moderators to review.' 
} # Note: the python value None mean inherit from parent diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index 21aed1df9e3..ed9a936492f 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -1,6 +1,4 @@ import logging -from website.notifications.exceptions import InvalidSubscriptionError -from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications from website.project.signals import contributor_added, project_created from framework.auth.signals import user_confirmed @@ -10,25 +8,15 @@ def subscribe_creator(node): if node.is_collection or node.is_deleted: return None - try: - subscribe_user_to_notifications(node, node.creator) - except InvalidSubscriptionError as err: - user = node.creator._id if node.creator else 'None' - logger.warning(f'Skipping subscription of user {user} to node {node._id}') - logger.warning(f'Reason: {str(err)}') + from website.notifications.utils import subscribe_user_to_notifications + subscribe_user_to_notifications(node, node.creator) @contributor_added.connect def subscribe_contributor(node, contributor, auth=None, *args, **kwargs): - try: - subscribe_user_to_notifications(node, contributor) - except InvalidSubscriptionError as err: - logger.warning(f'Skipping subscription of user {contributor} to node {node._id}') - logger.warning(f'Reason: {str(err)}') + from website.notifications.utils import subscribe_user_to_notifications + subscribe_user_to_notifications(node, contributor) @user_confirmed.connect def subscribe_confirmed_user(user): - try: - subscribe_user_to_global_notifications(user) - except InvalidSubscriptionError as err: - logger.warning(f'Skipping subscription of user {user} to global subscriptions') - logger.warning(f'Reason: {str(err)}') + from website.notifications.utils import subscribe_user_to_global_notifications + subscribe_user_to_global_notifications(user) diff --git a/website/notifications/utils.py b/website/notifications/utils.py index 38707ac24a6..d9ceadfc39b 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -40,8 +40,10 @@ def find_subscription_type(subscription): """Find subscription type string within specific subscription. Essentially removes extraneous parts of the string to get the type. """ - subs_available = list(constants.USER_SUBSCRIPTIONS_AVAILABLE.keys()) - subs_available.extend(list(constants.NODE_SUBSCRIPTIONS_AVAILABLE.keys())) + subs_available = constants.USER_SUBSCRIPTIONS_AVAILABLE + subs_available.extend(list({ + 'node_file_updated': 'Files updated' + }.keys())) for available in subs_available: if available in subscription: return available @@ -279,7 +281,7 @@ def format_data(user, nodes): # user is contributor on a component of the project/node if can_read: - node_sub_available = list(constants.NODE_SUBSCRIPTIONS_AVAILABLE.keys()) + node_sub_available = ['node_file_updated'] subscriptions = get_all_node_subscriptions(user, node, user_subscriptions=user_subscriptions).filter(event_name__in=node_sub_available) for subscription in subscriptions: @@ -314,7 +316,7 @@ def format_data(user, nodes): def format_user_subscriptions(user): """ Format user-level subscriptions (e.g. 
comment replies across the OSF) for user settings page""" - user_subs_available = list(constants.USER_SUBSCRIPTIONS_AVAILABLE.keys()) + user_subs_available = constants.USER_SUBSCRIPTIONS_AVAILABLE subscriptions = [ serialize_event( user, subscription, @@ -338,8 +340,8 @@ def format_file_subscription(user, node_id, path, provider): return serialize_event(user, node=node, event_description='file_updated') -all_subs = constants.NODE_SUBSCRIPTIONS_AVAILABLE.copy() -all_subs.update(constants.USER_SUBSCRIPTIONS_AVAILABLE) +all_subs = ['node_file_updated'] +all_subs += constants.USER_SUBSCRIPTIONS_AVAILABLE def serialize_event(user, subscription=None, node=None, event_description=None): """ @@ -464,10 +466,8 @@ def subscribe_user_to_notifications(node, user): if getattr(node, 'is_registration', False): raise InvalidSubscriptionError('Registrations are invalid targets for subscriptions') - events = constants.NODE_SUBSCRIPTIONS_AVAILABLE - if user.is_registered: - for event in events: + for event in ['node_file_updated',]: subscription, _ = NotificationSubscription.objects.get_or_create( user=user, notification_type__name=event diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index a48d601e071..6fa873e53a9 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -1,6 +1,3 @@ -from django.contrib.contenttypes.models import ContentType -from website.profile.utils import get_profile_image_url -from osf.models import NotificationSubscription, NotificationType from website.settings import DOMAIN from website.reviews import signals as reviews_signals @@ -9,12 +6,15 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context, user, resource): context['referrer_fullname'] = user.fullname provider = resource.provider + from django.contrib.contenttypes.models import ContentType + from osf.models import NotificationSubscription, NotificationType provider_subscription, _ = NotificationSubscription.objects.get_or_create( notification_type__name=NotificationType.Type.PROVIDER_REVIEWS_WITHDRAWAL_REQUESTED, object_id=provider.id, content_type=ContentType.objects.get_for_model(provider.__class__), ) + from website.profile.utils import get_profile_image_url context['message'] = f'has requested withdrawal of "{resource.title}".' context['profile_image_url'] = get_profile_image_url(user) @@ -33,12 +33,15 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context, def reviews_withdrawal_requests_notification(self, timestamp, context): preprint = context['reviewable'] preprint_word = preprint.provider.preprint_word + from django.contrib.contenttypes.models import ContentType + from osf.models import NotificationSubscription, NotificationType provider_subscription, _ = NotificationSubscription.objects.get_or_create( notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, object_id=preprint.provider.id, content_type=ContentType.objects.get_for_model(preprint.provider.__class__), ) + from website.profile.utils import get_profile_image_url context['message'] = f'has requested withdrawal of the {preprint_word} "{preprint.title}".' 
context['profile_image_url'] = get_profile_image_url(context['requester'])

From 3d80365bc41719310adaa6cf0cee43d2784cdc46 Mon Sep 17 00:00:00 2001
From: John Tordoff
Date: Sun, 27 Jul 2025 18:04:22 -0400
Subject: [PATCH 125/176] improve automatic subscription methods

---
 addons/base/views.py                          |  9 ++-
 api/preprints/serializers.py                  |  2 +-
 api_tests/mailhog/test_mailhog.py             |  8 +-
 notifications.yaml                            | 24 +++++-
 ...ion_provider_notification_subscriptions.py | 40 ----------
 ...ion_provider_notification_subscriptions.py | 45 -----------
 osf/models/mixins.py                          |  6 +-
 osf/models/node.py                            | 26 +++++--
 osf/models/notification.py                    |  2 +-
 osf/models/provider.py                        | 21 ++++--
 osf/models/registrations.py                   |  3 +-
 osf_tests/test_collection_submission.py       |  6 --
 osf_tests/test_schema_responses.py            | 17 ++---
 osf_tests/utils.py                            | 14 ++--
 tests/test_events.py                          | 43 +++++------
 website/mails/mails.py                        | 14 ----
 website/notifications/emails.py               | 28 +------
 website/notifications/events/files.py         | 20 ++++-
 website/notifications/listeners.py            | 28 +++++--
 website/notifications/utils.py                | 74 +++++--------------
 website/project/views/contributor.py          | 12 +--
 21 files changed, 173 insertions(+), 269 deletions(-)
 delete mode 100644 osf/management/commands/populate_collection_provider_notification_subscriptions.py
 delete mode 100644 osf/management/commands/populate_registration_provider_notification_subscriptions.py

diff --git a/addons/base/views.py b/addons/base/views.py
index c621658287c..4547112e44b 100644
--- a/addons/base/views.py
+++ b/addons/base/views.py
@@ -582,7 +582,7 @@ def create_waterbutler_log(payload, **kwargs):
 
         if payload.get('errors'):
             notification_type = NotificationType.Type.FILE_OPERATION_FAILED
-            NotificationType.objects.get(name=notification_type.value).emit(
+            NotificationType.objects.get(name=notification_type).emit(
                 user=user,
                 event_context={
                     'action': payload['action'],
@@ -608,7 +608,12 @@ def create_waterbutler_log(payload, **kwargs):
     if target_node and payload['action'] != 'download_file':
         update_storage_usage_with_size(payload)
 
-    file_signals.file_updated.send(target=node, user=user, event_type=action, payload=payload)
+    file_signals.file_updated.send(
+        target=node,
+        user=user,
+        event_type=action,
+        payload=payload
+    )
 
     match f'node_{action}':
         case NotificationType.Type.NODE_FILE_ADDED:
diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index d22bb00ab81..b5cdd0f1f93 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -468,7 +468,7 @@ def update(self, preprint, validated_data):
                 preprint,
                 contributor=author,
                 auth=auth,
-                email_template='preprint',
+                notification_type='preprint',
             )
 
         return preprint
diff --git a/api_tests/mailhog/test_mailhog.py b/api_tests/mailhog/test_mailhog.py
index 997947f9588..573df3a4fbe 100644
--- a/api_tests/mailhog/test_mailhog.py
+++ b/api_tests/mailhog/test_mailhog.py
@@ -1,7 +1,6 @@
 import requests
 import pytest
 from django.core.mail import send_mail
-from website.mails import TEST
 from waffle.testutils import override_switch
 from osf import features
 from website import settings
@@ -31,7 +30,12 @@ def test_mailhog_received_mail(self):
         mailhog_v2 = f'{settings.MAILHOG_API_HOST}/api/v2/messages'
         requests.delete(mailhog_v1)
 
-        send_mail('to_addr@mail.com', TEST, name='Mailhog')
+        send_mail(
+            'test email',
+            'test message',
+            from_email=settings.OSF_CONTACT_EMAIL,
+            recipient_list=['to_addr@mail.com',]
+        )
         res = requests.get(mailhog_v2).json()
         assert res['count'] == 1
         assert res['items'][0]['Content']['Headers']['To'][0] ==
'to_addr@mail.com'
diff --git a/notifications.yaml b/notifications.yaml
index fe5186bb8fd..65ffa263181 100644
--- a/notifications.yaml
+++ b/notifications.yaml
@@ -93,11 +93,17 @@ notification_types:
   - name: user_export_data_request
     __docs__: ...
     object_content_type_model_name: osfuser
-    template: 'website/templates/emails/.html.mako'
+    template: 'website/templates/emails/support_request.html.mako'
   - name: user_request_deactivation
+    subject: '[via OSF] Deactivation Request'
+    __docs__: ...
+    object_content_type_model_name: osfuser
+    template: 'website/templates/emails/support_request.html.mako'
+  - name: user_request_deactivation_complete
+    subject: '[via OSF] OSF account deactivated'
     __docs__: ...
     object_content_type_model_name: osfuser
-    template: 'website/templates/emails/.html.mako'
+    template: 'website/templates/emails/request_deactivation_complete.html.mako'
   - name: user_storage_cap_exceeded_announcement
     __docs__: ...
     object_content_type_model_name: osfuser
@@ -153,6 +159,16 @@ notification_types:
     __docs__: ...
     object_content_type_model_name: osfuser
     template: 'website/templates/emails/spam_user_banned.html.mako'
+  - name: user_file_operation_success
+    subject: 'Your ${action} has finished'
+    __docs__: ...
+    object_content_type_model_name: osfuser
+    template: 'website/templates/emails/file_operation_success.html.mako'
+  - name: user_file_operation_failed
+    subject: 'Your ${action} has failed'
+    __docs__: ...
+    object_content_type_model_name: osfuser
+    template: 'website/templates/emails/file_operation_failed.html.mako'

#### PROVIDER
   - name: provider_new_pending_submissions
@@ -336,11 +352,11 @@
   - name: addon_boa_job_failure
     __docs__: ...
     object_content_type_model_name: desk
-    template: 'website/templates/emails/addon_boa_job_failure.html.mako'
+    template: 'website/templates/emails/addons_boa_job_failure.html.mako'
   - name: addon_boa_job_complete
     __docs__: ...
     object_content_type_model_name: desk
-    template: 'website/templates/emails/addon_boa_job_complete.html.mako'
+    template: 'website/templates/emails/addons_boa_job_complete.html.mako'
   - name: desk_archive_job_copy_error
     __docs__: Archive job failed due to copy error. Sent to support desk.
object_content_type_model_name: desk diff --git a/osf/management/commands/populate_collection_provider_notification_subscriptions.py b/osf/management/commands/populate_collection_provider_notification_subscriptions.py deleted file mode 100644 index c3a21eb8d20..00000000000 --- a/osf/management/commands/populate_collection_provider_notification_subscriptions.py +++ /dev/null @@ -1,40 +0,0 @@ -import logging - -from django.core.management.base import BaseCommand -from osf.models import NotificationSubscriptionLegacy, CollectionProvider - -logger = logging.getLogger(__file__) - - -def populate_collection_provider_notification_subscriptions(): - for provider in CollectionProvider.objects.all(): - provider_admins = provider.get_group('admin').user_set.all() - provider_moderators = provider.get_group('moderator').user_set.all() - - for subscription in provider.DEFAULT_SUBSCRIPTIONS: - instance, created = NotificationSubscriptionLegacy.objects.get_or_create( - _id=f'{provider._id}_{subscription}', - event_name=subscription, - provider=provider - ) - - if created: - logger.info(f'{provider._id}_{subscription} NotificationSubscription object has been created') - else: - logger.info(f'{provider._id}_{subscription} NotificationSubscription object exists') - - for user in provider_admins | provider_moderators: - # add user to subscription list but set their notification to none by default - instance.add_user_to_subscription(user, 'email_transactional', save=True) - logger.info(f'User {user._id} is subscribed to {provider._id}_{subscription}') - - -class Command(BaseCommand): - help = """ - Creates NotificationSubscriptions for existing RegistrationProvider objects - and adds RegistrationProvider moderators/admins to subscriptions - """ - - # Management command handler - def handle(self, *args, **options): - populate_collection_provider_notification_subscriptions() diff --git a/osf/management/commands/populate_registration_provider_notification_subscriptions.py b/osf/management/commands/populate_registration_provider_notification_subscriptions.py deleted file mode 100644 index db4b44acba5..00000000000 --- a/osf/management/commands/populate_registration_provider_notification_subscriptions.py +++ /dev/null @@ -1,45 +0,0 @@ -import logging - -from django.contrib.auth.models import Group -from django.core.management.base import BaseCommand -from osf.models import RegistrationProvider, NotificationSubscriptionLegacy - -logger = logging.getLogger(__file__) - - -def populate_registration_provider_notification_subscriptions(): - for provider in RegistrationProvider.objects.all(): - try: - provider_admins = provider.get_group('admin').user_set.all() - provider_moderators = provider.get_group('moderator').user_set.all() - except Group.DoesNotExist: - logger.warning(f'Unable to find groups for provider "{provider._id}", assuming there are no subscriptions to create.') - continue - - for subscription in provider.DEFAULT_SUBSCRIPTIONS: - instance, created = NotificationSubscriptionLegacy.objects.get_or_create( - _id=f'{provider._id}_{subscription}', - event_name=subscription, - provider=provider - ) - - if created: - logger.info(f'{provider._id}_{subscription} NotificationSubscription object has been created') - else: - logger.info(f'{provider._id}_{subscription} NotificationSubscription object exists') - - for user in provider_admins | provider_moderators: - # add user to subscription list but set their notification to none by default - instance.add_user_to_subscription(user, 'email_transactional', save=True) - 
logger.info(f'User {user._id} is subscribed to {provider._id}_{subscription}') - - -class Command(BaseCommand): - help = """ - Creates NotificationSubscriptions for existing RegistrationProvider objects - and adds RegistrationProvider moderators/admins to subscriptions - """ - - # Management command handler - def handle(self, *args, **options): - populate_registration_provider_notification_subscriptions() diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 3cbb2283aab..33463e09924 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -1029,7 +1029,9 @@ class Meta: reviews_comments_private = models.BooleanField(null=True, blank=True) reviews_comments_anonymous = models.BooleanField(null=True, blank=True) - DEFAULT_SUBSCRIPTIONS = ['new_pending_submissions'] + DEFAULT_SUBSCRIPTIONS = [ + NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + ] @property def is_reviewed(self): @@ -1463,7 +1465,7 @@ def add_contributor(self, contributor, permissions=None, visible=True, self, contributor=contributor, auth=auth, - email_template=send_email, + notification_type=send_email, permissions=permissions ) diff --git a/osf/models/node.py b/osf/models/node.py index fb7a7f1e102..f4fdb2c2122 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -1339,11 +1339,17 @@ def subscribe_contributors_to_node(self): and send emails to users that they have been added to the project. (DraftNodes are hidden until registration). """ + from . import NotificationType + for user in self.contributors.filter(is_registered=True): perm = self.contributor_set.get(user=user).permission - project_signals.contributor_added.send(self, - contributor=user, - auth=None, email_template='default', permissions=perm) + project_signals.contributor_added.send( + self, + contributor=user, + auth=None, + notification_type=NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, + permissions=perm + ) def register_node(self, schema, auth, draft_registration, parent=None, child_ids=None, provider=None, manual_guid=None): """Make a frozen copy of a node. 
@@ -1671,7 +1677,12 @@ def fork_node(self, auth, title=None, parent=None): forked.save() # Need to call this after save for the notifications to be created with the _primary_key - project_signals.contributor_added.send(forked, contributor=user, auth=auth, email_template='false') + project_signals.contributor_added.send( + forked, + contributor=user, + auth=auth, + notification_type=None + ) return forked @@ -1780,7 +1791,12 @@ def use_as_template(self, auth, changes=None, top_level=True, parent=None): new.save(suppress_log=True) # Need to call this after save for the notifications to be created with the _primary_key - project_signals.contributor_added.send(new, contributor=auth.user, auth=auth, email_template='false') + project_signals.contributor_added.send( + new, + contributor=auth.user, + auth=auth, + notification_type=None + ) # Log the creation new.add_log( diff --git a/osf/models/notification.py b/osf/models/notification.py index 1b749af2b9b..fb9922078e4 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -46,7 +46,7 @@ def send( f"\nto={recipient_address}" f"\ntype={self.subscription.notification_type.name}" f"\ncontext={self.event_context}" - f"\nemail_context={self.email_context}" + f"\nemail_context={email_context}" ) elif protocol_type == 'email': email.send_email_with_send_grid( diff --git a/osf/models/provider.py b/osf/models/provider.py index b8dacc174bf..38b0affb035 100644 --- a/osf/models/provider.py +++ b/osf/models/provider.py @@ -2,6 +2,7 @@ import requests from django.apps import apps +from django.contrib.contenttypes.models import ContentType from django.contrib.postgres import fields from django.core.exceptions import ValidationError from django.db import connection @@ -14,12 +15,12 @@ from guardian.models import GroupObjectPermissionBase, UserObjectPermissionBase from framework import sentry +from . import NotificationType, NotificationSubscription from .base import BaseModel, TypedObjectIDMixin from .mixins import ReviewProviderMixin from .brand import Brand from .citation import CitationStyle from .licenses import NodeLicense -from .notifications import NotificationSubscriptionLegacy from .storage import ProviderAssetFile from .subject import Subject from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField @@ -252,7 +253,9 @@ def setup_share_source(self, provider_home_page): class CollectionProvider(AbstractProvider): - DEFAULT_SUBSCRIPTIONS = ['new_pending_submissions'] + DEFAULT_SUBSCRIPTIONS = [ + NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + ] class Meta: permissions = ( @@ -292,7 +295,11 @@ class RegistrationProvider(AbstractProvider): REVIEW_STATES = RegistrationModerationStates STATE_FIELD_NAME = 'moderation_state' - DEFAULT_SUBSCRIPTIONS = ['new_pending_submissions', 'new_pending_withdraw_requests'] + DEFAULT_SUBSCRIPTIONS = [ + NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, + NotificationType.Type.PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS, + + ] # A list of dictionaries describing new fields that providers want to surface on their registrations # Each entry must provide a 'field_name' key. 
In the future, other keys may be supported to enable @@ -464,10 +471,10 @@ def create_provider_auth_groups(sender, instance, created, **kwargs): def create_provider_notification_subscriptions(sender, instance, created, **kwargs): if created: for subscription in instance.DEFAULT_SUBSCRIPTIONS: - NotificationSubscriptionLegacy.objects.get_or_create( - _id=f'{instance._id}_{subscription}', - event_name=subscription, - provider=instance + NotificationSubscription.objects.get_or_create( + notification_type__name=subscription, + object_id=instance.id, + content_type=ContentType.objects.get_for_model(instance) ) diff --git a/osf/models/registrations.py b/osf/models/registrations.py index e62bf5f14bf..e4f4106b563 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -21,6 +21,7 @@ from osf.exceptions import NodeStateError, DraftRegistrationStateError from osf.external.internet_archive.tasks import archive_to_ia, update_ia_metadata from osf.metrics import RegistriesModerationMetrics +from . import NotificationType from .action import RegistrationAction from .archive import ArchiveJob from .contributor import DraftRegistrationContributor @@ -1316,7 +1317,7 @@ def create_from_node(cls, user, schema, node=None, data=None, provider=None): draft, contributor=user, auth=None, - email_template='draft_registration', + notification_type=NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION, permissions=initiator_permissions ) diff --git a/osf_tests/test_collection_submission.py b/osf_tests/test_collection_submission.py index d2dd906b692..e169d3c5582 100644 --- a/osf_tests/test_collection_submission.py +++ b/osf_tests/test_collection_submission.py @@ -12,7 +12,6 @@ from osf.utils.workflows import CollectionSubmissionStates from framework.exceptions import PermissionsError from api_tests.utils import UserRoles -from osf.management.commands.populate_collection_provider_notification_subscriptions import populate_collection_provider_notification_subscriptions from django.utils import timezone from tests.utils import capture_notifications @@ -150,10 +149,6 @@ class TestModeratedCollectionSubmission: MOCK_NOW = timezone.now() - @pytest.fixture(autouse=True) - def setup(self): - populate_collection_provider_notification_subscriptions() - def test_submit(self, moderated_collection_submission): # .submit on post_save assert moderated_collection_submission.state == CollectionSubmissionStates.PENDING @@ -179,7 +174,6 @@ def test_notify_moderators_pending(self, node, moderated_collection): collection=moderated_collection, creator=node.creator, ) - populate_collection_provider_notification_subscriptions() collection_submission.save() assert len(notifications) == 2 assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_SUBMITTED diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index 3b3af1458cf..1226c24c353 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -1,4 +1,3 @@ -from unittest import mock import pytest from api.providers.workflows import Workflows @@ -11,8 +10,6 @@ from osf_tests.utils import get_default_test_schema, _ensure_subscriptions from tests.utils import capture_notifications -from website.notifications import emails - from transitions import MachineError # See osf_tests.utils.default_test_schema for block types and valid answers @@ -870,13 +867,11 @@ def test_moderators_notified_on_admin_approval(self, revised_response, admin_use revised_response.save() 
revised_response.pending_approvers.add(admin_user) - store_emails = emails.store_emails - with mock.patch.object(emails, 'store_emails', autospec=True) as mock_store: - mock_store.side_effect = store_emails + with capture_notifications() as notifications: revised_response.approve(user=admin_user) - - assert mock_store.called - assert mock_store.call_args[0][0] == [moderator._id] + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['kwargs']['user'] == moderator def test_no_moderator_notification_on_admin_approval_of_initial_response( self, initial_response, admin_user): @@ -884,9 +879,9 @@ def test_no_moderator_notification_on_admin_approval_of_initial_response( initial_response.save() initial_response.pending_approvers.add(admin_user) - with mock.patch.object(emails, 'store_emails', autospec=True) as mock_store: + with capture_notifications() as notifications: initial_response.approve(user=admin_user) - assert not mock_store.called + assert not notifications def test_moderator_accept(self, initial_response, moderator): initial_response.approvals_state_machine.set_state(ApprovalStates.PENDING_MODERATION) diff --git a/osf_tests/utils.py b/osf_tests/utils.py index b3f3c92bc88..ecfd046d1b2 100644 --- a/osf_tests/utils.py +++ b/osf_tests/utils.py @@ -3,6 +3,8 @@ import functools from unittest import mock +from django.contrib.contenttypes.models import ContentType + from framework.auth import Auth from django.utils import timezone from google.cloud.storage import Client, Bucket, Blob @@ -16,7 +18,7 @@ Sanction, RegistrationProvider, RegistrationSchema, - NotificationSubscriptionLegacy + NotificationSubscription ) from osf.utils.migrations import create_schema_blocks_for_atomic_schema @@ -228,11 +230,11 @@ def _ensure_subscriptions(provider): This has led to observed race conditions and probabalistic test failures. Avoid that. 
''' - for subscription in provider.DEFAULT_SUBSCRIPTIONS: - NotificationSubscriptionLegacy.objects.get_or_create( - _id=f'{provider._id}_{subscription}', - event_name=subscription, - provider=provider + for notification_type in provider.DEFAULT_SUBSCRIPTIONS: + NotificationSubscription.objects.get_or_create( + notification_type=notification_type, + object_id=provider.id, + content_type=ContentType.objects.get_for_model(provider) ) def assert_notification_correctness(send_mail_mock, expected_template, expected_recipients): diff --git a/tests/test_events.py b/tests/test_events.py index e98119e61b9..812eb1608a1 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -135,7 +135,7 @@ def setUp(self): self.user_2 = factories.AuthUserFactory() self.project = factories.ProjectFactory(creator=self.user_1) # subscription - self.sub = factories.NotificationSubscriptionLegacyFactory( + self.sub = factories.NotificationSubscriptionFactory( _id=self.project._id + 'file_updated', owner=self.project, event_name='file_updated', @@ -161,7 +161,7 @@ def setUp(self): self.user = factories.UserFactory() self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() - self.project_subscription = factories.NotificationSubscriptionLegacyFactory( + self.project_subscription = factories.NotificationSubscriptionFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' @@ -253,7 +253,7 @@ def setUp(self): self.user_2 = factories.AuthUserFactory() self.project = factories.ProjectFactory(creator=self.user_1) # subscription - self.sub = factories.NotificationSubscriptionLegacyFactory( + self.sub = factories.NotificationSubscriptionFactory( _id=self.project._id + 'file_updated', owner=self.project, event_name='file_updated', @@ -307,21 +307,21 @@ def setUp(self): ) # Subscriptions # for parent node - self.sub = factories.NotificationSubscriptionLegacyFactory( + self.sub = factories.NotificationSubscriptionFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' ) self.sub.save() # for private node - self.private_sub = factories.NotificationSubscriptionLegacyFactory( + self.private_sub = factories.NotificationSubscriptionFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() # for file subscription - self.file_sub = factories.NotificationSubscriptionLegacyFactory( + self.file_sub = factories.NotificationSubscriptionFactory( _id='{pid}_{wbid}_file_updated'.format( pid=self.project._id, wbid=self.event.waterbutler_id @@ -337,8 +337,7 @@ def test_info_formed_correct(self): # assert 'moved file "{}".' == self.event.html_message # assert 'created folder "Three/".' 
== self.event.text_message - @mock.patch('website.notifications.emails.store_emails') - def test_user_performing_action_no_email(self, mock_store): + def test_user_performing_action_no_email(self): # Move Event: Makes sure user who performed the action is not # included in the notifications self.sub.email_digest.add(self.user_2) @@ -346,16 +345,13 @@ def test_user_performing_action_no_email(self, mock_store): self.event.perform() assert 0 == mock_store.call_count - @mock.patch('website.notifications.emails.store_emails') - def test_perform_store_called_once(self, mock_store): - # Move Event: Tests that store_emails is called once from perform + def test_perform_store_called_once(self): self.sub.email_transactional.add(self.user_1) self.sub.save() self.event.perform() assert 1 == mock_store.call_count - @mock.patch('website.notifications.emails.store_emails') - def test_perform_store_one_of_each(self, mock_store): + def test_perform_store_one_of_each(self): # Move Event: Tests that store_emails is called 3 times, one in # each category self.sub.email_transactional.add(self.user_1) @@ -372,8 +368,7 @@ def test_perform_store_one_of_each(self, mock_store): self.event.perform() assert 3 == mock_store.call_count - @mock.patch('website.notifications.emails.store_emails') - def test_remove_user_sent_once(self, mock_store): + def test_remove_user_sent_once(self): # Move Event: Tests removed user is removed once. Regression self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.project.save() @@ -402,21 +397,21 @@ def setUp(self): ) # Subscriptions # for parent node - self.sub = factories.NotificationSubscriptionLegacyFactory( + self.sub = factories.NotificationSubscriptionFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' ) self.sub.save() # for private node - self.private_sub = factories.NotificationSubscriptionLegacyFactory( + self.private_sub = factories.NotificationSubscriptionFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() # for file subscription - self.file_sub = factories.NotificationSubscriptionLegacyFactory( + self.file_sub = factories.NotificationSubscriptionFactory( _id='{pid}_{wbid}_file_updated'.format( pid=self.project._id, wbid=self.event.waterbutler_id @@ -436,8 +431,7 @@ def test_info_correct(self): ' in Consolidate to "Two/Paper13.txt" in OSF' ' Storage in Consolidate.') == self.event.text_message - @mock.patch('website.notifications.emails.store_emails') - def test_copied_one_of_each(self, mock_store): + def test_copied_one_of_each(self): # Copy Event: Tests that store_emails is called 2 times, two with # permissions, one without self.sub.email_transactional.add(self.user_1) @@ -454,8 +448,7 @@ def test_copied_one_of_each(self, mock_store): self.event.perform() assert 2 == mock_store.call_count - @mock.patch('website.notifications.emails.store_emails') - def test_user_performing_action_no_email(self, mock_store): + def test_user_performing_action_no_email(self): # Move Event: Makes sure user who performed the action is not # included in the notifications self.sub.email_digest.add(self.user_2) @@ -484,21 +477,21 @@ def setUp(self): ) # Subscriptions # for parent node - self.sub = factories.NotificationSubscriptionLegacyFactory( + self.sub = factories.NotificationSubscriptionFactory( _id=self.project._id + '_file_updated', owner=self.project, event_name='file_updated' ) self.sub.save() # for private node - self.private_sub = 
factories.NotificationSubscriptionLegacyFactory( + self.private_sub = factories.NotificationSubscriptionFactory( _id=self.private_node._id + '_file_updated', owner=self.private_node, event_name='file_updated' ) self.private_sub.save() # for file subscription - self.file_sub = factories.NotificationSubscriptionLegacyFactory( + self.file_sub = factories.NotificationSubscriptionFactory( _id='{pid}_{wbid}_file_updated'.format( pid=self.project._id, wbid=self.event.waterbutler_id diff --git a/website/mails/mails.py b/website/mails/mails.py index 83ab3afc613..0dec8be81a2 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -178,9 +178,6 @@ def get_english_article(word): subject='Your access request to an OSF project has been approved' ) REQUEST_EXPORT = Mail('support_request', subject='[via OSF] Export Request') -REQUEST_DEACTIVATION = Mail('support_request', subject='[via OSF] Deactivation Request') - -REQUEST_DEACTIVATION_COMPLETE = Mail('request_deactivation_complete', subject='[via OSF] OSF account deactivated') SPAM_USER_BANNED = Mail('spam_user_banned', subject='[OSF] Account flagged as spam') SPAM_FILES_DETECTED = Mail( @@ -188,17 +185,6 @@ def get_english_article(word): subject='[auto] Spam files audit' ) - -DIGEST = Mail( - 'digest', subject='OSF Notifications', - categories=['notifications', 'notifications-digest'] -) - -DIGEST_REVIEWS_MODERATORS = Mail( - 'digest_reviews_moderators', - subject='Recent submissions to ${provider_name}', -) - # Retraction related Mail objects PENDING_RETRACTION_ADMIN = Mail( 'pending_retraction_admin', diff --git a/website/notifications/emails.py b/website/notifications/emails.py index 9c34867ad3a..7a22ba8954c 100644 --- a/website/notifications/emails.py +++ b/website/notifications/emails.py @@ -5,7 +5,7 @@ from osf.models import AbstractNode, NotificationSubscription, NotificationType from osf.models.notifications import NotificationDigest -from osf.utils.permissions import ADMIN, READ +from osf.utils.permissions import READ from website import mails from website.notifications import constants from website.notifications import utils @@ -32,32 +32,6 @@ def notify(event, user, node, timestamp, **context): event_context=context ) -def notify_mentions(event, user, node, timestamp, **context): - OSFUser = apps.get_model('osf', 'OSFUser') - recipient_ids = context.get('new_mentions', []) - recipients = OSFUser.objects.filter(guids___id__in=recipient_ids) - sent_users = notify_global_event(event, user, node, timestamp, recipients, context=context) - return sent_users - -def notify_global_event(event, sender_user, node, timestamp, recipients, template=None, context=None): - event_type = utils.find_subscription_type(event) - sent_users = [] - if not context: - context = {} - - for recipient in recipients: - subscriptions = get_user_subscriptions(recipient, event_type) - context['is_creator'] = recipient == node.creator - if node.provider: - context['has_psyarxiv_chronos_text'] = node.has_permission(recipient, ADMIN) and 'psyarxiv' in node.provider.name.lower() - for notification_type in subscriptions: - if (notification_type != 'none' and subscriptions[notification_type] and recipient._id in subscriptions[notification_type]): - store_emails([recipient._id], notification_type, event, sender_user, node, timestamp, template=template, **context) - sent_users.append(recipient._id) - - return sent_users - - def store_emails(recipient_ids, notification_type, event, user, node, timestamp, abstract_provider=None, template=None, **context): """Store 
notification emails diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index 6a7c7cab3d9..9de4f342daf 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -19,7 +19,7 @@ RegistryError, ) from website.notifications.events import utils as event_utils -from osf.models import AbstractNode, NodeLog, Preprint +from osf.models import AbstractNode, NodeLog, Preprint, NotificationType from addons.base.signals import file_updated as signal @@ -278,12 +278,28 @@ def perform(self): ) # Move the document from one subscription to another because the old one isn't needed - utils.move_subscription(rm_users, self.event_type, self.source_node, self.event_type, self.node) + utils.move_subscription( + rm_users, + self.event_type, + self.source_node, + self.event_type, + self.node + ) + # Notify each user for notification in NOTIFICATION_TYPES: if notification == 'none': continue if moved[notification]: + NotificationType.objects.get( + name=NotificationType.Type.NODE_ADDON_FILE_MOVED, + ).emit( + user=self.user, + event_context={ + 'profile_image_url': self.profile_image_url, + 'url': self.url + } + ) emails.store_emails(moved[notification], notification, 'file_updated', self.user, self.node, self.timestamp, message=self.html_message, profile_image_url=self.profile_image_url, url=self.url) diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index ed9a936492f..c2e82c872db 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -1,22 +1,36 @@ import logging + +from osf import apps +from osf.models import NotificationType, Node from website.project.signals import contributor_added, project_created from framework.auth.signals import user_confirmed logger = logging.getLogger(__name__) @project_created.connect -def subscribe_creator(node): - if node.is_collection or node.is_deleted: +def subscribe_creator(resource): + if resource.is_collection or resource.is_deleted: return None from website.notifications.utils import subscribe_user_to_notifications - subscribe_user_to_notifications(node, node.creator) + subscribe_user_to_notifications(resource, resource.creator) @contributor_added.connect -def subscribe_contributor(node, contributor, auth=None, *args, **kwargs): +def subscribe_contributor(resource, contributor, auth=None, *args, **kwargs): from website.notifications.utils import subscribe_user_to_notifications - subscribe_user_to_notifications(node, contributor) + if isinstance(resource, Node) == 'osf.node': + if resource.is_collection or resource.is_deleted: + return None + subscribe_user_to_notifications(resource, contributor) @user_confirmed.connect def subscribe_confirmed_user(user): - from website.notifications.utils import subscribe_user_to_global_notifications - subscribe_user_to_global_notifications(user) + NotificationSubscription = apps.get_model('osf.NotificationSubscription') + user_events = [ + NotificationType.Type.USER_FILE_UPDATED, + NotificationType.Type.USER_REVIEWS, + ] + for user_event in user_events: + NotificationSubscription.objects.get_or_create( + user=user, + notification_type=user_event + ) diff --git a/website/notifications/utils.py b/website/notifications/utils.py index d9ceadfc39b..3b41c3435c0 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -5,7 +5,7 @@ from django.db.models import Q from framework.postcommit_tasks.handlers import run_postcommit -from osf.models import NotificationSubscription 
+from osf.models import NotificationSubscription, NotificationType from osf.utils.permissions import READ from website.notifications import constants from website.notifications.exceptions import InvalidSubscriptionError @@ -95,10 +95,10 @@ def remove_supplemental_node(node): @app.task(max_retries=5, default_retry_delay=60) def remove_subscription_task(node_id): AbstractNode = apps.get_model('osf.AbstractNode') - NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') + NotificationSubscription = apps.get_model('osf.NotificationSubscription') node = AbstractNode.load(node_id) - NotificationSubscriptionLegacy.objects.filter(node=node).delete() + NotificationSubscription.objects.filter(node=node).delete() parent = node.parent_node if parent and parent.child_node_subscriptions: @@ -172,11 +172,11 @@ def move_subscription(remove_users, source_event, source_node, new_event, new_no :param new_node: Instance of Node :return: Returns a NOTIFICATION_TYPES list of removed users without permissions """ - NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') + NotificationSubscription = apps.get_model('osf.NotificationSubscription') OSFUser = apps.get_model('osf.OSFUser') if source_node == new_node: return - old_sub = NotificationSubscriptionLegacy.load(to_subscription_key(source_node._id, source_event)) + old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event)) if not old_sub: return elif old_sub: @@ -236,8 +236,8 @@ def check_project_subscriptions_are_all_none(user, node): def get_all_user_subscriptions(user, extra=None): """ Get all Subscription objects that the user is subscribed to""" - NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') - queryset = NotificationSubscriptionLegacy.objects.filter( + NotificationSubscription = apps.get_model('osf.NotificationSubscription') + queryset = NotificationSubscription.objects.filter( Q(none=user.pk) | Q(email_digest=user.pk) | Q(email_transactional=user.pk) @@ -391,14 +391,13 @@ def get_parent_notification_type(node, event, user): :return: str notification type (e.g. 'email_transactional') """ AbstractNode = apps.get_model('osf.AbstractNode') - NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') if node and isinstance(node, AbstractNode) and node.parent_node and node.parent_node.has_permission(user, READ): parent = node.parent_node key = to_subscription_key(parent._id, event) try: - subscription = NotificationSubscriptionLegacy.objects.get(_id=key) - except NotificationSubscriptionLegacy.DoesNotExist: + subscription = NotificationSubscription.objects.get(_id=key) + except NotificationSubscription.DoesNotExist: return get_parent_notification_type(parent, event, user) for notification_type in constants.NOTIFICATION_TYPES: @@ -424,60 +423,25 @@ def get_global_notification_type(global_subscription, user): return notification_type -def check_if_all_global_subscriptions_are_none(user): - # This function predates comment mentions, which is a global_ notification that cannot be disabled - # Therefore, an actual check would never return True. 
- # If this changes, an optimized query would look something like: - # not NotificationSubscriptionLegacy.objects.filter(Q(event_name__startswith='global_') & (Q(email_digest=user.pk)|Q(email_transactional=user.pk))).exists() - return False - - -def subscribe_user_to_global_notifications(user): - NotificationSubscriptionLegacy = apps.get_model('osf.NotificationSubscriptionLegacy') - notification_type = 'email_transactional' - user_events = constants.USER_SUBSCRIPTIONS_AVAILABLE - for user_event in user_events: - user_event_id = to_subscription_key(user._id, user_event) - - # get_or_create saves on creation - subscription, created = NotificationSubscriptionLegacy.objects.get_or_create(_id=user_event_id, user=user, event_name=user_event) - subscription.add_user_to_subscription(user, notification_type) - subscription.save() - - def subscribe_user_to_notifications(node, user): """ Update the notification settings for the creator or contributors :param user: User to subscribe to notifications """ - Preprint = apps.get_model('osf.Preprint') - DraftRegistration = apps.get_model('osf.DraftRegistration') - if isinstance(node, Preprint): - raise InvalidSubscriptionError('Preprints are invalid targets for subscriptions at this time.') - - if isinstance(node, DraftRegistration): - raise InvalidSubscriptionError('DraftRegistrations are invalid targets for subscriptions at this time.') - - if node.is_collection: - raise InvalidSubscriptionError('Collections are invalid targets for subscriptions') - - if node.is_deleted: - raise InvalidSubscriptionError('Deleted Nodes are invalid targets for subscriptions') if getattr(node, 'is_registration', False): raise InvalidSubscriptionError('Registrations are invalid targets for subscriptions') if user.is_registered: - for event in ['node_file_updated',]: - subscription, _ = NotificationSubscription.objects.get_or_create( - user=user, - notification_type__name=event - ) - subscription, _ = NotificationSubscription.objects.get_or_create( - user=user, - notification_type__name=event, - object_id=node.id, - content_type=ContentType.objects.get_for_model(node) - ) + NotificationSubscription.objects.get_or_create( + user=user, + notification_type__name=NotificationType.Type.USER_FILE_UPDATED, + ) + NotificationSubscription.objects.get_or_create( + user=user, + notification_type__name=NotificationType.Type.NODE_FILE_UPDATED, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node) + ) def format_user_and_project_subscriptions(user): diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 0800afaf8ca..50ff050ceea 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -619,7 +619,7 @@ def check_email_throttle(node, contributor, throttle=None): return False # No previous sent notification, not throttled @contributor_added.connect -def notify_added_contributor(node, contributor, email_template, auth=None, *args, **kwargs): +def notify_added_contributor(node, contributor, notification_type, auth=None, *args, **kwargs): """Send a notification to a contributor who was just added to a node. Handles: @@ -631,15 +631,15 @@ def notify_added_contributor(node, contributor, email_template, auth=None, *args node (AbstractNode): The node to which the contributor was added. contributor (OSFUser): The user being added. auth (Auth, optional): Authorization context. - email_template (str, optional): Template identifier. + notification_type (str, optional): Template identifier. 
""" if check_email_throttle_claim_email(node, contributor): return - if email_template == 'false': + if not notification_type: return # Default values - notification_type = email_template or NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + notification_type = notification_type or NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT logo = settings.OSF_LOGO # Use match for notification type/logic @@ -659,13 +659,13 @@ def notify_added_contributor(node, contributor, email_template, auth=None, *args notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF logo = settings.OSF_PREPRINTS_LOGO else: - raise NotImplementedError(f'email_template: {email_template} not implemented.') + raise NotImplementedError(f'notification_type: {notification_type} not implemented.') NotificationType.objects.get(name=notification_type).emit( user=contributor, event_context={ 'user': contributor.id, - 'node': node.id, + 'node': node.title, 'referrer_name': getattr(getattr(auth, 'user', None), 'fullname', '') if auth else '', 'is_initiator': getattr(getattr(auth, 'user', None), 'id', None) == contributor.id if auth else False, 'all_global_subscriptions_none': False, From 2e98358973a9136f02b196421bcb90b2142de561 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Sun, 27 Jul 2025 19:10:49 -0400 Subject: [PATCH 126/176] remove child node subscription list --- api/subscriptions/serializers.py | 22 +--- ...e_abstractnode_child_node_subscriptions.py | 17 +++ osf/models/node.py | 4 - osf/models/notifications.py | 13 -- website/notifications/events/files.py | 115 +++--------------- website/notifications/listeners.py | 6 +- website/notifications/utils.py | 13 +- website/templates/emails/empty.html.mako | 1 - website/templates/emails/test.html.mako | 1 - 9 files changed, 48 insertions(+), 144 deletions(-) create mode 100644 osf/migrations/0034_remove_abstractnode_child_node_subscriptions.py delete mode 100644 website/templates/emails/empty.html.mako delete mode 100644 website/templates/emails/test.html.mako diff --git a/api/subscriptions/serializers.py b/api/subscriptions/serializers.py index ede0782ae65..1b7e6449833 100644 --- a/api/subscriptions/serializers.py +++ b/api/subscriptions/serializers.py @@ -1,9 +1,7 @@ -from django.contrib.contenttypes.models import ContentType from rest_framework import serializers as ser from api.nodes.serializers import RegistrationProviderRelationshipField from api.collections_providers.fields import CollectionProviderRelationshipField from api.preprints.serializers import PreprintProviderRelationshipField -from osf.models import Node from website.util import api_v2_url @@ -23,7 +21,10 @@ class SubscriptionSerializer(JSONAPISerializer): help_text='The id of the subscription fixed for backward compatibility', ) event_name = ser.CharField(read_only=True) - frequency = FrequencyField(source='message_frequency', required=True) + frequency = FrequencyField( + source='message_frequency', + required=True, + ) class Meta: type_ = 'subscription' @@ -36,20 +37,7 @@ def get_absolute_url(self, obj): return obj.absolute_api_v2_url def update(self, instance, validated_data): - user = self.context['request'].user - frequency = validated_data.get('frequency') or 'none' - instance.message_frequency = frequency - - if frequency != 'none' and instance.content_type == ContentType.objects.get_for_model(Node): - node = Node.objects.get( - id=instance.id, - content_type=instance.content_type, - ) - user_subs = node.parent_node.child_node_subscriptions - if node._id not in 
user_subs.setdefault(user._id, []): - user_subs[user._id].append(node._id) - node.parent_node.save() - + instance.message_frequency = validated_data.get['frequency'] return instance diff --git a/osf/migrations/0034_remove_abstractnode_child_node_subscriptions.py b/osf/migrations/0034_remove_abstractnode_child_node_subscriptions.py new file mode 100644 index 00000000000..79bd4ec9243 --- /dev/null +++ b/osf/migrations/0034_remove_abstractnode_child_node_subscriptions.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.13 on 2025-07-27 23:06 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0033_delete_queuedmail'), + ] + + operations = [ + migrations.RemoveField( + model_name='abstractnode', + name='child_node_subscriptions', + ), + ] diff --git a/osf/models/node.py b/osf/models/node.py index f4fdb2c2122..4ea827e5b4a 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -318,10 +318,6 @@ class AbstractNode(DirtyFieldsMixin, TypedModel, AddonModelMixin, IdentifierMixi ) SELECT {fields} FROM "{nodelicenserecord}" WHERE id = (SELECT node_license_id FROM ascendants WHERE node_license_id IS NOT NULL) LIMIT 1;""") - # Dictionary field mapping user id to a list of nodes in node.nodes which the user has subscriptions for - # {: [, , ...] } - # TODO: Can this be a reference instead of data? - child_node_subscriptions = DateTimeAwareJSONField(default=dict, blank=True) _contributors = models.ManyToManyField(OSFUser, through=Contributor, related_name='nodes') diff --git a/osf/models/notifications.py b/osf/models/notifications.py index 41ec120b4ee..80703f1620f 100644 --- a/osf/models/notifications.py +++ b/osf/models/notifications.py @@ -70,12 +70,6 @@ def add_user_to_subscription(self, user, notification_type, save=True): if nt == notification_type: getattr(self, nt).add(user) - if notification_type != 'none' and isinstance(self.owner, Node) and self.owner.parent_node: - user_subs = self.owner.parent_node.child_node_subscriptions - if self.owner._id not in user_subs.setdefault(user._id, []): - user_subs[user._id].append(self.owner._id) - self.owner.parent_node.save() - if save: # Do not clean legacy objects self.save(clean=False) @@ -87,13 +81,6 @@ def remove_user_from_subscription(self, user, save=True): except ValueError: pass - if isinstance(self.owner, Node) and self.owner.parent_node: - try: - self.owner.parent_node.child_node_subscriptions.get(user._id, []).remove(self.owner._id) - self.owner.parent_node.save() - except ValueError: - pass - if save: self.save() diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index 9de4f342daf..aa8aca2f32b 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -9,16 +9,12 @@ from furl import furl import markupsafe -from website.notifications import emails -from website.notifications.constants import NOTIFICATION_TYPES -from website.notifications import utils from website.notifications.events.base import ( register, Event, event_registry, RegistryError, ) -from website.notifications.events import utils as event_utils from osf.models import AbstractNode, NodeLog, Preprint, NotificationType from addons.base.signals import file_updated as signal @@ -236,82 +232,17 @@ def perform(self): if self.node == self.source_node: super().perform() return - # File - if self.payload['destination']['kind'] != 'folder': - moved, warn, rm_users = event_utils.categorize_users( - self.user, - self.event_type, - self.source_node, - 
self.event_type, - self.node - ) - warn_message = f'{self.html_message} You are no longer tracking that file based on the settings you selected for the component.' - remove_message = ( - f'{self.html_message} Your subscription has been removed due to ' - 'insufficient permissions in the new component.' - ) - # Folder - else: - # Gets all the files in a folder to look for permissions conflicts - files = event_utils.get_file_subs_from_folder( - self.addon, - self.user, - self.payload['destination']['kind'], - self.payload['destination']['path'], - self.payload['destination']['name'] - ) - # Bins users into different permissions - moved, warn, rm_users = event_utils.compile_user_lists( - files, - self.user, - self.source_node, - self.node - ) - # For users that don't have individual file subscription but has permission on the new node - warn_message = f'{self.html_message} You are no longer tracking that folder or files within based on the settings you selected for the component.' - # For users without permission on the new node - remove_message = ( - f'{self.html_message} Your subscription has been removed for the ' - 'folder, or a file within, due to insufficient permissions in the new ' - 'component.' - ) - - # Move the document from one subscription to another because the old one isn't needed - utils.move_subscription( - rm_users, - self.event_type, - self.source_node, - self.event_type, - self.node + NotificationType.objects.get( + name=NotificationType.Type.NODE_ADDON_FILE_MOVED, + ).emit( + user=self.user, + event_context={ + 'profile_image_url': self.profile_image_url, + 'url': self.url + } ) - # Notify each user - for notification in NOTIFICATION_TYPES: - if notification == 'none': - continue - if moved[notification]: - NotificationType.objects.get( - name=NotificationType.Type.NODE_ADDON_FILE_MOVED, - ).emit( - user=self.user, - event_context={ - 'profile_image_url': self.profile_image_url, - 'url': self.url - } - ) - emails.store_emails(moved[notification], notification, 'file_updated', self.user, self.node, - self.timestamp, message=self.html_message, - profile_image_url=self.profile_image_url, url=self.url) - if warn[notification]: - emails.store_emails(warn[notification], notification, 'file_updated', self.user, self.node, - self.timestamp, message=warn_message, profile_image_url=self.profile_image_url, - url=self.url) - if rm_users[notification]: - emails.store_emails(rm_users[notification], notification, 'file_updated', self.user, self.source_node, - self.timestamp, message=remove_message, - profile_image_url=self.profile_image_url, url=self.source_url) - @register(NodeLog.FILE_COPIED) class AddonFileCopied(ComplexFileEvent): @@ -324,26 +255,16 @@ def perform(self): together because they both don't have a subscription to a newly copied file. """ - remove_message = self.html_message + ' You do not have permission in the new component.' 
if self.node == self.source_node: super().perform() return - if self.payload['destination']['kind'] != 'folder': - moved, warn, rm_users = event_utils.categorize_users(self.user, self.event_type, self.source_node, - self.event_type, self.node) - else: - files = event_utils.get_file_subs_from_folder(self.addon, self.user, self.payload['destination']['kind'], - self.payload['destination']['path'], - self.payload['destination']['name']) - moved, warn, rm_users = event_utils.compile_user_lists(files, self.user, self.source_node, self.node) - for notification in NOTIFICATION_TYPES: - if notification == 'none': - continue - if moved[notification] or warn[notification]: - users = list(set(moved[notification]).union(set(warn[notification]))) - emails.store_emails(users, notification, 'file_updated', self.user, self.node, self.timestamp, - message=self.html_message, profile_image_url=self.profile_image_url, url=self.url) - if rm_users[notification]: - emails.store_emails(rm_users[notification], notification, 'file_updated', self.user, self.source_node, - self.timestamp, message=remove_message, - profile_image_url=self.profile_image_url, url=self.source_url) + + NotificationType.objects.get( + name=NotificationType.Type.NODE_ADDON_FILE_MOVED, + ).emit( + user=self.user, + event_context={ + 'profile_image_url': self.profile_image_url, + 'url': self.url + } + ) diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index c2e82c872db..4447fa971d7 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -1,7 +1,6 @@ import logging from osf import apps -from osf.models import NotificationType, Node from website.project.signals import contributor_added, project_created from framework.auth.signals import user_confirmed @@ -17,7 +16,9 @@ def subscribe_creator(resource): @contributor_added.connect def subscribe_contributor(resource, contributor, auth=None, *args, **kwargs): from website.notifications.utils import subscribe_user_to_notifications - if isinstance(resource, Node) == 'osf.node': + from osf.models import Node + + if isinstance(resource, Node): if resource.is_collection or resource.is_deleted: return None subscribe_user_to_notifications(resource, contributor) @@ -25,6 +26,7 @@ def subscribe_contributor(resource, contributor, auth=None, *args, **kwargs): @user_confirmed.connect def subscribe_confirmed_user(user): NotificationSubscription = apps.get_model('osf.NotificationSubscription') + NotificationType = apps.get_model('osf.NotificationType') user_events = [ NotificationType.Type.USER_FILE_UPDATED, NotificationType.Type.USER_REVIEWS, diff --git a/website/notifications/utils.py b/website/notifications/utils.py index 3b41c3435c0..331b2162acf 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -96,16 +96,11 @@ def remove_supplemental_node(node): def remove_subscription_task(node_id): AbstractNode = apps.get_model('osf.AbstractNode') NotificationSubscription = apps.get_model('osf.NotificationSubscription') - node = AbstractNode.load(node_id) - NotificationSubscription.objects.filter(node=node).delete() - parent = node.parent_node - - if parent and parent.child_node_subscriptions: - for user_id in parent.child_node_subscriptions: - if node._id in parent.child_node_subscriptions[user_id]: - parent.child_node_subscriptions[user_id].remove(node._id) - parent.save() + NotificationSubscription.objects.filter( + object_id=node.id, + content_type=ContentType.objects.get_for_model(node), + ).delete() 
@run_postcommit(once_per_request=False, celery=True) diff --git a/website/templates/emails/empty.html.mako b/website/templates/emails/empty.html.mako deleted file mode 100644 index c78480affe2..00000000000 --- a/website/templates/emails/empty.html.mako +++ /dev/null @@ -1 +0,0 @@ -

    ${body}

    diff --git a/website/templates/emails/test.html.mako b/website/templates/emails/test.html.mako deleted file mode 100644 index da55c3f3af8..00000000000 --- a/website/templates/emails/test.html.mako +++ /dev/null @@ -1 +0,0 @@ -Hello

    ${name}

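
The patches above drop the per-parent `child_node_subscriptions` JSON map and address node-scoped subscriptions as `NotificationSubscription` rows keyed through Django's content-type framework instead. The sketch below illustrates that lookup pattern only; the helper names are hypothetical and not part of the series, while `NotificationSubscription`, `ContentType`, and the `content_type`/`object_id` fields are taken from the hunks above.

    # Illustrative sketch of the generic-FK subscription lookup the series moves to.
    # subscriptions_for/remove_subscriptions_for are hypothetical helpers; the model
    # and field names come from the diffs above (provider.py, osf_tests/utils.py,
    # website/notifications/utils.py).
    from django.contrib.contenttypes.models import ContentType

    from osf.models import NotificationSubscription


    def subscriptions_for(resource):
        # All subscription rows attached to a node-like object.
        return NotificationSubscription.objects.filter(
            content_type=ContentType.objects.get_for_model(resource),
            object_id=resource.id,
        )


    def remove_subscriptions_for(resource):
        # Replaces the old child_node_subscriptions bookkeeping: deleting the rows
        # is enough, with no parent-node JSON map left to keep in sync.
        subscriptions_for(resource).delete()
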
    From 38d7caab341c8a80d747262907622c8790bd40c4 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 28 Jul 2025 09:09:39 -0400 Subject: [PATCH 127/176] refactor and fix mailhog tests --- api/preprints/serializers.py | 4 +-- ...est_draft_registration_contributor_list.py | 6 ++--- .../views/test_draft_registration_list.py | 2 +- api_tests/mailhog/test_mailhog.py | 25 +++++++++++++------ notifications.yaml | 11 ++++++++ osf/models/node.py | 5 ++-- osf/models/notification.py | 3 +-- osf/models/notification_type.py | 7 +++--- osf/models/preprint.py | 2 +- osf/models/provider.py | 3 ++- osf/models/registrations.py | 4 +-- osf/models/user_message.py | 2 +- website/mails/mails.py | 8 ------ website/project/views/contributor.py | 12 ++++----- 14 files changed, 53 insertions(+), 41 deletions(-) diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py index b5cdd0f1f93..c0e867510a5 100644 --- a/api/preprints/serializers.py +++ b/api/preprints/serializers.py @@ -48,7 +48,7 @@ Preprint, PreprintProvider, Node, - NodeLicense, + NodeLicense, NotificationType, ) from osf.utils import permissions as osf_permissions from osf.utils.workflows import DefaultStates @@ -468,7 +468,7 @@ def update(self, preprint, validated_data): preprint, contributor=author, auth=auth, - notification_type='preprint', + notification_type=NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT, ) return preprint diff --git a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py index 090993add28..232d3071da5 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_contributor_list.py @@ -239,7 +239,7 @@ def test_add_contributor_sends_email(self, app, user, user_two, url_project_cont ) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + assert notifications[0]['type'] == NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT # Overrides TestNodeContributorCreateEmail def test_add_contributor_signal_if_default( @@ -282,7 +282,7 @@ def test_add_unregistered_contributor_sends_email(self, app, user, url_project_c ) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + assert notifications[0]['type'] == NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT # Overrides TestNodeContributorCreateEmail def test_add_unregistered_contributor_signal_if_default(self, app, user, url_project_contribs): @@ -301,7 +301,7 @@ def test_add_unregistered_contributor_signal_if_default(self, app, user, url_pro ) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + assert notifications[0]['type'] == NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT # Overrides TestNodeContributorCreateEmail def test_add_unregistered_contributor_without_email_no_email(self, app, user, url_project_contribs): diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index cc409555e10..ed1bc6192aa 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ 
b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -435,7 +435,7 @@ def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_reg auth=user.auth ) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + assert notifications[0]['type'] == NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT assert notifications[0]['kwargs']['user'] == user def test_create_draft_with_provider( diff --git a/api_tests/mailhog/test_mailhog.py b/api_tests/mailhog/test_mailhog.py index 573df3a4fbe..fb9b8fba771 100644 --- a/api_tests/mailhog/test_mailhog.py +++ b/api_tests/mailhog/test_mailhog.py @@ -1,6 +1,5 @@ import requests import pytest -from django.core.mail import send_mail from waffle.testutils import override_switch from osf import features from website import settings @@ -14,7 +13,7 @@ ) from framework import auth from unittest import mock -from osf.models import OSFUser +from osf.models import OSFUser, NotificationType from tests.base import ( OsfTestCase, ) @@ -24,22 +23,32 @@ @pytest.mark.django_db class TestMailHog: + @mock.patch('website.mails.settings.ENABLE_TEST_EMAIL', True) def test_mailhog_received_mail(self): with override_switch(features.ENABLE_MAILHOG, active=True): mailhog_v1 = f'{settings.MAILHOG_API_HOST}/api/v1/messages' mailhog_v2 = f'{settings.MAILHOG_API_HOST}/api/v2/messages' requests.delete(mailhog_v1) - send_mail( - 'test email', - 'test message', - from_email=settings.OSF_CONTACT_EMAIL, - recipient_list=['to_addr@mail.com',] + NotificationType.objects.get( + name=NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL + ).emit( + user=None, + destination_address='to_addr@mail.com', + event_context={ + 'fullname': '', + 'osf_support_email': '', + 'count': 'US', + 'error': 'eooer', + } ) + res = requests.get(mailhog_v2).json() assert res['count'] == 1 assert res['items'][0]['Content']['Headers']['To'][0] == 'to_addr@mail.com' - assert res['items'][0]['Content']['Headers']['Subject'][0] == 'A test email to Mailhog' + assert res['items'][0]['Content']['Headers']['Subject'][0] == NotificationType.objects.get( + name=NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL + ).subject requests.delete(mailhog_v1) diff --git a/notifications.yaml b/notifications.yaml index 65ffa263181..6830c973bb4 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -131,6 +131,7 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/registration_bulk_upload_success_all.html.mako' - name: user_registration_bulk_upload_failure_all + subject: "Registrations Were Not Bulk Uploaded to your Community's Registry" __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/registration_bulk_upload_failure_all.html.mako' @@ -314,6 +315,10 @@ notification_types: __docs__: ... object_content_type_model_name: preprint template: 'website/templates/emails/withdrawal_request_declined.html.mako' + - name: preprint_contributor_added_default + __docs__: ... + object_content_type_model_name: preprint + template: 'website/templates/emails/contributor_added_preprints.html.mako' #### SUPPORT - name: crossref_error __docs__: ... @@ -381,3 +386,9 @@ notification_types: __docs__: ... 
object_content_type_model_name: desk template: 'website/templates/emails/support_request.html.mako' +### Draft Registration + - name: draft_registration_contributor_added_default + subject: 'You have a new registration draft.' + __docs__: ... + object_content_type_model_name: draftregistration + template: 'website/templates/emails/contributor_added_draft_registration.html.mako' diff --git a/osf/models/node.py b/osf/models/node.py index 4ea827e5b4a..44815d360c0 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -34,6 +34,7 @@ from framework.exceptions import PermissionsError, HTTPError from framework.sentry import log_exception from osf.exceptions import InvalidTagError, NodeStateError, TagNotFoundError, ValidationError +from osf.models.notification_type import NotificationType from .contributor import Contributor from .collection_submission import CollectionSubmission @@ -937,7 +938,7 @@ def contributors_and_group_members(self): @property def contributor_email_template(self): - return 'default' + return NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT @property def registrations_all(self): @@ -1335,7 +1336,7 @@ def subscribe_contributors_to_node(self): and send emails to users that they have been added to the project. (DraftNodes are hidden until registration). """ - from . import NotificationType + from osf.models.notification_type import NotificationType for user in self.contributors.filter(is_registered=True): perm = self.contributor_set.get(user=user).permission diff --git a/osf/models/notification.py b/osf/models/notification.py index fb9922078e4..228ee3e9d5a 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -31,8 +31,7 @@ def send( raise NotImplementedError(f'Protocol type {protocol_type}. Email notifications are only implemented.') recipient_address = destination_address or self.subscription.user.username - - if protocol_type == 'email' and settings.DEV_MODE and settings.ENABLE_TEST_EMAIL: + if protocol_type == 'email' and settings.ENABLE_TEST_EMAIL: email.send_email_over_smtp( recipient_address, self.subscription.notification_type, diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 34aee20a357..3df55aa987d 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -51,7 +51,6 @@ class Type(str, Enum): USER_FORGOT_PASSWORD = 'user_forgot_password' USER_FORGOT_PASSWORD_INSTITUTION = 'user_forgot_password_institution' USER_REQUEST_EXPORT = 'user_request_export' - USER_CONTRIBUTOR_ADDED_OSF_PREPRINT = 'user_contributor_added_osf_preprint' USER_DUPLICATE_ACCOUNTS_OSF4I = 'user_duplicate_accounts_osf4i' USER_EXTERNAL_LOGIN_LINK_SUCCESS = 'user_external_login_link_success' USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL = 'user_registration_bulk_upload_failure_all' @@ -63,12 +62,9 @@ class Type(str, Enum): USER_ARCHIVE_JOB_COPY_ERROR = 'user_archive_job_copy_error' USER_ARCHIVE_JOB_FILE_NOT_FOUND = 'user_archive_job_file_not_found' USER_COMMENT_REPLIES = 'user_comment_replies' - USER_COMMENTS = 'user_comments' USER_FILE_UPDATED = 'user_file_updated' - USER_COMMENT_MENTIONS = 'user_mentions' USER_REVIEWS = 'user_reviews' USER_PASSWORD_RESET = 'user_password_reset' - USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION = 'user_contributor_added_draft_registration' USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE = 'user_external_login_confirm_email_create' USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK = 'user_external_login_email_confirm_link' USER_CONFIRM_MERGE = 'user_confirm_merge' @@ -133,6 +129,7 @@ class Type(str, 
Enum): PREPRINT_REQUEST_WITHDRAWAL_APPROVED = 'preprint_request_withdrawal_approved' PREPRINT_REQUEST_WITHDRAWAL_DECLINED = 'preprint_request_withdrawal_declined' PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'preprint_contributor_added_preprint_node_from_osf' + PREPRINT_CONTRIBUTOR_ADDED_DEFAULT = 'preprint_contributor_added_default' PREPRINT_PENDING_RETRACTION_ADMIN = 'preprint_pending_retraction_admin' # Collections Submission notifications @@ -146,6 +143,8 @@ class Type(str, Enum): REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = 'registration_bulk_upload_failure_duplicates' + DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT = 'draft_registration_contributor_added_default' + @property def instance(self): obj, created = NotificationType.objects.get_or_create(name=self.value) diff --git a/osf/models/preprint.py b/osf/models/preprint.py index b6c864bcf83..150987ca893 100644 --- a/osf/models/preprint.py +++ b/osf/models/preprint.py @@ -589,7 +589,7 @@ def osfstorage_region(self): @property def contributor_email_template(self): - return 'preprint' + return NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT @property def file_read_scope(self): diff --git a/osf/models/provider.py b/osf/models/provider.py index 38b0affb035..3bfa846b687 100644 --- a/osf/models/provider.py +++ b/osf/models/provider.py @@ -15,7 +15,8 @@ from guardian.models import GroupObjectPermissionBase, UserObjectPermissionBase from framework import sentry -from . import NotificationType, NotificationSubscription +from osf.models.notification_type import NotificationType +from osf.models.notification_subscription import NotificationSubscription from .base import BaseModel, TypedObjectIDMixin from .mixins import ReviewProviderMixin from .brand import Brand diff --git a/osf/models/registrations.py b/osf/models/registrations.py index e4f4106b563..f08fefe83b9 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -21,7 +21,7 @@ from osf.exceptions import NodeStateError, DraftRegistrationStateError from osf.external.internet_archive.tasks import archive_to_ia, update_ia_metadata from osf.metrics import RegistriesModerationMetrics -from . import NotificationType +from osf.models.notification_type import NotificationType from .action import RegistrationAction from .archive import ArchiveJob from .contributor import DraftRegistrationContributor @@ -1197,7 +1197,7 @@ def visible_contributors(self): @property def contributor_email_template(self): # Override for ContributorMixin - return 'draft_registration' + return NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT @property def institutions_url(self): diff --git a/osf/models/user_message.py b/osf/models/user_message.py index 126e4be5bd6..e66ea395a52 100644 --- a/osf/models/user_message.py +++ b/osf/models/user_message.py @@ -3,7 +3,7 @@ from django.db.models.signals import post_save from django.dispatch import receiver -from . import NotificationType +from osf.models.notification_type import NotificationType from .base import BaseModel, ObjectIDMixin diff --git a/website/mails/mails.py b/website/mails/mails.py index 0dec8be81a2..47e0edf3d28 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -165,10 +165,6 @@ def get_english_article(word): 'contributor_added_preprint_node_from_osf', subject='You have been added as a contributor to an OSF project.' ) -CONTRIBUTOR_ADDED_DRAFT_REGISTRATION = Mail( - 'contributor_added_draft_registration', - subject='You have a new registration draft.' 
-) MODERATOR_ADDED = lambda provider: Mail( 'moderator_added', subject=f'You have been added as a moderator for {provider.name}' @@ -378,10 +374,6 @@ def get_english_article(word): subject='Some Registrations Successfully Bulk Uploaded to your Community\'s Registry' ) -REGISTRATION_BULK_UPLOAD_FAILURE_ALL = Mail( - 'registration_bulk_upload_failure_all', - subject='Registrations Were Not Bulk Uploaded to your Community\'s Registry' -) REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = Mail( 'registration_bulk_upload_failure_duplicates', diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 50ff050ceea..f63b5f71317 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -476,7 +476,7 @@ def send_claim_email( node, notify=True, throttle=24 * 3600, - email_template='default' + notification_type='default' ): """ Send a claim email to an unregistered contributor or the referrer, depending on the scenario. @@ -487,7 +487,7 @@ def send_claim_email( node (Node): The node where the user claimed their account. notify (bool): Whether to notify the invited user about their pending verification. throttle (int): Throttle period (in seconds) to prevent repeated emails. - email_template (str): The email template identifier. + notification_type (str): The notification_type identifier. Returns: str: The address the notification was sent to. Raises: @@ -502,7 +502,7 @@ def send_claim_email( # Option 1: Referrer provided name and email (send to claimer) if unclaimed_record.get('email') == claimer_email: # Select notification type and logo using match - match email_template: + match notification_type: case 'preprint': if getattr(node.provider, 'is_default', False): notification_type = NotificationType.Type.USER_INVITE_OSF_PREPRINT @@ -511,7 +511,7 @@ def send_claim_email( notification_type = NotificationType.Type.PROVIDER_USER_INVITE_PREPRINT logo = getattr(node.provider, '_id', None) case 'draft_registration': - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + notification_type = NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT case _: notification_type = NotificationType.Type.USER_INVITE_DEFAULT @@ -646,9 +646,9 @@ def notify_added_contributor(node, contributor, notification_type, auth=None, *a if notification_type == 'default': notification_type = NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT elif notification_type == 'preprint': - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT + notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT elif notification_type == 'draft_registration': - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION + notification_type = NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT elif notification_type == 'access': notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST elif notification_type == 'access_request': From ddd78d4bf227191aa22d5e7c65b6b623be711385 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 28 Jul 2025 09:31:12 -0400 Subject: [PATCH 128/176] refactor add_contributors to use notification_type --- api/nodes/serializers.py | 42 +++- .../views/test_node_contributors_list.py | 25 +- api_tests/requests/mixins.py | 3 +- .../test_node_request_institutional_access.py | 8 +- .../views/test_request_actions_create.py | 2 +- notifications.yaml | 21 ++ osf/models/mixins.py | 80 +++--- osf/models/node.py | 5 - 
osf/models/preprint.py | 4 - osf/models/registrations.py | 7 +- osf/utils/machines.py | 9 +- .../test_institutional_admin_contributors.py | 6 +- osf_tests/test_merging_users.py | 4 - tests/base.py | 4 - tests/test_adding_contributor_views.py | 8 +- tests/test_events.py | 236 +++++------------- website/mails/mails.py | 33 --- website/notifications/constants.py | 4 - website/notifications/listeners.py | 20 +- website/notifications/utils.py | 78 +++--- website/project/views/contributor.py | 23 +- 21 files changed, 263 insertions(+), 359 deletions(-) diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index 8725479456d..0dd8c8e69cb 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -28,7 +28,7 @@ from django.core.exceptions import ValidationError from framework.auth.core import Auth from framework.exceptions import PermissionsError -from osf.models import Tag, CollectionSubmission +from osf.models import Tag, CollectionSubmission, NotificationType, OSFUser from rest_framework import serializers as ser from rest_framework import exceptions from addons.base.exceptions import InvalidAuthError, InvalidFolderError @@ -1246,22 +1246,40 @@ def create(self, validated_data): auth = Auth(self.context['request'].user) full_name = validated_data.get('full_name') bibliographic = validated_data.get('bibliographic') - send_email = self.context['request'].GET.get('send_email') or self.context['default_email'] + email_preference = self.context['request'].GET.get('send_email') or self.context['default_email'] permissions = self.get_proposed_permissions(validated_data) - self.validate_data(node, user_id=id, full_name=full_name, email=email, index=index) + self.validate_data( + node, + user_id=id, + full_name=full_name, + email=email, + index=index, + ) - if send_email not in self.email_preferences: - raise exceptions.ValidationError(detail=f'{send_email} is not a valid email preference.') + if email_preference not in self.email_preferences: + raise exceptions.ValidationError(detail=f'{email_preference} is not a valid email preference.') - try: - contributor_dict = { - 'auth': auth, 'user_id': id, 'email': email, 'full_name': full_name, 'send_email': send_email, - 'bibliographic': bibliographic, 'index': index, - } + contributor = OSFUser.load(id) + if email or (contributor and contributor.is_registered): + notification_type = { + 'false': False, + 'default': NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, + }[email_preference] + else: + notification_type = False - contributor_dict['permissions'] = permissions - contributor_obj = node.add_contributor_registered_or_not(**contributor_dict) + try: + contributor_obj = node.add_contributor_registered_or_not( + auth=auth, + user_id=id, + email=email, + full_name=full_name, + notification_type=notification_type, + bibliographic=bibliographic, + index=index, + permissions=permissions, + ) except ValidationError as e: raise exceptions.ValidationError(detail=e.messages[0]) except ValueError as e: diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index 4d29857676d..6983307b1fc 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -1218,15 +1218,20 @@ def url_project_contribs(self, project_public): def test_add_contributor_no_email_if_false( self, app, user, url_project_contribs ): - url = f'{url_project_contribs}?send_email=false' - payload = { - 'data': { - 'type': 'contributors', - 
'attributes': {'full_name': 'Kanye West', 'email': 'kanye@west.com'}, - } - } with capture_notifications() as notifications: - res = app.post_json_api(url, payload, auth=user.auth) + res = app.post_json_api( + f'{url_project_contribs}?send_email=false', + { + 'data': { + 'type': 'contributors', + 'attributes': { + 'full_name': 'Jason Kelece', + 'email': 'jason@kelece.com' + }, + } + }, + auth=user.auth + ) assert not notifications assert res.status_code == 201 @@ -1267,7 +1272,7 @@ def test_add_contributor_signal_if_default( res = app.post_json_api(url, payload, auth=user.auth) args, kwargs = mock_send.call_args assert res.status_code == 201 - assert 'default' == kwargs['email_template'] + assert NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT == kwargs['notification_type'] def test_add_contributor_signal_preprint_email_disallowed( self, app, user, user_two, url_project_contribs @@ -1370,7 +1375,7 @@ def test_add_unregistered_contributor_without_email_no_email(self, app, user, ur 'data': { 'type': 'contributors', 'attributes': { - 'full_name': 'Kanye West', + 'full_name': 'Jason Kelece', }, } } diff --git a/api_tests/requests/mixins.py b/api_tests/requests/mixins.py index 4b281b0862d..a32321c7836 100644 --- a/api_tests/requests/mixins.py +++ b/api_tests/requests/mixins.py @@ -1,5 +1,6 @@ import pytest +from osf.models import NotificationType from osf.utils.workflows import DefaultStates, RequestTypes from osf_tests.factories import ( AuthUserFactory, @@ -37,7 +38,7 @@ def project(self, admin, write_contrib): proj.add_contributor( contributor=write_contrib, permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS, - send_email='access_request', + notification_type=NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST, save=True ) return proj diff --git a/api_tests/requests/views/test_node_request_institutional_access.py b/api_tests/requests/views/test_node_request_institutional_access.py index d41b7639f05..d5df5c966f7 100644 --- a/api_tests/requests/views/test_node_request_institutional_access.py +++ b/api_tests/requests/views/test_node_request_institutional_access.py @@ -249,7 +249,7 @@ def test_email_sent_on_creation( with capture_notifications() as notifications: res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST assert res.status_code == 201 def test_bcc_institutional_admin( @@ -269,7 +269,7 @@ def test_bcc_institutional_admin( with capture_notifications() as notifications: res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST assert res.status_code == 201 def test_reply_to_institutional_admin( @@ -289,7 +289,7 @@ def test_reply_to_institutional_admin( with capture_notifications() as notifications: res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST assert res.status_code == 201 def test_access_requests_disabled_raises_permission_denied( @@ -325,7 +325,7 @@ def 
test_placeholder_text_when_comment_is_empty( with capture_notifications() as notifications: res = app.post_json_api(url, create_payload, auth=institutional_admin.auth) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST assert res.status_code == 201 def test_requester_can_resubmit(self, app, project, institutional_admin, url, create_payload): diff --git a/api_tests/requests/views/test_request_actions_create.py b/api_tests/requests/views/test_request_actions_create.py index ff277ac0233..a8b71da01f4 100644 --- a/api_tests/requests/views/test_request_actions_create.py +++ b/api_tests/requests/views/test_request_actions_create.py @@ -199,7 +199,7 @@ def test_email_sent_on_approve(self, app, admin, url, node_request): with capture_notifications() as notifications: res = app.post_json_api(url, payload, auth=admin.auth) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT assert res.status_code == 201 node_request.reload() assert initial_state != node_request.machine_state diff --git a/notifications.yaml b/notifications.yaml index 6830c973bb4..c2edd80d0aa 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -160,6 +160,11 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/spam_user_banned.html.mako' + - name: user_file_updated + subject: 'Your ${action} has finished' + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/file_operation_success.html.mako' - name: user_file_operation_success subject: 'Your ${action} has finished' __docs__: ... @@ -170,6 +175,10 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/file_operation_failed.html.mako' + - name: user_reviews + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/file_operation_failed.html.mako' #### PROVIDER - name: provider_new_pending_submissions @@ -189,6 +198,7 @@ notification_types: object_content_type_model_name: abstractprovider template: 'website/templates/emails/reviews_submission_confirmation.html.mako' - name: provider_confirm_email_moderation + subject: 'OSF Account Verification, {provider.name}' __docs__: ... object_content_type_model_name: abstractprovider template: 'website/templates/emails/confirm_moderation.html.mako' @@ -210,6 +220,10 @@ notification_types: __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' + - name: node_files_updated + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' - name: node_file_added __docs__: ... object_content_type_model_name: abstractnode @@ -326,30 +340,37 @@ notification_types: template: 'website/templates/emails/new_pending_submissions.html.mako' #### Collection Submissions - name: collection_submission_removed_moderator + subject: '{node.title} was removed from {collection.title}' __docs__: ...
object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - name: collection_submission_removed_private + subject: '{node.title} was removed from {collection.title}' __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - name: collection_submission_removed_admin + subject: '{node.title} was removed from {collection.title}' __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - name: collection_submission_submitted + subject: f'{submitter.fullname} has requested to add {node.title} to a collection' __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - name: collection_submission_cancel + subject: 'Request to add {node.title} to {collection.title} was canceled' __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - name: collection_submission_accepted + subject: '{node.title} was accepted into {collection.title}' __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' - name: collection_submission_rejected + subject: '{node.title} was not accepted into {collection.title}' __docs__: ... object_content_type_model_name: collectionsubmission template: 'website/templates/emails/new_pending_submissions.html.mako' diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 33463e09924..be8058150fe 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -27,6 +27,7 @@ BlockedEmailError, ) from osf.models.notification_type import NotificationType +from osf.models.notification_subscription import NotificationSubscription from .node_relation import NodeRelation from .nodelog import NodeLog from .subject import Subject @@ -1076,9 +1077,12 @@ def add_to_group(self, user, group): else: raise TypeError(f"Unsupported group type: {type(group)}") - # Add default notification subscription - for subscription in self.DEFAULT_SUBSCRIPTIONS: - self.add_user_to_subscription(user, f'{self._id}_{subscription}') + NotificationSubscription.objects.get_or_create( + user=user, + notification_type=NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + ) + ) def remove_from_group(self, user, group, unsubscribe=True): _group = self.get_group(group) @@ -1092,14 +1096,10 @@ def remove_from_group(self, user, group, unsubscribe=True): return _group.user_set.remove(user) - def add_user_to_subscription(self, user, subscription_id): - notification = self.notification_subscriptions.get(_id=subscription_id) - user_id = user.id - is_subscriber = notification.none.filter(id=user_id).exists() \ - or notification.email_digest.filter(id=user_id).exists() \ - or notification.email_transactional.filter(id=user_id).exists() - if not is_subscriber: - notification.add_user_to_subscription(user, 'email_transactional', save=True) + def add_user_to_subscription(self, user, subscription): + subscription.objects.get_or_create( + user=user, + ) def remove_user_from_subscription(self, user, subscription_id): notification = self.notification_subscriptions.get(_id=subscription_id) @@ -1308,11 +1308,6 @@ def order_by_contributor_field(self): # 'contributor___order', for example raise NotImplementedError() - @property - def 
contributor_email_template(self): - # default contributor email template as a string - raise NotImplementedError() - def get_addons(self): raise NotImplementedError() @@ -1394,24 +1389,42 @@ def _get_admin_contributors_query(self, users, require_active=True): qs = qs.filter(user__is_active=True) return qs - def add_contributor(self, contributor, permissions=None, visible=True, - send_email=None, auth=None, log=True, save=False, make_curator=False): + def add_contributor( + self, + contributor, + permissions=None, + visible=True, + notification_type=None, + auth=None, + log=True, + save=False, + make_curator=False + ): """Add a contributor to the project. :param User contributor: The contributor to be added :param list permissions: Permissions to grant to the contributor. Array of all permissions if node, highest permission to grant, if contributor, as a string. :param bool visible: Contributor is visible in project dashboard - :param str send_email: Email preference for notifying added contributor + :param str notification_type: Email preference for notifying added contributor :param Auth auth: All the auth information including user, API key :param bool log: Add log to self :param bool save: Save after adding contributor :param bool make_curator indicates whether the user should be an institutional curator :returns: Whether contributor was added """ - send_email = send_email or self.contributor_email_template # If user is merged into another account, use master account contrib_to_add = contributor.merged_by if contributor.is_merged else contributor + if notification_type is None: + from osf.models import AbstractNode, Preprint, DraftRegistration + + if isinstance(self, AbstractNode): + notification_type = NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + elif isinstance(self, Preprint): + notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT + elif isinstance(self, DraftRegistration): + notification_type = NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT + if contrib_to_add.is_disabled: raise ValidationValueError('Deactivated users cannot be added as contributors.') @@ -1465,7 +1478,7 @@ def add_contributor(self, contributor, permissions=None, visible=True, self, contributor=contributor, auth=auth, - notification_type=send_email, + notification_type=notification_type, permissions=permissions ) @@ -1513,7 +1526,7 @@ def add_unregistered_contributor( fullname, email, auth, - send_email=None, + notification_type=None, visible=True, permissions=None, existing_user=None @@ -1528,7 +1541,6 @@ def add_unregistered_contributor( :raises: DuplicateEmailError if user with given email is already in the database. 
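The notification_type defaulting that replaces the per-model contributor_email_template property is easy to miss inside this large mixins.py hunk, so here is a compressed sketch of just that dispatch. Stub classes stand in for the real Django models and the string values are the ones declared in notifications.yaml; this is an illustration, not the code in the diff:

# Stand-ins for osf.models.AbstractNode / Preprint / DraftRegistration.
class AbstractNode: ...
class Preprint: ...
class DraftRegistration: ...

def resolve_contributor_notification(resource, notification_type=None):
    # Mirrors ContributorMixin.add_contributor(): an explicit value wins
    # (including False, which the v2 serializer passes for send_email=false);
    # only a missing value falls back to the per-resource default.
    if notification_type is not None:
        return notification_type
    if isinstance(resource, AbstractNode):
        return 'node_contributor_added_default'
    if isinstance(resource, Preprint):
        return 'preprint_contributor_added_default'
    if isinstance(resource, DraftRegistration):
        return 'draft_registration_contributor_added_default'
    return None

assert resolve_contributor_notification(Preprint()) == 'preprint_contributor_added_default'
assert resolve_contributor_notification(Preprint(), notification_type=False) is False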
""" OSFUser = apps.get_model('osf.OSFUser') - send_email = send_email or self.contributor_email_template if email: try: @@ -1568,7 +1580,7 @@ def add_unregistered_contributor( permissions=permissions, auth=auth, visible=visible, - send_email=send_email, + notification_type=notification_type, log=True, save=False ) @@ -1581,13 +1593,11 @@ def add_contributor_registered_or_not(self, user_id=None, full_name=None, email=None, - send_email=None, + notification_type=None, permissions=None, bibliographic=True, index=None): OSFUser = apps.get_model('osf.OSFUser') - send_email = send_email or self.contributor_email_template - if user_id: contributor = OSFUser.load(user_id) if not contributor: @@ -1602,7 +1612,7 @@ def add_contributor_registered_or_not(self, auth=auth, visible=bibliographic, permissions=permissions, - send_email=send_email, + notification_type=notification_type, save=True ) else: @@ -1615,7 +1625,7 @@ def add_contributor_registered_or_not(self, fullname=full_name, email=contributor.username, auth=auth, - send_email=send_email, + notification_type=notification_type, permissions=permissions, visible=bibliographic, existing_user=contributor, @@ -1627,14 +1637,20 @@ def add_contributor_registered_or_not(self, raise ValidationValueError(f'{contributor.fullname} is already a contributor.') if contributor and contributor.is_registered: - self.add_contributor(contributor=contributor, auth=auth, visible=bibliographic, - send_email=send_email, permissions=permissions, save=True) + self.add_contributor( + contributor=contributor, + auth=auth, + visible=bibliographic, + notification_type=notification_type, + permissions=permissions, + save=True + ) else: contributor = self.add_unregistered_contributor( fullname=full_name, email=email, auth=auth, - send_email=send_email, + notification_type=notification_type, permissions=permissions, visible=bibliographic ) diff --git a/osf/models/node.py b/osf/models/node.py index 44815d360c0..d6b35c81335 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -34,7 +34,6 @@ from framework.exceptions import PermissionsError, HTTPError from framework.sentry import log_exception from osf.exceptions import InvalidTagError, NodeStateError, TagNotFoundError, ValidationError -from osf.models.notification_type import NotificationType from .contributor import Contributor from .collection_submission import CollectionSubmission @@ -936,10 +935,6 @@ def contributors_and_group_members(self): """ return self.get_users_with_perm(READ) - @property - def contributor_email_template(self): - return NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - @property def registrations_all(self): """For v1 compat.""" diff --git a/osf/models/preprint.py b/osf/models/preprint.py index 150987ca893..a2415643e2a 100644 --- a/osf/models/preprint.py +++ b/osf/models/preprint.py @@ -587,10 +587,6 @@ def root_folder(self): def osfstorage_region(self): return self.region - @property - def contributor_email_template(self): - return NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT - @property def file_read_scope(self): return oauth_scopes.CoreScopes.PREPRINT_FILE_READ diff --git a/osf/models/registrations.py b/osf/models/registrations.py index f08fefe83b9..1ef8689643f 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -1194,11 +1194,6 @@ def visible_contributors(self): draftregistrationcontributor__visible=True ).order_by(self.order_by_contributor_field) - @property - def contributor_email_template(self): - # Override for ContributorMixin - return 
NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT - @property def institutions_url(self): # For NodeInstitutionsRelationshipSerializer @@ -1317,7 +1312,7 @@ def create_from_node(cls, user, schema, node=None, data=None, provider=None): draft, contributor=user, auth=None, - notification_type=NotificationType.Type.USER_CONTRIBUTOR_ADDED_DRAFT_REGISTRATION, + notification_type=NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT, permissions=initiator_permissions ) diff --git a/osf/utils/machines.py b/osf/utils/machines.py index d713f5264c0..ce92f81db3b 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -222,13 +222,20 @@ def save_changes(self, ev): contributor_permissions = ev.kwargs.get('permissions', self.machineable.requested_permissions) make_curator = self.machineable.request_type == NodeRequestTypes.INSTITUTIONAL_REQUEST.value visible = False if make_curator else ev.kwargs.get('visible', True) + if self.machineable.request_type == NodeRequestTypes.ACCESS: + notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST + elif self.machineable.request_type == NodeRequestTypes.INSTITUTIONAL_REQUEST: + notification_type = NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST + else: + notification_type = None + try: self.machineable.target.add_contributor( self.machineable.creator, auth=Auth(ev.kwargs['user']), permissions=contributor_permissions, visible=visible, - send_email=self.machineable.request_type, + notification_type=notification_type, make_curator=make_curator, ) except IntegrityError as e: diff --git a/osf_tests/test_institutional_admin_contributors.py b/osf_tests/test_institutional_admin_contributors.py index 62d4205eeb2..bf56ba3fcb6 100644 --- a/osf_tests/test_institutional_admin_contributors.py +++ b/osf_tests/test_institutional_admin_contributors.py @@ -142,7 +142,7 @@ def test_requested_permissions_or_default(self, app, project, institutional_admi auth=mock.ANY, permissions=permissions.ADMIN, # `requested_permissions` should take precedence visible=True, - send_email='access', + notification_type='access', make_curator=False, ) @@ -168,7 +168,7 @@ def test_permissions_override_requested_permissions(self, app, project, institut auth=mock.ANY, permissions=permissions.ADMIN, # `requested_permissions` should take precedence visible=True, - send_email='access', + notification_type='access', make_curator=False, ) @@ -194,6 +194,6 @@ def test_requested_permissions_is_used(self, app, project, institutional_admin): auth=mock.ANY, permissions=permissions.ADMIN, # `requested_permissions` should take precedence visible=True, - send_email='access', + notification_type='access', make_curator=False, ) diff --git a/osf_tests/test_merging_users.py b/osf_tests/test_merging_users.py index 2a7400bd40d..e51e922ec62 100644 --- a/osf_tests/test_merging_users.py +++ b/osf_tests/test_merging_users.py @@ -6,8 +6,6 @@ from framework.celery_tasks import handlers from website import settings -from website.project.signals import contributor_added -from website.project.views.contributor import notify_added_contributor from website.util.metrics import OsfSourceTags from framework.auth import Auth @@ -288,8 +286,6 @@ def test_merge_unregistered(self): assert self.user in self.project_with_unreg_contrib.contributors def test_merge_doesnt_send_signal(self): - #Explictly reconnect signal as it is disconnected by default for test - contributor_added.connect(notify_added_contributor) other_user = UserFactory() with capture_notifications() as 
notifications: with override_flag(ENABLE_GV, active=True): diff --git a/tests/base.py b/tests/base.py index b308b9dca17..1eacefc066d 100644 --- a/tests/base.py +++ b/tests/base.py @@ -21,10 +21,6 @@ from osf.models import RegistrationSchema from website import settings from website.app import init_app -from website.notifications.listeners import (subscribe_contributor, - subscribe_creator) -from website.project.signals import contributor_added, project_created -from website.project.views.contributor import notify_added_contributor from website.signals import ALL_SIGNALS from .json_api_test_app import JSONAPITestApp diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 30e38b3425a..62e84e916fc 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -331,12 +331,12 @@ def test_notify_contributor_email_does_not_send_before_throttle_expires(self): project = ProjectFactory() auth = Auth(project.creator) with capture_notifications() as notifications: - notify_added_contributor(project, contributor, 'default', auth) + notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth) assert len(notifications) == 1 # 2nd call does not send email because throttle period has not expired with capture_notifications() as notifications: - notify_added_contributor(project, contributor, 'default', auth) + notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth) assert not notifications def test_notify_contributor_email_sends_after_throttle_expires(self): @@ -346,13 +346,13 @@ def test_notify_contributor_email_sends_after_throttle_expires(self): project = ProjectFactory() auth = Auth(project.creator) with capture_notifications() as notifications: - notify_added_contributor(project, contributor, 'default', auth, throttle=throttle) + notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth, throttle=throttle) assert len(notifications) == 1 assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT time.sleep(1) # throttle period expires with capture_notifications() as notifications: - notify_added_contributor(project, contributor, 'default', auth, throttle=throttle) + notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth, throttle=throttle) assert len(notifications) == 2 assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT assert notifications[1]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT diff --git a/tests/test_events.py b/tests/test_events.py index 812eb1608a1..bd79036b384 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -3,10 +3,10 @@ from unittest import mock from django.contrib.contenttypes.models import ContentType -from pytest import raises from osf.models import NotificationType -from website.notifications.events.base import Event, register, event_registry +from tests.utils import capture_notifications +from website.notifications.events.base import event_registry from website.notifications.events.files import ( FileAdded, FileRemoved, FolderCreated, FileUpdated, AddonFileCopied, AddonFileMoved, AddonFileRenamed, @@ -136,9 +136,9 @@ def setUp(self): self.project = factories.ProjectFactory(creator=self.user_1) # subscription self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + 'file_updated', - 
owner=self.project, - event_name='file_updated', + object_id=self.project.id, + content_type=ContentType.objects.get_for_model(self.project), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) ) self.sub.save() self.event = event_registry['file_updated'](self.user_2, self.project, 'file_updated', payload=file_payload) @@ -162,9 +162,9 @@ def setUp(self): self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() self.project_subscription = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' + object_id=self.project.id, + content_type=ContentType.objects.get_for_model(self.project), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) ) self.project_subscription.save() self.user2 = factories.UserFactory() @@ -189,8 +189,9 @@ def setUp(self): self.consolidate_auth = Auth(user=self.user) self.project = factories.ProjectFactory() self.project_subscription = factories.NotificationSubscriptionFactory( - user=self.user, - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_ADDED), + object_id=self.project.id, + content_type=ContentType.objects.get_for_model(self.project), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_REMOVED) ) self.project_subscription.object_id = self.project.id self.project_subscription.content_type = ContentType.objects.get_for_model(self.project) @@ -226,7 +227,7 @@ def setUp(self): self.project = factories.ProjectFactory() self.project_subscription = factories.NotificationSubscriptionFactory( user=self.user, - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_ADDED), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED), ) self.project_subscription.save() self.user2 = factories.UserFactory() @@ -254,9 +255,10 @@ def setUp(self): self.project = factories.ProjectFactory(creator=self.user_1) # subscription self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + 'file_updated', - owner=self.project, - event_name='file_updated', + user=self.user_2, + object_id=self.project.id, + content_type=ContentType.objects.get_for_model(self.project), + notification_type=NotificationType.objects.get(name=NotificationType.Type.USER_FILE_UPDATED) ) self.sub.save() @@ -266,7 +268,6 @@ def setUp(self): self.user_1, self.project, 'addon_file_renamed', payload=file_renamed_payload ) - self.sub.email_digest.add(self.user_2) self.sub.save() def test_rename_file_html(self): @@ -308,26 +309,23 @@ def setUp(self): # Subscriptions # for parent node self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' + object_id=self.project.id, + content_type=ContentType.objects.get_for_model(self.project), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) ) self.sub.save() # for private node self.private_sub = factories.NotificationSubscriptionFactory( - _id=self.private_node._id + '_file_updated', - owner=self.private_node, - event_name='file_updated' + object_id=self.private_node.id, + content_type=ContentType.objects.get_for_model(self.private_node), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) ) self.private_sub.save() # for file subscription self.file_sub = 
factories.NotificationSubscriptionFactory( - _id='{pid}_{wbid}_file_updated'.format( - pid=self.project._id, - wbid=self.event.waterbutler_id - ), - owner=self.project, - event_name='xyz42_file_updated' + object_id=self.project.id, + content_type=ContentType.objects.get_for_model(self.project), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILES_UPDATED) ) self.file_sub.save() @@ -340,42 +338,51 @@ def test_info_formed_correct(self): def test_user_performing_action_no_email(self): # Move Event: Makes sure user who performed the action is not # included in the notifications - self.sub.email_digest.add(self.user_2) + # self.sub.email_digest.add(self.user_2) self.sub.save() - self.event.perform() - assert 0 == mock_store.call_count + with capture_notifications() as notifications: + self.event.perform() + assert not notifications def test_perform_store_called_once(self): - self.sub.email_transactional.add(self.user_1) + # self.sub.email_transactional.add(self.user_1) self.sub.save() - self.event.perform() - assert 1 == mock_store.call_count + with capture_notifications() as notifications: + self.event.perform() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_ADDON_FILE_MOVED def test_perform_store_one_of_each(self): # Move Event: Tests that store_emails is called 3 times, one in # each category - self.sub.email_transactional.add(self.user_1) + # self.sub.email_transactional.add(self.user_1) self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.project.save() self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.private_node.save() - self.sub.email_digest.add(self.user_3) + # self.sub.email_digest.add(self.user_3) self.sub.save() self.project.add_contributor(self.user_4, permissions=WRITE, auth=self.auth) self.project.save() - self.file_sub.email_digest.add(self.user_4) + # self.file_sub.email_digest.add(self.user_4) self.file_sub.save() - self.event.perform() - assert 3 == mock_store.call_count + with capture_notifications() as notifications: + self.event.perform() + assert len(notifications) == 3 + assert notifications[0]['type'] == NotificationType.Type.NODE_FILE_UPDATED + assert notifications[1]['type'] == NotificationType.Type.NODE_FILE_UPDATED + assert notifications[2]['type'] == NotificationType.Type.NODE_FILE_UPDATED def test_remove_user_sent_once(self): # Move Event: Tests removed user is removed once. 
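# The capture_notifications() pattern used throughout these rewritten tests
# replaces the old mock_store call-count assertions. A hedged sketch of how a
# test consumes it -- the helper comes from tests.utils, and the record shape
# beyond 'type' and 'kwargs' is assumed from the assertions in this series:
from osf.models import NotificationType
from tests.utils import capture_notifications

def assert_single_node_file_updated(event):
    with capture_notifications() as notifications:
        event.perform()
    # Each captured record carries the NotificationType that was emitted and
    # the kwargs passed by the emitter (user, event_context, ...), instead of
    # a rendered email, which keeps these assertions backend-agnostic.
    assert len(notifications) == 1
    assert notifications[0]['type'] == NotificationType.Type.NODE_FILE_UPDATED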
Regression self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.project.save() - self.file_sub.email_digest.add(self.user_3) + # self.file_sub.email_digest.add(self.user_3) self.file_sub.save() - self.event.perform() - assert 1 == mock_store.call_count + with capture_notifications() as notifications: + self.event.perform() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_ADDON_FILE_MOVED class TestFileCopied(OsfTestCase): @@ -398,26 +405,23 @@ def setUp(self): # Subscriptions # for parent node self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' + object_id=self.project.id, + content_type=ContentType.objects.get_for_model(self.project), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) ) self.sub.save() # for private node self.private_sub = factories.NotificationSubscriptionFactory( - _id=self.private_node._id + '_file_updated', - owner=self.private_node, - event_name='file_updated' + object_id=self.private_node.id, + content_type=ContentType.objects.get_for_model(self.private_node), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) ) self.private_sub.save() # for file subscription self.file_sub = factories.NotificationSubscriptionFactory( - _id='{pid}_{wbid}_file_updated'.format( - pid=self.project._id, - wbid=self.event.waterbutler_id - ), - owner=self.project, - event_name='xyz42_file_updated' + object_id=self.project.id, + content_type=ContentType.objects.get_for_model(self.project), + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILES_UPDATED) ) self.file_sub.save() @@ -434,133 +438,31 @@ def test_info_correct(self): def test_copied_one_of_each(self): # Copy Event: Tests that store_emails is called 2 times, two with # permissions, one without - self.sub.email_transactional.add(self.user_1) + # self.sub.email_transactional.add(self.user_1) self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.project.save() self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.private_node.save() - self.sub.email_digest.add(self.user_3) + # self.sub.email_digest.add(self.user_3) self.sub.save() self.project.add_contributor(self.user_4, permissions=WRITE, auth=self.auth) self.project.save() - self.file_sub.email_digest.add(self.user_4) + # self.file_sub.email_digest.add(self.user_4) self.file_sub.save() - self.event.perform() - assert 2 == mock_store.call_count + with capture_notifications() as notifications: + self.event.perform() + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.NODE_FILE_UPDATED + assert notifications[1]['type'] == NotificationType.Type.NODE_FILE_UPDATED def test_user_performing_action_no_email(self): # Move Event: Makes sure user who performed the action is not # included in the notifications - self.sub.email_digest.add(self.user_2) + # self.sub.email_digest.add(self.user_2) self.sub.save() - self.event.perform() - assert 0 == mock_store.call_count - - -class TestCategorizeUsers(OsfTestCase): - def setUp(self): - super().setUp() - self.user_1 = factories.AuthUserFactory() - self.auth = Auth(user=self.user_1) - self.user_2 = factories.AuthUserFactory() - self.user_3 = factories.AuthUserFactory() - self.user_4 = factories.AuthUserFactory() - self.project = 
factories.ProjectFactory(creator=self.user_1) - self.private_node = factories.NodeFactory( - parent=self.project, is_public=False, creator=self.user_1 - ) - # Payload - file_moved_payload = file_move_payload(self.private_node, self.project) - self.event = event_registry['addon_file_moved']( - self.user_2, self.private_node, 'addon_file_moved', - payload=file_moved_payload - ) - # Subscriptions - # for parent node - self.sub = factories.NotificationSubscriptionFactory( - _id=self.project._id + '_file_updated', - owner=self.project, - event_name='file_updated' - ) - self.sub.save() - # for private node - self.private_sub = factories.NotificationSubscriptionFactory( - _id=self.private_node._id + '_file_updated', - owner=self.private_node, - event_name='file_updated' - ) - self.private_sub.save() - # for file subscription - self.file_sub = factories.NotificationSubscriptionFactory( - _id='{pid}_{wbid}_file_updated'.format( - pid=self.project._id, - wbid=self.event.waterbutler_id - ), - owner=self.project, - event_name='xyz42_file_updated' - ) - self.file_sub.save() - - def test_warn_user(self): - # Tests that a user with a sub in the origin node gets a warning that - # they are no longer tracking the file. - self.sub.email_transactional.add(self.user_1) - self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.project.save() - self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.private_node.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - self.private_sub.none.add(self.user_3) - self.private_sub.save() - moved, warn, removed = utils.categorize_users( - self.event.user, self.event.event_type, self.event.source_node, - self.event.event_type, self.event.node - ) - assert {email_transactional: [], email_digest: [self.user_3._id], 'none': []} == warn - assert {email_transactional: [self.user_1._id], email_digest: [], 'none': []} == moved - - def test_moved_user(self): - # Doesn't warn a user with two different subs, but does send a - # moved email - self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.project.save() - self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.private_node.save() - self.sub.email_digest.add(self.user_3) - self.sub.save() - self.private_sub.email_transactional.add(self.user_3) - self.private_sub.save() - moved, warn, removed = utils.categorize_users( - self.event.user, self.event.event_type, self.event.source_node, - self.event.event_type, self.event.node - ) - assert {email_transactional: [], email_digest: [], 'none': []} == warn - assert {email_transactional: [self.user_3._id], email_digest: [], 'none': []} == moved - - def test_remove_user(self): - self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) - self.project.save() - self.file_sub.email_transactional.add(self.user_3) - self.file_sub.save() - moved, warn, removed = utils.categorize_users( - self.event.user, self.event.event_type, self.event.source_node, - self.event.event_type, self.event.node - ) - assert {email_transactional: [self.user_3._id], email_digest: [], 'none': []} == removed - - def test_node_permissions(self): - self.private_node.add_contributor(self.user_3, permissions=WRITE) - self.private_sub.email_digest.add(self.user_3, self.user_4) - remove = {email_transactional: [], email_digest: [], 'none': []} - warn = {email_transactional: [], email_digest: [self.user_3._id, self.user_4._id], 'none': []} - subbed, remove = 
utils.subscriptions_node_permissions( - self.private_node, - warn, - remove - ) - assert {email_transactional: [], email_digest: [self.user_3._id], 'none': []} == subbed - assert {email_transactional: [], email_digest: [self.user_4._id], 'none': []} == remove + with capture_notifications() as notifications: + self.event.perform() + assert not notifications class TestSubscriptionManipulations(OsfTestCase): diff --git a/website/mails/mails.py b/website/mails/mails.py index 47e0edf3d28..fa5896e9351 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -93,42 +93,9 @@ def get_english_article(word): 'confirm_registries_osf', subject='OSF Account Verification, OSF Registries' ) -CONFIRM_EMAIL_MODERATION = lambda provider: Mail( - 'confirm_moderation', - subject=f'OSF Account Verification, {provider.name}' -) # Merge account, add or remove email confirmation emails. CONFIRM_MERGE = Mail('confirm_merge', subject='Confirm account merge') -COLLECTION_SUBMISSION_REJECTED = lambda collection, node: Mail( - 'collection_submission_rejected', - subject=f'{node.title} was not accepted into {collection.title}' -) -COLLECTION_SUBMISSION_SUBMITTED = lambda submitter, node: Mail( - 'collection_submission_submitted', - subject=f'{submitter.fullname} has requested to add {node.title} to a collection' -) -COLLECTION_SUBMISSION_ACCEPTED = lambda collection, node: Mail( - 'collection_submission_accepted', - subject=f'{node.title} was accepted into {collection.title}' -) -COLLECTION_SUBMISSION_REMOVED_MODERATOR = lambda collection, node: Mail( - 'collection_submission_removed_moderator', - subject=f'{node.title} was removed from {collection.title}' -) -COLLECTION_SUBMISSION_REMOVED_ADMIN = lambda collection, node: Mail( - 'collection_submission_removed_admin', - subject=f'{node.title} was removed from {collection.title}' -) -COLLECTION_SUBMISSION_REMOVED_PRIVATE = lambda collection, node: Mail( - 'collection_submission_removed_private', - subject=f'{node.title} was removed from {collection.title}' -) -COLLECTION_SUBMISSION_CANCEL = lambda collection, node: Mail( - 'collection_submission_cancel', - subject=f'Request to add {node.title} to {collection.title} was canceled' -) - PRIMARY_EMAIL_CHANGED = Mail('primary_email_changed', subject='Primary email changed') diff --git a/website/notifications/constants.py b/website/notifications/constants.py index 35e3559d252..3b0b81d6823 100644 --- a/website/notifications/constants.py +++ b/website/notifications/constants.py @@ -1,7 +1,3 @@ - -# Note: if the subscription starts with 'global_', it will be treated like a default -# subscription. If no notification type has been assigned, the user subscription -# will default to 'email_transactional'. 
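The 'global_' prefix convention described in the comment removed here is superseded by explicit per-user NotificationSubscription rows, created by the subscribe_confirmed_user listener shown just below. A condensed sketch of that wiring, using only the ORM calls that appear in this series (assumes the notification types have already been populated from notifications.yaml):

from django.apps import apps

def subscribe_user_defaults(user):
    # One row per (user, notification_type); get_or_create keeps the
    # user_confirmed signal handler idempotent if it fires more than once.
    NotificationSubscription = apps.get_model('osf.NotificationSubscription')
    NotificationType = apps.get_model('osf.NotificationType')
    for name in (
        NotificationType.Type.USER_FILE_UPDATED,
        NotificationType.Type.USER_REVIEWS,
    ):
        NotificationSubscription.objects.get_or_create(
            user=user,
            notification_type=NotificationType.objects.get(name=name),
        )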
USER_SUBSCRIPTIONS_AVAILABLE = [ 'user_file_updated', 'user_reviews' diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index 4447fa971d7..ca9fdcd6807 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -1,6 +1,7 @@ import logging -from osf import apps +from django.apps import apps + from website.project.signals import contributor_added, project_created from framework.auth.signals import user_confirmed @@ -27,12 +28,11 @@ def subscribe_contributor(resource, contributor, auth=None, *args, **kwargs): def subscribe_confirmed_user(user): NotificationSubscription = apps.get_model('osf.NotificationSubscription') NotificationType = apps.get_model('osf.NotificationType') - user_events = [ - NotificationType.Type.USER_FILE_UPDATED, - NotificationType.Type.USER_REVIEWS, - ] - for user_event in user_events: - NotificationSubscription.objects.get_or_create( - user=user, - notification_type=user_event - ) + NotificationSubscription.objects.get_or_create( + user=user, + notification_type=NotificationType.objects.get(name=NotificationType.Type.USER_FILE_UPDATED) + ) + NotificationSubscription.objects.get_or_create( + user=user, + notification_type=NotificationType.objects.get(name=NotificationType.Type.USER_REVIEWS) + ) diff --git a/website/notifications/utils.py b/website/notifications/utils.py index 331b2162acf..b86792f348a 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -2,7 +2,6 @@ from django.apps import apps from django.contrib.contenttypes.models import ContentType -from django.db.models import Q from framework.postcommit_tasks.handlers import run_postcommit from osf.models import NotificationSubscription, NotificationType @@ -78,7 +77,13 @@ def remove_contributor_from_subscriptions(node, user): # If user still has permissions through being a contributor or group member, or has # admin perms on a parent, don't remove their subscription if not (node.is_contributor_or_group_member(user)) and user._id not in node.admin_contributor_or_group_member_ids: - node_subscriptions = get_all_node_subscriptions(user, node) + node_subscriptions = NotificationSubscription.objects.filter( + user=user, + user__isnull=True, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node) + ) + for subscription in node_subscriptions: subscription.remove_user_from_subscription(user) @@ -198,10 +203,9 @@ def get_configured_projects(user): :return: list of node objects for projects with no parent """ configured_projects = set() - user_subscriptions = get_all_user_subscriptions(user, extra=( - ~Q(node__type='osf.collection') & - Q(node__is_deleted=False) - )) + user_subscriptions = NotificationSubscription.objects.filter( + user=user + ) for subscription in user_subscriptions: # If the user has opted out of emails skip @@ -220,38 +224,19 @@ def get_configured_projects(user): return sorted(configured_projects, key=lambda n: n.title.lower()) - def check_project_subscriptions_are_all_none(user, node): - node_subscriptions = get_all_node_subscriptions(user, node) + node_subscriptions = NotificationSubscription.objects.filter( + user=user, + user__isnull=True, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node) + ) + for s in node_subscriptions: if not s.none.filter(id=user.id).exists(): return False return True - -def get_all_user_subscriptions(user, extra=None): - """ Get all Subscription objects that the user is subscribed to""" - NotificationSubscription = 
apps.get_model('osf.NotificationSubscription') - queryset = NotificationSubscription.objects.filter( - Q(none=user.pk) | - Q(email_digest=user.pk) | - Q(email_transactional=user.pk) - ).distinct() - return queryset.filter(extra) if extra else queryset - - -def get_all_node_subscriptions(user, node, user_subscriptions=None): - """ Get all Subscription objects for a node that the user is subscribed to - :param user: OSFUser object - :param node: Node object - :param user_subscriptions: all Subscription objects that the user is subscribed to - :return: list of Subscription objects for a node that the user is subscribed to - """ - if not user_subscriptions: - user_subscriptions = get_all_user_subscriptions(user) - return user_subscriptions.filter(user__isnull=True, node=node) - - def format_data(user, nodes): """ Format subscriptions data for project settings page :param user: OSFUser object @@ -260,7 +245,6 @@ def format_data(user, nodes): """ items = [] - user_subscriptions = get_all_user_subscriptions(user) for node in nodes: assert node, f'{node._id} is not a valid Node.' @@ -276,14 +260,19 @@ def format_data(user, nodes): # user is contributor on a component of the project/node if can_read: - node_sub_available = ['node_file_updated'] - subscriptions = get_all_node_subscriptions(user, node, user_subscriptions=user_subscriptions).filter(event_name__in=node_sub_available) + subscriptions = NotificationSubscription.objects.filter( + user=user, + notification_type__name='node_file_updated', + user__isnull=True, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node) + ) for subscription in subscriptions: - index = node_sub_available.index(getattr(subscription, 'event_name')) - children_tree.append(serialize_event(user, subscription=subscription, - node=node, event_description=node_sub_available.pop(index))) - for node_sub in node_sub_available: + children_tree.append( + serialize_event(user, subscription=subscription, node=node) + ) + for node_sub in subscriptions: children_tree.append(serialize_event(user, node=node, event_description=node_sub)) children_tree.sort(key=lambda s: s['event']['title']) @@ -317,7 +306,7 @@ def format_user_subscriptions(user): user, subscription, event_description=user_subs_available.pop(user_subs_available.index(getattr(subscription, 'event_name'))) ) - for subscription in get_all_user_subscriptions(user) + for subscription in NotificationSubscription.objects.get(user=user) if subscription is not None and getattr(subscription, 'event_name') in user_subs_available ] subscriptions.extend([serialize_event(user, event_description=sub) for sub in user_subs_available]) @@ -329,7 +318,14 @@ def format_file_subscription(user, node_id, path, provider): AbstractNode = apps.get_model('osf.AbstractNode') node = AbstractNode.load(node_id) wb_path = path.lstrip('/') - for subscription in get_all_node_subscriptions(user, node): + subscriptions = NotificationSubscription.objects.filter( + user=user, + user__isnull=True, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node) + ) + + for subscription in subscriptions: if wb_path in getattr(subscription, 'event_name'): return serialize_event(user, subscription, node) return serialize_event(user, node=node, event_description='file_updated') diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index f63b5f71317..715044063e8 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -643,18 +643,16 @@ def 
notify_added_contributor(node, contributor, notification_type, auth=None, *a logo = settings.OSF_LOGO # Use match for notification type/logic - if notification_type == 'default': - notification_type = NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - elif notification_type == 'preprint': - notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT - elif notification_type == 'draft_registration': - notification_type = NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT - elif notification_type == 'access': - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST - elif notification_type == 'access_request': - notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST - elif notification_type == 'institutional_request': - notification_type = NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST + if notification_type == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT: + pass + elif notification_type == NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT: + pass + elif notification_type == NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT: + pass + elif notification_type == NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST: + pass + elif notification_type == NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST: + pass elif getattr(node, 'has_linked_published_preprints', None): notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF logo = settings.OSF_PREPRINTS_LOGO @@ -665,7 +663,6 @@ def notify_added_contributor(node, contributor, notification_type, auth=None, *a user=contributor, event_context={ 'user': contributor.id, - 'node': node.title, 'referrer_name': getattr(getattr(auth, 'user', None), 'fullname', '') if auth else '', 'is_initiator': getattr(getattr(auth, 'user', None), 'id', None) == contributor.id if auth else False, 'all_global_subscriptions_none': False, From c89399fbd198383300fa1f5929f4159082f20be1 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 28 Jul 2025 15:45:20 -0400 Subject: [PATCH 129/176] remove _ensure_subscriptions --- ...test_collections_provider_moderator_list.py | 2 -- osf_tests/test_schema_responses.py | 3 +-- osf_tests/utils.py | 18 ------------------ 3 files changed, 1 insertion(+), 22 deletions(-) diff --git a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py index bf1efa42e2b..289a24fb313 100644 --- a/api_tests/providers/collections/views/test_collections_provider_moderator_list.py +++ b/api_tests/providers/collections/views/test_collections_provider_moderator_list.py @@ -7,7 +7,6 @@ CollectionProviderFactory, ) from osf.utils import permissions -from osf_tests.utils import _ensure_subscriptions from tests.utils import capture_notifications @@ -20,7 +19,6 @@ def url(provider): def provider(): provider = CollectionProviderFactory() provider.update_group_permissions() - _ensure_subscriptions(provider) return provider diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index 1226c24c353..7b6250f8f25 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -7,7 +7,7 @@ from osf.models import schema_response # import module for mocking purposes from osf.utils.workflows import ApprovalStates, SchemaResponseTriggers from osf_tests.factories import AuthUserFactory, ProjectFactory, RegistrationFactory, 
RegistrationProviderFactory -from osf_tests.utils import get_default_test_schema, _ensure_subscriptions +from osf_tests.utils import get_default_test_schema from tests.utils import capture_notifications from transitions import MachineError @@ -812,7 +812,6 @@ class TestModeratedSchemaResponseApprovalFlows(): def provider(self): provider = RegistrationProviderFactory() provider.update_group_permissions() - _ensure_subscriptions(provider) provider.reviews_workflow = Workflows.PRE_MODERATION.value provider.save() return provider diff --git a/osf_tests/utils.py b/osf_tests/utils.py index ecfd046d1b2..884c4249de9 100644 --- a/osf_tests/utils.py +++ b/osf_tests/utils.py @@ -3,8 +3,6 @@ import functools from unittest import mock -from django.contrib.contenttypes.models import ContentType - from framework.auth import Auth from django.utils import timezone from google.cloud.storage import Client, Bucket, Blob @@ -18,7 +16,6 @@ Sanction, RegistrationProvider, RegistrationSchema, - NotificationSubscription ) from osf.utils.migrations import create_schema_blocks_for_atomic_schema @@ -222,21 +219,6 @@ def get_default_test_schema(): return test_schema - -def _ensure_subscriptions(provider): - '''Make sure a provider's subscriptions exist. - - Provider subscriptions are populated by an on_save signal when the provider is created. - This has led to observed race conditions and probabalistic test failures. - Avoid that. - ''' - for notification_type in provider.DEFAULT_SUBSCRIPTIONS: - NotificationSubscription.objects.get_or_create( - notification_type=notification_type, - object_id=provider.id, - content_type=ContentType.objects.get_for_model(provider) - ) - def assert_notification_correctness(send_mail_mock, expected_template, expected_recipients): '''Confirms that a mocked send_mail function contains the appropriate calls.''' assert send_mail_mock.call_count == len(expected_recipients) From bf9945190e03ac18abf0240135a8856bbac3d1a7 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 28 Jul 2025 16:00:11 -0400 Subject: [PATCH 130/176] add draft_registration notificationtype for send_email --- api/nodes/serializers.py | 1 + osf/models/mixins.py | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index 0dd8c8e69cb..6c674ede555 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -1265,6 +1265,7 @@ def create(self, validated_data): notification_type = { 'false': False, 'default': NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, + 'draft_registration': NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT, }[email_preference] else: notification_type = False diff --git a/osf/models/mixins.py b/osf/models/mixins.py index be8058150fe..f1561dd8648 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -1092,7 +1092,7 @@ def remove_from_group(self, user, group, unsubscribe=True): if unsubscribe: # remove notification subscription for subscription in self.DEFAULT_SUBSCRIPTIONS: - self.remove_user_from_subscription(user, f'{self._id}_{subscription}') + self.remove_user_from_subscription(user, subscription) return _group.user_set.remove(user) @@ -1101,9 +1101,8 @@ def add_user_to_subscription(self, user, subscription): user=user, ) - def remove_user_from_subscription(self, user, subscription_id): - notification = self.notification_subscriptions.get(_id=subscription_id) - notification.remove_user_from_subscription(user, save=True) + def remove_user_from_subscription(self, user, subscription): + 
subscription.remove_user_from_subscription(user, save=True) class TaxonomizableMixin(models.Model): From 17816561fd441f29f976932289879bef3fe99e3d Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 28 Jul 2025 16:24:24 -0400 Subject: [PATCH 131/176] upgrade institutional request access notification subscriptions --- .../views/test_user_message_institutional_access.py | 6 +++--- notifications.yaml | 9 +++++++++ osf/models/mixins.py | 7 ++++++- osf/models/notification_subscription.py | 12 +++++++++++- osf/models/user_message.py | 6 +++--- tests/test_auth.py | 6 +++--- website/archiver/utils.py | 1 + website/mails/mails.py | 9 --------- ...er_message_institutional_access_request.html.mako | 6 +++--- 9 files changed, 39 insertions(+), 23 deletions(-) diff --git a/api_tests/users/views/test_user_message_institutional_access.py b/api_tests/users/views/test_user_message_institutional_access.py index aac978abeb0..7a0c6dbb419 100644 --- a/api_tests/users/views/test_user_message_institutional_access.py +++ b/api_tests/users/views/test_user_message_institutional_access.py @@ -221,7 +221,7 @@ def test_cc_institutional_admin( auth=institutional_admin.auth, ) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST assert notifications[0]['kwargs']['user'].username == user_with_affiliation.username assert res.status_code == 201 user_message = UserMessage.objects.get() @@ -235,7 +235,7 @@ def test_cc_field_defaults_to_false(self, app, institutional_admin, url_with_aff with capture_notifications() as notifications: res = app.post_json_api(url_with_affiliation, payload, auth=institutional_admin.auth) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST assert notifications[0]['kwargs']['user'].username == user_with_affiliation.username assert res.status_code == 201 @@ -255,4 +255,4 @@ def test_reply_to_header_set(self, app, institutional_admin, user_with_affiliati auth=institutional_admin.auth, ) assert res.status_code == 201 - assert notifications[0]['user'].username == user_with_affiliation.username + assert notifications[0]['kwargs']['user'].username == user_with_affiliation.username diff --git a/notifications.yaml b/notifications.yaml index c2edd80d0aa..693db2010bd 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -179,6 +179,10 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/file_operation_failed.html.mako' + - name: user_archive_job_exceeded + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/archive_size_exceeded_user.html.mako' #### PROVIDER - name: provider_new_pending_submissions @@ -387,6 +391,11 @@ notification_types: __docs__: Archive job failed due to copy error. Sent to support desk. object_content_type_model_name: desk template: 'website/templates/emails/new_pending_submissions.html.mako' + - name: desk_archive_job_exceeded + subject: 'Problem registering' + __docs__: Archive job failed due to size exceeded + object_content_type_model_name: desk + template: 'website/templates/emails/archive_size_exceeded_desk.html.mako' - name: desk_archive_job_file_not_found __docs__: Archive job failed because files were not found. Sent to support desk. 
object_content_type_model_name: desk diff --git a/osf/models/mixins.py b/osf/models/mixins.py index f1561dd8648..9405574970d 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -1102,7 +1102,12 @@ def add_user_to_subscription(self, user, subscription): ) def remove_user_from_subscription(self, user, subscription): - subscription.remove_user_from_subscription(user, save=True) + subscriptions = NotificationSubscription.objects.filter( + user=user, + notification_type=NotificationType.objects.get(name=subscription), + ) + if subscriptions: + subscriptions.get().remove_user_from_subscription(user) class TaxonomizableMixin(models.Model): diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py index 41b88ba9ea2..665c67029ff 100644 --- a/osf/models/notification_subscription.py +++ b/osf/models/notification_subscription.py @@ -2,7 +2,7 @@ from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError -from osf.models.notification_type import get_default_frequency_choices +from osf.models.notification_type import get_default_frequency_choices, FrequencyChoices from osf.models.notification import Notification from .base import BaseModel @@ -99,3 +99,13 @@ def _id(self): return f'{self.user._id}_global' case _: raise NotImplementedError() + + def remove_user_from_subscription(self, user): + """ + """ + from osf.models.notification_subscription import NotificationSubscription + notification, _ = NotificationSubscription.objects.update_or_create( + user=user, + notification_type=self, + defaults={'message_frequency': FrequencyChoices.NONE.value} + ) diff --git a/osf/models/user_message.py b/osf/models/user_message.py index e66ea395a52..df89e7c4649 100644 --- a/osf/models/user_message.py +++ b/osf/models/user_message.py @@ -89,10 +89,10 @@ def send_institution_request(self) -> None: ).emit( user=self.recipient, event_context={ - 'sender': self.sender, - 'recipient': self.recipient, + 'sender_fullname': self.sender.fullname, + 'recipient_fullname': self.recipient.fullname, 'message_text': self.message_text, - 'institution': self.institution, + 'institution_name': self.institution.name, }, email_context={ 'bcc_addr': [self.sender.username] if self.is_sender_BCCed else None, diff --git a/tests/test_auth.py b/tests/test_auth.py index 4e6ebf2265c..25068c024c8 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -174,7 +174,7 @@ def test_password_change_sends_email(self): user.set_password('killerqueen') user.save() assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD + assert notifications[0]['type'] == NotificationType.Type.USER_PASSWORD_RESET @mock.patch('framework.auth.utils.requests.post') def test_validate_recaptcha_success(self, req_post): @@ -219,12 +219,12 @@ def test_sign_up_twice_sends_two_confirmation_emails_only(self): with capture_notifications() as notifications: self.app.post(url, json=sign_up_data) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD + assert notifications[0]['type'] == NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL with capture_notifications() as notifications: self.app.post(url, json=sign_up_data) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD + assert notifications[0]['type'] == NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL 
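# Editor's note: the assertion rewrites above all lean on the capture_notifications
# helper imported from tests.utils elsewhere in this series. A minimal sketch of that
# pattern, assuming (as the surrounding tests do) that the helper yields a list of
# dicts carrying the emitted 'type' and the emit 'kwargs'; the user fixture here is
# illustrative only, not part of the patch:

from osf.models import NotificationType
from tests.utils import capture_notifications


def _sketch_password_change_assertion(user):
    with capture_notifications() as notifications:
        user.set_password('killerqueen')
        user.save()
    assert len(notifications) == 1
    assert notifications[0]['type'] == NotificationType.Type.USER_PASSWORD_RESET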
class TestAuthObject(OsfTestCase): diff --git a/website/archiver/utils.py b/website/archiver/utils.py index 9768c43a894..2808faa1015 100644 --- a/website/archiver/utils.py +++ b/website/archiver/utils.py @@ -31,6 +31,7 @@ def send_archiver_size_exceeded_mails(src, user, stat_result, url): NotificationType.objects.get( name=NotificationType.Type.DESK_ARCHIVE_JOB_EXCEEDED ).emit( + user=user, event_context={ 'user': user.id, 'src': src._id, diff --git a/website/mails/mails.py b/website/mails/mails.py index fa5896e9351..c7958c8169a 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -200,15 +200,6 @@ def get_english_article(word): UNESCAPE = '<% from osf.utils.sanitize import unescape_entities %> ${unescape_entities(src.title)}' PROBLEM_REGISTERING = 'Problem registering ' + UNESCAPE -ARCHIVE_SIZE_EXCEEDED_DESK = Mail( - 'archive_size_exceeded_desk', - subject=PROBLEM_REGISTERING -) -ARCHIVE_SIZE_EXCEEDED_USER = Mail( - 'archive_size_exceeded_user', - subject=PROBLEM_REGISTERING -) - ARCHIVE_COPY_ERROR_DESK = Mail( 'archive_copy_error_desk', subject=PROBLEM_REGISTERING diff --git a/website/templates/emails/user_message_institutional_access_request.html.mako b/website/templates/emails/user_message_institutional_access_request.html.mako index 1e314f91e4e..3c401b690f1 100644 --- a/website/templates/emails/user_message_institutional_access_request.html.mako +++ b/website/templates/emails/user_message_institutional_access_request.html.mako @@ -4,7 +4,7 @@ <%!from website import settings%> - Hello ${recipient.fullname}, + Hello ${recipient_fullname},

    This message is coming from an Institutional administrator within your Institution.

    @@ -14,12 +14,12 @@

    % endif

    - Want more information? Visit ${settings.DOMAIN} to learn about OSF, or + Want more information? Visit ${domain} to learn about OSF, or https://cos.io/ for information about its supporting organization, the Center for Open Science.

    - Questions? Email ${settings.OSF_CONTACT_EMAIL} + Questions? Email ${osf_contact_email}
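Editor's note: the rewritten template above interpolates ${recipient_fullname}, ${domain} and ${osf_contact_email}, while the updated UserMessage.send_institution_request earlier in this patch only puts sender_fullname, recipient_fullname, message_text and institution_name into event_context. Unless the notification renderer injects domain and contact-email defaults globally, the emit would presumably need to carry those keys as well. A hedged sketch under that assumption; the helper is hypothetical and the notification-type name is passed in rather than guessed:

    from osf.models import NotificationType
    from website import settings


    def _sketch_institution_request_emit(message, notification_type_name):
        """Hypothetical stand-in for UserMessage.send_institution_request."""
        NotificationType.objects.get(name=notification_type_name).emit(
            user=message.recipient,
            event_context={
                'sender_fullname': message.sender.fullname,
                'recipient_fullname': message.recipient.fullname,
                'message_text': message.message_text,
                'institution_name': message.institution.name,
                # assumed extra keys so ${domain} and ${osf_contact_email} resolve:
                'domain': settings.DOMAIN,
                'osf_contact_email': settings.OSF_CONTACT_EMAIL,
            },
            email_context={
                'bcc_addr': [message.sender.username] if message.is_sender_BCCed else None,
            },
        )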

    From 75d6f45e56a95b5bdfb457560a18ff22553fa682 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 28 Jul 2025 17:06:14 -0400 Subject: [PATCH 132/176] fix archiver notifications --- api/nodes/serializers.py | 1 + notifications.yaml | 14 +++++++++++++- osf/models/notification_type.py | 3 ++- osf/utils/machines.py | 4 ++-- osf_tests/test_archiver.py | 12 ++++++------ .../test_institutional_admin_contributors.py | 8 ++++---- scripts/stuck_registration_audit.py | 2 +- website/archiver/utils.py | 19 ++++++++++++------- website/mails/mails.py | 15 --------------- .../emails/archive_copy_error_desk.html.mako | 4 ++-- .../emails/archive_copy_error_user.html.mako | 4 ++-- .../archive_registration_stuck_desk.html.mako | 2 +- .../archive_size_exceeded_desk.html.mako | 6 +++--- .../archive_size_exceeded_user.html.mako | 4 ++-- 14 files changed, 51 insertions(+), 47 deletions(-) diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index 6c674ede555..80cc400b73d 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -1266,6 +1266,7 @@ def create(self, validated_data): 'false': False, 'default': NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, 'draft_registration': NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT, + 'preprint': NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT, }[email_preference] else: notification_type = False diff --git a/notifications.yaml b/notifications.yaml index 693db2010bd..7082541e164 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -180,9 +180,20 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/file_operation_failed.html.mako' - name: user_archive_job_exceeded + subject: 'Problem Registering' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/archive_size_exceeded_user.html.mako' + - name: user_archive_job_copy_error + subject: 'Problem Registering' + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/archive_copy_error_user.html.mako' + - name: user_archive_job_uncaught_error + subject: 'Problem Registering' + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/archive_uncaught_error_user.html.mako' #### PROVIDER - name: provider_new_pending_submissions @@ -388,9 +399,10 @@ notification_types: object_content_type_model_name: desk template: 'website/templates/emails/addons_boa_job_complete.html.mako' - name: desk_archive_job_copy_error + subject: 'Problem registering' __docs__: Archive job failed due to copy error. Sent to support desk. 
object_content_type_model_name: desk - template: 'website/templates/emails/new_pending_submissions.html.mako' + template: 'website/templates/emails/archive_copy_error_desk.html.mako' - name: desk_archive_job_exceeded subject: 'Problem registering' __docs__: Archive job failed due to size exceeded diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 3df55aa987d..7e7bf72fd6e 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -78,6 +78,7 @@ class Type(str, Enum): USER_INVITE_OSF_PREPRINT = 'user_invite_osf_preprint' USER_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'user_contributor_added_preprint_node_from_osf' USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'user_contributor_added_access_request' + USER_ARCHIVE_JOB_UNCAUGHT_ERROR = 'user_archive_job_uncaught_error' # Node notifications NODE_COMMENT = 'node_comments' @@ -201,7 +202,7 @@ def desk_types(cls): def emit( self, - user, + user=None, destination_address=None, subscribed_object=None, message_frequency='instantly', diff --git a/osf/utils/machines.py b/osf/utils/machines.py index ce92f81db3b..aadc8d7849a 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -222,9 +222,9 @@ def save_changes(self, ev): contributor_permissions = ev.kwargs.get('permissions', self.machineable.requested_permissions) make_curator = self.machineable.request_type == NodeRequestTypes.INSTITUTIONAL_REQUEST.value visible = False if make_curator else ev.kwargs.get('visible', True) - if self.machineable.request_type == NodeRequestTypes.ACCESS: + if self.machineable.request_type == NodeRequestTypes.ACCESS.value: notification_type = NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST - elif self.machineable.request_type == NodeRequestTypes.INSTITUTIONAL_REQUEST: + elif self.machineable.request_type == NodeRequestTypes.INSTITUTIONAL_REQUEST.value: notification_type = NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST else: notification_type = None diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index f653b20ea25..4d0491b21c9 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -733,8 +733,8 @@ def test_handle_archive_fail(self): {} ) assert len(notifications) == 2 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - assert notifications[1]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.DESK_ARCHIVE_JOB_COPY_ERROR + assert notifications[1]['type'] == NotificationType.Type.USER_ARCHIVE_JOB_COPY_ERROR self.dst.reload() assert self.dst.is_deleted @@ -748,8 +748,8 @@ def test_handle_archive_fail_copy(self): {} ) assert len(notifications) == 2 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - assert notifications[1]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.DESK_ARCHIVE_JOB_COPY_ERROR + assert notifications[1]['type'] == NotificationType.Type.USER_ARCHIVE_JOB_COPY_ERROR def test_handle_archive_fail_size(self): with capture_notifications() as notifications: @@ -761,8 +761,8 @@ def test_handle_archive_fail_size(self): {} ) assert len(notifications) == 2 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - assert notifications[1]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.DESK_ARCHIVE_JOB_EXCEEDED + assert notifications[1]['type'] == 
NotificationType.Type.USER_ARCHIVE_JOB_EXCEEDED def test_aggregate_file_tree_metadata(self): a_stat_result = archiver_utils.aggregate_file_tree_metadata('dropbox', FILE_TREE, self.user) diff --git a/osf_tests/test_institutional_admin_contributors.py b/osf_tests/test_institutional_admin_contributors.py index bf56ba3fcb6..d7702c2e6be 100644 --- a/osf_tests/test_institutional_admin_contributors.py +++ b/osf_tests/test_institutional_admin_contributors.py @@ -2,7 +2,7 @@ from unittest import mock -from osf.models import Contributor +from osf.models import Contributor, NotificationType from osf_tests.factories import ( AuthUserFactory, ProjectFactory, @@ -142,7 +142,7 @@ def test_requested_permissions_or_default(self, app, project, institutional_admi auth=mock.ANY, permissions=permissions.ADMIN, # `requested_permissions` should take precedence visible=True, - notification_type='access', + notification_type=NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST, make_curator=False, ) @@ -168,7 +168,7 @@ def test_permissions_override_requested_permissions(self, app, project, institut auth=mock.ANY, permissions=permissions.ADMIN, # `requested_permissions` should take precedence visible=True, - notification_type='access', + notification_type=NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST, make_curator=False, ) @@ -194,6 +194,6 @@ def test_requested_permissions_is_used(self, app, project, institutional_admin): auth=mock.ANY, permissions=permissions.ADMIN, # `requested_permissions` should take precedence visible=True, - notification_type='access', + notification_type=NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST, make_curator=False, ) diff --git a/scripts/stuck_registration_audit.py b/scripts/stuck_registration_audit.py index 07a5d9a68c6..c9bce059fb9 100644 --- a/scripts/stuck_registration_audit.py +++ b/scripts/stuck_registration_audit.py @@ -102,7 +102,7 @@ def main(): ).emit( destination_address=settings.OSF_SUPPORT_EMAIL, event_context={ - 'broken_registrations': broken_registrations, + 'broken_registrations_count': len(broken_registrations), 'attachment_name': filename, 'attachement_content': output.getvalue(), 'can_change_preferences': False diff --git a/website/archiver/utils.py b/website/archiver/utils.py index 2808faa1015..c6d4c0de009 100644 --- a/website/archiver/utils.py +++ b/website/archiver/utils.py @@ -45,8 +45,8 @@ def send_archiver_size_exceeded_mails(src, user, stat_result, url): ).emit( user=user, event_context={ - 'user': user, - 'src': src, + 'user': user.fullname, + 'src': src.title, 'can_change_preferences': False, } ) @@ -111,10 +111,13 @@ def send_archiver_uncaught_error_mails(src, user, results, url): NotificationType.objects.get( name=NotificationType.Type.DESK_ARCHIVE_JOB_UNCAUGHT_ERROR ).emit( + destination_address=settings.OSF_SUPPORT_EMAIL, event_context={ - 'user': user.id, + 'user_fullname': user.fullname, + 'src_title': src.title, + 'src__id': src._id, 'src': src._id, - 'results': results, + 'results': [str(error) for error in results], 'url': url, 'can_change_preferences': False, } @@ -122,11 +125,13 @@ def send_archiver_uncaught_error_mails(src, user, results, url): NotificationType.objects.get( name=NotificationType.Type.USER_ARCHIVE_JOB_UNCAUGHT_ERROR ).emit( - user=user, + destination_address=settings.OSF_SUPPORT_EMAIL, event_context={ - 'user': user.id, + 'user_fullname': user.fullname, + 'src_title': src.title, + 'src__id': src._id, 'src': src._id, - 'results': results, + 'results': [str(error) for error in results], 
'can_change_preferences': False, } ) diff --git a/website/mails/mails.py b/website/mails/mails.py index c7958c8169a..8ab4ddcabd5 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -199,16 +199,6 @@ def get_english_article(word): UNESCAPE = '<% from osf.utils.sanitize import unescape_entities %> ${unescape_entities(src.title)}' PROBLEM_REGISTERING = 'Problem registering ' + UNESCAPE - -ARCHIVE_COPY_ERROR_DESK = Mail( - 'archive_copy_error_desk', - subject=PROBLEM_REGISTERING -) -ARCHIVE_COPY_ERROR_USER = Mail( - 'archive_copy_error_user', - subject=PROBLEM_REGISTERING - -) ARCHIVE_FILE_NOT_FOUND_DESK = Mail( 'archive_file_not_found_desk', subject=PROBLEM_REGISTERING @@ -228,11 +218,6 @@ def get_english_article(word): subject='[auto] Stuck registrations audit' ) -ARCHIVE_UNCAUGHT_ERROR_USER = Mail( - 'archive_uncaught_error_user', - subject=PROBLEM_REGISTERING -) - ARCHIVE_SUCCESS = Mail( 'archive_success', subject='Registration of ' + UNESCAPE + ' complete' diff --git a/website/templates/emails/archive_copy_error_desk.html.mako b/website/templates/emails/archive_copy_error_desk.html.mako index 18938e306ac..9b2c779c386 100644 --- a/website/templates/emails/archive_copy_error_desk.html.mako +++ b/website/templates/emails/archive_copy_error_desk.html.mako @@ -11,9 +11,9 @@ - User: ${user.fullname} (${user.username}) [${user._id}] + User: ${user_fullname} (${user_username}) [${user__id}] - Tried to register ${src.title} (${url}) [${src._id}], but the archive task failed when copying files. + Tried to register ${src_title} (${url}) [${src__id}], but the archive task failed when copying files.
    A report is included below: diff --git a/website/templates/emails/archive_copy_error_user.html.mako b/website/templates/emails/archive_copy_error_user.html.mako index 310bd4f5a6b..ce7566f322c 100644 --- a/website/templates/emails/archive_copy_error_user.html.mako +++ b/website/templates/emails/archive_copy_error_user.html.mako @@ -5,12 +5,12 @@ <% from website import settings %> -

    Issue registering ${src.title}

    +

    Issue registering ${src_title}

    - We cannot archive ${src.title} at this time because there were errors copying files from some of the linked third-party services. It's possible that this is due to temporary unavailability of one or more of these services and that retrying the registration may resolve this issue. Our development team is investigating this failure. We're sorry for any inconvenience this may have caused. + We cannot archive ${src_title} at this time because there were errors copying files from some of the linked third-party services. It's possible that this is due to temporary unavailability of one or more of these services and that retrying the registration may resolve this issue. Our development team is investigating this failure. We're sorry for any inconvenience this may have caused. diff --git a/website/templates/emails/archive_registration_stuck_desk.html.mako b/website/templates/emails/archive_registration_stuck_desk.html.mako index f3fac20204a..f56cde7c054 100644 --- a/website/templates/emails/archive_registration_stuck_desk.html.mako +++ b/website/templates/emails/archive_registration_stuck_desk.html.mako @@ -4,7 +4,7 @@ <%def name="content()"> -

    ${len(broken_registrations)} registrations found stuck in archiving

    +

    ${broken_registrations_count} registrations found stuck in archiving
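Editor's note: the audit script in this patch now passes the count itself ('broken_registrations_count': len(broken_registrations)) in event_context, so the heading above should interpolate the integer directly; wrapping it in len() again would raise a TypeError at render time. A small, self-contained check of that rendering, assuming context keys surface as Mako variables as they do in the other templates in this series:

    from mako.template import Template

    snippet = Template('${broken_registrations_count} registrations found stuck in archiving')
    # Renders to "3 registrations found stuck in archiving"
    print(snippet.render(broken_registrations_count=3))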

    diff --git a/website/templates/emails/archive_size_exceeded_desk.html.mako b/website/templates/emails/archive_size_exceeded_desk.html.mako index 8b4376c1c0d..18280511c04 100644 --- a/website/templates/emails/archive_size_exceeded_desk.html.mako +++ b/website/templates/emails/archive_size_exceeded_desk.html.mako @@ -4,14 +4,14 @@ <% from website import settings %> -

    Issue registering ${src.title}

    +

    Issue registering ${src_title}

    - User: ${user.fullname} (${user.username}) [${user._id}] + User: ${user_fullname} (${user_username}) [${user._id}] - Tried to register ${src.title} (${url}), but the resulting archive would have exceeded our caps for disk usage (${settings.MAX_ARCHIVE_SIZE / 1024 ** 3}GB). + Tried to register ${src_title} (${url}), but the resulting archive would have exceeded our caps for disk usage (${settings.MAX_ARCHIVE_SIZE / 1024 ** 3}GB).
    A report is included below: diff --git a/website/templates/emails/archive_size_exceeded_user.html.mako b/website/templates/emails/archive_size_exceeded_user.html.mako index d30498bc222..ef852bed8d4 100644 --- a/website/templates/emails/archive_size_exceeded_user.html.mako +++ b/website/templates/emails/archive_size_exceeded_user.html.mako @@ -4,12 +4,12 @@ <% from website import settings %> -

    Issue registering ${src.title}

    +

    Issue registering ${src_title}

    - We cannot archive ${src.title} at this time because the projected size of the registration exceeds our usage limits. You should receive a followup email from our support team shortly. We're sorry for any inconvenience this may have caused. + We cannot archive ${src_title} at this time because the projected size of the registration exceeds our usage limits. You should receive a followup email from our support team shortly. We're sorry for any inconvenience this may have caused. From 9e8bd85811ebda68ff4c13582e9680c206d034fa Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 28 Jul 2025 18:50:36 -0400 Subject: [PATCH 133/176] fix schema response tests --- .../views/test_request_actions_create.py | 2 +- api_tests/users/views/test_user_confirm.py | 2 +- api_tests/users/views/test_user_settings.py | 2 +- .../test_user_settings_reset_password.py | 2 +- notifications.yaml | 6 +++ osf/utils/machines.py | 2 +- osf_tests/test_collection.py | 2 +- osf_tests/test_node.py | 39 ++++++++++--------- osf_tests/test_reviewable.py | 1 - osf_tests/test_schema_responses.py | 4 +- osf_tests/test_user.py | 2 +- scripts/osfstorage/usage_audit.py | 3 +- website/mails/mails.py | 12 ------ website/settings/defaults.py | 7 ---- 14 files changed, 38 insertions(+), 48 deletions(-) diff --git a/api_tests/requests/views/test_request_actions_create.py b/api_tests/requests/views/test_request_actions_create.py index a8b71da01f4..ff277ac0233 100644 --- a/api_tests/requests/views/test_request_actions_create.py +++ b/api_tests/requests/views/test_request_actions_create.py @@ -199,7 +199,7 @@ def test_email_sent_on_approve(self, app, admin, url, node_request): with capture_notifications() as notifications: res = app.post_json_api(url, payload, auth=admin.auth) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST assert res.status_code == 201 node_request.reload() assert initial_state != node_request.machine_state diff --git a/api_tests/users/views/test_user_confirm.py b/api_tests/users/views/test_user_confirm.py index bb2acee47c9..72c35091890 100644 --- a/api_tests/users/views/test_user_confirm.py +++ b/api_tests/users/views/test_user_confirm.py @@ -170,7 +170,7 @@ def test_post_success_link(self, app, confirm_url, user_with_email_verification) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL + assert notifications[0]['type'] == NotificationType.Type.USER_EXTERNAL_LOGIN_LINK_SUCCESS user.reload() assert user.external_identity['ORCID']['0000-0000-0000-0000'] == 'VERIFIED' diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index 847576d9913..530b8455c3c 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -60,7 +60,7 @@ def test_post(self, app, user_one, user_two, url, payload): with capture_notifications() as notification: res = app.post_json_api(url, payload, auth=user_one.auth) assert len(notification) == 1 - assert notification[0]['type'] == NotificationType.Type.USER_ACCOUNT_EXPORT_FORM + assert notification[0]['type'] == NotificationType.Type.USER_REQUEST_EXPORT assert res.status_code == 204 user_one.reload() assert user_one.email_last_sent is not None diff --git a/api_tests/users/views/test_user_settings_reset_password.py 
b/api_tests/users/views/test_user_settings_reset_password.py index 0dbdbaec996..2a9c0e272af 100644 --- a/api_tests/users/views/test_user_settings_reset_password.py +++ b/api_tests/users/views/test_user_settings_reset_password.py @@ -36,7 +36,7 @@ def test_get(self, app, url, user_one): with capture_notifications() as notification: res = app.get(url) assert len(notification) == 1 - assert notification[0]['type'] == NotificationType.Type.RESET_PASSWORD_CONFIRMATION + assert notification[0]['type'] == NotificationType.Type.USER_PASSWORD_RESET assert res.status_code == 200 user_one.reload() diff --git a/notifications.yaml b/notifications.yaml index 7082541e164..c93f214a042 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -51,7 +51,13 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/forgot_password.html.mako' + - name: user_welcome + subject: 'Welcome to OSF' + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/welcome.html.mako' - name: user_welcome_osf4i + subject: 'Welcome to OSF' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/welcome_osf4i.html.mako' diff --git a/osf/utils/machines.py b/osf/utils/machines.py index aadc8d7849a..b686afc6c43 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -179,7 +179,7 @@ def notify_withdraw(self, ev): # If there is no preprint request action, it means the withdrawal is directly initiated by admin/moderator context['force_withdrawal'] = True - context['requester_fullname'] = requester.fullname + context['requester_fullname'] = self.machineable.creator.fullname for contributor in self.machineable.contributors.all(): context['contributor_fullname'] = contributor.fullname if context.get('requester_fullname', None): diff --git a/osf_tests/test_collection.py b/osf_tests/test_collection.py index 0e39c011f65..912a0e5ec93 100644 --- a/osf_tests/test_collection.py +++ b/osf_tests/test_collection.py @@ -133,7 +133,7 @@ def test_node_removed_from_collection_on_privacy_change_notify(self, auth, provi with capture_notifications() as notifications: provider_collected_node.set_privacy('private', auth=auth) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_PRIVATE @mock.patch('osf.models.node.Node.check_privacy_change_viability', mock.Mock()) # mocks the storage usage limits def test_node_removed_from_collection_on_privacy_change_no_provider(self, auth, collected_node, bookmark_collection): diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index f00f822704a..3b04ceba292 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -34,7 +34,7 @@ NodeRelation, Registration, DraftRegistration, - CollectionSubmission + CollectionSubmission, NotificationType ) from addons.wiki.models import WikiPage, WikiVersion @@ -42,6 +42,7 @@ from osf.exceptions import ValidationError, ValidationValueError, UserStateError from osf.utils.workflows import DefaultStates, CollectionSubmissionStates from framework.auth.core import Auth +from tests.utils import capture_notifications from osf_tests.factories import ( AuthUserFactory, @@ -2125,23 +2126,25 @@ def test_set_privacy(self, node, auth): assert node.logs.first().action == NodeLog.MADE_PRIVATE assert last_logged_before_method_call != node.last_logged - 
@mock.patch('osf.models.queued_mail.queue_mail') - def test_set_privacy_sends_mail_default(self, mock_queue, node, auth): - node.set_privacy('private', auth=auth) - node.set_privacy('public', auth=auth) - assert mock_queue.call_count == 1 - - @mock.patch('osf.models.queued_mail.queue_mail') - def test_set_privacy_sends_mail(self, mock_queue, node, auth): - node.set_privacy('private', auth=auth) - node.set_privacy('public', auth=auth, meeting_creation=False) - assert mock_queue.call_count == 1 - - @mock.patch('osf.models.queued_mail.queue_mail') - def test_set_privacy_skips_mail_if_meeting(self, mock_queue, node, auth): - node.set_privacy('private', auth=auth) - node.set_privacy('public', auth=auth, meeting_creation=True) - assert bool(mock_queue.called) is False + def test_set_privacy_sends_mail_default(self, node, auth): + with capture_notifications() as notifications: + node.set_privacy('private', auth=auth) + node.set_privacy('public', auth=auth) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + + def test_set_privacy_sends_mail(self, node, auth): + with capture_notifications() as notifications: + node.set_privacy('private', auth=auth) + node.set_privacy('public', auth=auth, meeting_creation=False) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + + def test_set_privacy_skips_mail_if_meeting(self, node, auth): + with capture_notifications() as notifications: + node.set_privacy('private', auth=auth) + node.set_privacy('public', auth=auth, meeting_creation=True) + assert not notifications def test_set_privacy_can_not_cancel_pending_embargo_for_registration(self, node, user, auth): registration = RegistrationFactory(project=node) diff --git a/osf_tests/test_reviewable.py b/osf_tests/test_reviewable.py index eb3783b71bc..08be5390d98 100644 --- a/osf_tests/test_reviewable.py +++ b/osf_tests/test_reviewable.py @@ -41,7 +41,6 @@ def test_reject_resubmission_sends_emails(self): is_published=False ) assert preprint.machine_state == DefaultStates.INITIAL.value - with capture_notifications() as notifications: preprint.run_submit(user) assert len(notifications) == 1 diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index 7b6250f8f25..f3f831224c6 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -859,7 +859,7 @@ def test_accept_notification_sent_on_admin_approval(self, revised_response, admi with capture_notifications() as notifications: revised_response.approve(user=admin_user) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED def test_moderators_notified_on_admin_approval(self, revised_response, admin_user, moderator): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -869,7 +869,7 @@ def test_moderators_notified_on_admin_approval(self, revised_response, admin_use with capture_notifications() as notifications: revised_response.approve(user=admin_user) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED assert notifications[0]['kwargs']['user'] == moderator def test_no_moderator_notification_on_admin_approval_of_initial_response( diff --git 
a/osf_tests/test_user.py b/osf_tests/test_user.py index 8a8a6f29d72..7025b5a3d2e 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -904,7 +904,7 @@ def test_set_password_notify_default(self, user): user.save() assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PASSWORD_CHANGED + assert notifications[0]['type'] == NotificationType.Type.USER_PASSWORD_RESET def test_set_password_no_notify(self, user): old_password = 'password' diff --git a/scripts/osfstorage/usage_audit.py b/scripts/osfstorage/usage_audit.py index 8a8ffb6c1f1..c50e3f57640 100644 --- a/scripts/osfstorage/usage_audit.py +++ b/scripts/osfstorage/usage_audit.py @@ -25,6 +25,7 @@ from website.app import init_app from website.settings.defaults import GBs +from django.core.mail import send_mail from scripts import utils as scripts_utils # App must be init'd before django models are imported @@ -110,7 +111,7 @@ def main(send_email=False): if lines: if send_email: logger.info('Sending email...') - mails.send_mail('support+scripts@osf.io', mails.EMPTY, body='\n'.join(lines), subject='Script: OsfStorage usage audit', can_change_preferences=False,) + send_mail('support+scripts@osf.io', mails.EMPTY, body='\n'.join(lines), subject='Script: OsfStorage usage audit', can_change_preferences=False,) else: logger.info(f'send_email is False, not sending email') logger.info(f'{len(lines)} offending project(s) and user(s) found') diff --git a/website/mails/mails.py b/website/mails/mails.py index 8ab4ddcabd5..126f4ef8dfc 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -223,18 +223,6 @@ def get_english_article(word): subject='Registration of ' + UNESCAPE + ' complete' ) -WELCOME = Mail( - 'welcome', - subject='Welcome to OSF', - engagement=True -) - -WELCOME_OSF4I = Mail( - 'welcome_osf4i', - subject='Welcome to OSF', - engagement=True -) - DUPLICATE_ACCOUNTS_OSF4I = Mail( 'duplicate_accounts_sso_osf4i', subject='Duplicate OSF Accounts' diff --git a/website/settings/defaults.py b/website/settings/defaults.py index 5d39c01ab90..a68414b6763 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -457,7 +457,6 @@ class CeleryConfig: med_pri_modules = { 'framework.email.tasks', - 'scripts.send_queued_mails', 'scripts.triggered_mails', 'website.mailchimp_utils', 'website.notifications.tasks', @@ -567,7 +566,6 @@ class CeleryConfig: 'scripts.approve_registrations', 'scripts.approve_embargo_terminations', 'scripts.triggered_mails', - 'scripts.send_queued_mails', 'scripts.generate_sitemap', 'scripts.premigrate_created_modified', 'scripts.add_missing_identifiers_to_preprints', @@ -636,11 +634,6 @@ class CeleryConfig: 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, - 'send_queued_mails': { - 'task': 'scripts.send_queued_mails', - 'schedule': crontab(minute=0, hour=17), # Daily 12 p.m. - 'kwargs': {'dry_run': False}, - }, 'new-and-noteworthy': { 'task': 'scripts.populate_new_and_noteworthy_projects', 'schedule': crontab(minute=0, hour=7, day_of_week=6), # Saturday 2:00 a.m. 
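Editor's note: the usage-audit change in this patch swaps website.mails.send_mail for django.core.mail.send_mail but keeps the old helper's argument shape (to_addr, mail, body=..., subject=..., can_change_preferences=...). Django's send_mail signature is send_mail(subject, message, from_email, recipient_list, ...), so the call would presumably need reworking along these lines; the sender-address setting below is an assumption, not something this series pins down:

    from django.core.mail import send_mail
    from website import settings


    def _sketch_usage_audit_alert(lines):
        send_mail(
            subject='Script: OsfStorage usage audit',
            message='\n'.join(lines),
            from_email=settings.FROM_EMAIL,  # assumed sender setting
            recipient_list=['support+scripts@osf.io'],
        )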
From b89846f28c69eda62e23d0feb045642db39ed837 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 29 Jul 2025 08:24:20 -0400 Subject: [PATCH 134/176] fix machine actions --- osf/utils/machines.py | 4 +++- tests/test_preprints.py | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/osf/utils/machines.py b/osf/utils/machines.py index b686afc6c43..03f5da9b967 100644 --- a/osf/utils/machines.py +++ b/osf/utils/machines.py @@ -175,11 +175,13 @@ def notify_withdraw(self, ev): trigger='accept' ) requester = preprint_request_action.target.creator + except PreprintRequestAction.DoesNotExist: # If there is no preprint request action, it means the withdrawal is directly initiated by admin/moderator context['force_withdrawal'] = True + requester = self.machineable.creator - context['requester_fullname'] = self.machineable.creator.fullname + context['requester_fullname'] = requester.fullname for contributor in self.machineable.contributors.all(): context['contributor_fullname'] = contributor.fullname if context.get('requester_fullname', None): diff --git a/tests/test_preprints.py b/tests/test_preprints.py index 9f16edc1e58..6f1eda5876b 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -1998,12 +1998,12 @@ def test_creator_gets_email(self): with capture_notifications() as notifications: self.preprint.set_published(True, auth=Auth(self.user), save=True) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION with capture_notifications() as notifications: self.preprint_branded.set_published(True, auth=Auth(self.user), save=True) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION class TestPreprintOsfStorage(OsfTestCase): From 8e9126aefb94439d90b27d60cc21030dd43996eb Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 29 Jul 2025 08:32:10 -0400 Subject: [PATCH 135/176] fix institution deactivation notifications --- notifications.yaml | 6 ++++++ osf/models/institution.py | 29 ++++++++++++----------------- osf_tests/test_institution.py | 6 +++--- website/mails/mails.py | 10 ---------- 4 files changed, 21 insertions(+), 30 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index c93f214a042..c296c9e41a4 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -61,6 +61,11 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/welcome_osf4i.html.mako' + - name: user_institution_deactivation + subject: "Your OSF login has changed - here's what you need to know!" + __docs__: ... + object_content_type_model_name: osfuser + template: 'website/templates/emails/institution_deactivation.html.mako' - name: user_invite_preprints_osf __docs__: ... object_content_type_model_name: osfuser @@ -111,6 +116,7 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/request_deactivation_complete.html.mako' - name: user_storage_cap_exceeded_announcement + subject: 'Action Required to avoid disruption to your OSF project' __docs__: ... 
object_content_type_model_name: osfuser template: 'website/templates/emails/storage_cap_exceeded_announcement.html.mako' diff --git a/osf/models/institution.py b/osf/models/institution.py index 737233ca7b8..afb9c259a7e 100644 --- a/osf/models/institution.py +++ b/osf/models/institution.py @@ -7,7 +7,6 @@ from django.conf import settings as django_conf_settings from django.contrib.postgres import fields -from django.core.mail import send_mail from django.db import models from django.db.models.signals import post_save from django.dispatch import receiver @@ -15,6 +14,7 @@ from django.utils import timezone from framework import sentry +from osf.models.notification_type import NotificationType from .base import BaseModel, ObjectIDMixin from .contributor import InstitutionalContributor from .institution_affiliation import InstitutionAffiliation @@ -23,7 +23,6 @@ from .storage import InstitutionAssetFile from .validators import validate_email from osf.utils.fields import NonNaiveDateTimeField, LowercaseEmailField -from website import mails from website import settings as website_settings logger = logging.getLogger(__name__) @@ -220,21 +219,17 @@ def _send_deactivation_email(self): attempts = 0 success = 0 for user in self.get_institution_users(): - try: - attempts += 1 - send_mail( - to_addr=user.username, - mail=mails.INSTITUTION_DEACTIVATION, - user=user, - forgot_password_link=f'{website_settings.DOMAIN}{forgot_password}', - osf_support_email=website_settings.OSF_SUPPORT_EMAIL - ) - except Exception as e: - logger.error(f'Failed to send institution deactivation email to user [{user._id}] at [{self._id}]') - sentry.log_exception(e) - continue - else: - success += 1 + attempts += 1 + NotificationType.objects.get( + name=NotificationType.Type.USER_INSTITUTION_DEACTIVATION + ).emit( + user=user, + event_context={ + 'forgot_password_link': f'{website_settings.DOMAIN}{forgot_password}', + 'osf_support_email': website_settings.OSF_SUPPORT_EMAIL + } + ) + success += 1 logger.info(f'Institution deactivation notification email has been ' f'sent to [{success}/{attempts}] users for [{self._id}]') diff --git a/osf_tests/test_institution.py b/osf_tests/test_institution.py index d4442ad8590..98ee5b0bfbb 100644 --- a/osf_tests/test_institution.py +++ b/osf_tests/test_institution.py @@ -157,8 +157,8 @@ def test_send_deactivation_email_call_count(self): with capture_notifications() as notifications: institution._send_deactivation_email() assert len(notifications) == 2 - assert notifications[0]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_DENIED - assert notifications[1]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_DENIED + assert notifications[0]['type'] == NotificationType.Type.USER_INSTITUTION_DEACTIVATION + assert notifications[1]['type'] == NotificationType.Type.USER_INSTITUTION_DEACTIVATION def test_send_deactivation_email_call_args(self): institution = InstitutionFactory() @@ -168,7 +168,7 @@ def test_send_deactivation_email_call_args(self): with capture_notifications() as notifications: institution._send_deactivation_email() assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_DENIED + assert notifications[0]['type'] == NotificationType.Type.USER_INSTITUTION_DEACTIVATION def test_deactivate_inactive_institution_noop(self): institution = InstitutionFactory() diff --git a/website/mails/mails.py b/website/mails/mails.py index 126f4ef8dfc..033f23fc819 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -280,16 +280,6 @@ 
def get_english_article(word): subject='Updated Terms of Use for COS Websites and Services', ) -STORAGE_CAP_EXCEEDED_ANNOUNCEMENT = Mail( - 'storage_cap_exceeded_announcement', - subject='Action Required to avoid disruption to your OSF project', -) - -INSTITUTION_DEACTIVATION = Mail( - 'institution_deactivation', - subject='Your OSF login has changed - here\'s what you need to know!' -) - REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER = Mail( 'registration_bulk_upload_product_owner', subject='Registry Could Not Bulk Upload Registrations' From df19159a8199e100a7b91ac3768cdcc9cae99264 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 29 Jul 2025 09:10:21 -0400 Subject: [PATCH 136/176] fix preprint moderation --- .../views/test_preprint_contributors_list.py | 8 ++++---- .../registrations/views/test_registration_detail.py | 5 +---- api_tests/users/views/test_user_settings.py | 2 +- .../users/views/test_user_settings_reset_password.py | 2 +- osf/models/mixins.py | 9 ++++++--- osf/models/notification_subscription.py | 11 +++-------- osf/models/notification_type.py | 5 ++--- website/notifications/utils.py | 2 +- 8 files changed, 19 insertions(+), 25 deletions(-) diff --git a/api_tests/preprints/views/test_preprint_contributors_list.py b/api_tests/preprints/views/test_preprint_contributors_list.py index a719589563c..4dbbea685f9 100644 --- a/api_tests/preprints/views/test_preprint_contributors_list.py +++ b/api_tests/preprints/views/test_preprint_contributors_list.py @@ -1421,7 +1421,7 @@ def test_add_contributor_signal_if_preprint( ) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT def test_add_unregistered_contributor_sends_email( self, app, user, url_preprint_contribs): @@ -1440,7 +1440,7 @@ def test_add_unregistered_contributor_sends_email( auth=user.auth ) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT assert res.status_code == 201 def test_add_unregistered_contributor_signal_if_preprint(self, app, user, url_preprint_contribs): @@ -1460,7 +1460,7 @@ def test_add_unregistered_contributor_signal_if_preprint(self, app, user, url_pr ) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT def test_add_contributor_invalid_send_email_param(self, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=true' @@ -1541,7 +1541,7 @@ def test_contributor_added_signal_not_specified(self, app, user, url_preprint_co ) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_CONTRIBUTOR_ADDED_OSF_PREPRINT + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT def test_contributor_added_not_sent_if_unpublished( self, app, user, preprint_unpublished): diff --git a/api_tests/registrations/views/test_registration_detail.py b/api_tests/registrations/views/test_registration_detail.py index 1be2d14c3be..04aba5ac394 100644 --- a/api_tests/registrations/views/test_registration_detail.py +++ 
b/api_tests/registrations/views/test_registration_detail.py @@ -752,10 +752,7 @@ def test_initiate_withdraw_registration_fails( assert res.status_code == 400 def test_initiate_withdrawal_success(self, app, user, public_registration, public_url, public_payload): - with capture_notifications() as notifications: - res = app.put_json_api(public_url, public_payload, auth=user.auth) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.USER_REVIEWS + res = app.put_json_api(public_url, public_payload, auth=user.auth) assert res.status_code == 200 assert res.json['data']['attributes']['pending_withdrawal'] is True public_registration.refresh_from_db() diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index 530b8455c3c..927b7892d71 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -60,7 +60,7 @@ def test_post(self, app, user_one, user_two, url, payload): with capture_notifications() as notification: res = app.post_json_api(url, payload, auth=user_one.auth) assert len(notification) == 1 - assert notification[0]['type'] == NotificationType.Type.USER_REQUEST_EXPORT + assert notification[0]['type'] == NotificationType.Type.DESK_REQUEST_EXPORT assert res.status_code == 204 user_one.reload() assert user_one.email_last_sent is not None diff --git a/api_tests/users/views/test_user_settings_reset_password.py b/api_tests/users/views/test_user_settings_reset_password.py index 2a9c0e272af..d69eb87a692 100644 --- a/api_tests/users/views/test_user_settings_reset_password.py +++ b/api_tests/users/views/test_user_settings_reset_password.py @@ -36,7 +36,7 @@ def test_get(self, app, url, user_one): with capture_notifications() as notification: res = app.get(url) assert len(notification) == 1 - assert notification[0]['type'] == NotificationType.Type.USER_PASSWORD_RESET + assert notification[0]['type'] == NotificationType.Type.USER_FORGOT_PASSWORD assert res.status_code == 200 user_one.reload() diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 9405574970d..d224c61ac7c 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -1102,12 +1102,15 @@ def add_user_to_subscription(self, user, subscription): ) def remove_user_from_subscription(self, user, subscription): + notification_type = NotificationType.objects.get( + name=subscription, + ) subscriptions = NotificationSubscription.objects.filter( - user=user, - notification_type=NotificationType.objects.get(name=subscription), + notification_type=notification_type, + user=user ) if subscriptions: - subscriptions.get().remove_user_from_subscription(user) + subscriptions.get().remove_user_from_subscription() class TaxonomizableMixin(models.Model): diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py index 665c67029ff..7dc79047a13 100644 --- a/osf/models/notification_subscription.py +++ b/osf/models/notification_subscription.py @@ -2,7 +2,7 @@ from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError -from osf.models.notification_type import get_default_frequency_choices, FrequencyChoices +from osf.models.notification_type import get_default_frequency_choices from osf.models.notification import Notification from .base import BaseModel @@ -100,12 +100,7 @@ def _id(self): case _: raise NotImplementedError() - def remove_user_from_subscription(self, 
user): + def remove_user_from_subscription(self): """ """ - from osf.models.notification_subscription import NotificationSubscription - notification, _ = NotificationSubscription.objects.update_or_create( - user=user, - notification_type=self, - defaults={'message_frequency': FrequencyChoices.NONE.value} - ) + self.delete() diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 7e7bf72fd6e..6cbb3f1d2df 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -261,11 +261,10 @@ def remove_user_from_subscription(self, user): """ """ from osf.models.notification_subscription import NotificationSubscription - notification, _ = NotificationSubscription.objects.update_or_create( + notification, _ = NotificationSubscription.objects.filter( user=user, notification_type=self, - defaults={'message_frequency': FrequencyChoices.NONE.value} - ) + ).delete() def __str__(self) -> str: return self.name diff --git a/website/notifications/utils.py b/website/notifications/utils.py index b86792f348a..7ccfcf88ede 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -85,7 +85,7 @@ def remove_contributor_from_subscriptions(node, user): ) for subscription in node_subscriptions: - subscription.remove_user_from_subscription(user) + subscription.remove_user_from_subscription() @signals.node_deleted.connect From ddd1cbb2cf91c3bc97556c747c41bdb8675db7f1 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 29 Jul 2025 10:51:21 -0400 Subject: [PATCH 137/176] update node contributor view to include preprints as resource --- api/draft_registrations/serializers.py | 4 +- api/nodes/serializers.py | 19 ++++--- api/nodes/views.py | 4 +- api/preprints/serializers.py | 4 +- api/registrations/serializers.py | 4 +- .../views/test_node_contributors_list.py | 4 +- .../views/test_preprint_contributors_list.py | 53 ++++++++++--------- osf/models/mixins.py | 5 +- osf/models/registrations.py | 5 +- 9 files changed, 54 insertions(+), 48 deletions(-) diff --git a/api/draft_registrations/serializers.py b/api/draft_registrations/serializers.py index 84d0c48423c..6975393eb02 100644 --- a/api/draft_registrations/serializers.py +++ b/api/draft_registrations/serializers.py @@ -13,7 +13,7 @@ NodeLicenseSerializer, NodeLicenseRelationshipField, NodeContributorsSerializer, - NodeContributorsCreateSerializer, + ResourceContributorsCreateSerializer, NodeContributorDetailSerializer, RegistrationSchemaRelationshipField, ) @@ -233,7 +233,7 @@ def get_absolute_url(self, obj): ) -class DraftRegistrationContributorsCreateSerializer(NodeContributorsCreateSerializer, DraftRegistrationContributorsSerializer): +class DraftRegistrationContributorsCreateSerializer(ResourceContributorsCreateSerializer, DraftRegistrationContributorsSerializer): """ Overrides DraftRegistrationContributorsSerializer to add email, full_name, send_email, and non-required index and users field. diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index 80cc400b73d..c52787f2a6c 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -1206,7 +1206,7 @@ def get_unregistered_contributor(self, obj): return unclaimed_records.get('name', None) -class NodeContributorsCreateSerializer(NodeContributorsSerializer): +class ResourceContributorsCreateSerializer(NodeContributorsSerializer): """ Overrides NodeContributorsSerializer to add email, full_name, send_email, and non-required index and users field. 
""" @@ -1228,13 +1228,13 @@ class NodeContributorsCreateSerializer(NodeContributorsSerializer): def get_proposed_permissions(self, validated_data): return validated_data.get('permission') or osf_permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS - def validate_data(self, node, user_id=None, full_name=None, email=None, index=None): + def validate_data(self, resource, user_id=None, full_name=None, email=None, index=None): if not user_id and not full_name: raise exceptions.ValidationError(detail='A user ID or full name must be provided to add a contributor.') if user_id and email: raise exceptions.ValidationError(detail='Do not provide an email when providing this user_id.') - if index is not None and index > len(node.contributors): - raise exceptions.ValidationError(detail=f'{index} is not a valid contributor index for node with id {node._id}') + if index is not None and index > len(resource.contributors): + raise exceptions.ValidationError(detail=f'{index} is not a valid contributor index for node with id {resource._id}') def create(self, validated_data): id = validated_data.get('_id') @@ -1242,7 +1242,7 @@ def create(self, validated_data): index = None if '_order' in validated_data: index = validated_data.pop('_order') - node = self.context['resource'] + resource = self.context['resource'] auth = Auth(self.context['request'].user) full_name = validated_data.get('full_name') bibliographic = validated_data.get('bibliographic') @@ -1250,29 +1250,29 @@ def create(self, validated_data): permissions = self.get_proposed_permissions(validated_data) self.validate_data( - node, + resource, user_id=id, full_name=full_name, email=email, index=index, ) - if email_preference not in self.email_preferences: raise exceptions.ValidationError(detail=f'{email_preference} is not a valid email preference.') contributor = OSFUser.load(id) if email or (contributor and contributor.is_registered): + is_published = getattr(resource, 'is_published', False) notification_type = { 'false': False, 'default': NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, 'draft_registration': NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT, - 'preprint': NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT, + 'preprint': NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT if is_published else False, }[email_preference] else: notification_type = False try: - contributor_obj = node.add_contributor_registered_or_not( + contributor_obj = resource.add_contributor_registered_or_not( auth=auth, user_id=id, email=email, @@ -1288,7 +1288,6 @@ def create(self, validated_data): raise exceptions.NotFound(detail=e.args[0]) return contributor_obj - class NodeContributorDetailSerializer(NodeContributorsSerializer): """ Overrides node contributor serializer to add additional methods diff --git a/api/nodes/views.py b/api/nodes/views.py index 50ba08cb7fe..a105634ca82 100644 --- a/api/nodes/views.py +++ b/api/nodes/views.py @@ -110,7 +110,7 @@ NodeContributorsSerializer, NodeContributorDetailSerializer, NodeInstitutionsRelationshipSerializer, - NodeContributorsCreateSerializer, + ResourceContributorsCreateSerializer, NodeViewOnlyLinkSerializer, NodeViewOnlyLinkUpdateSerializer, NodeSettingsSerializer, @@ -442,7 +442,7 @@ def get_serializer_class(self): if self.request.method == 'PUT' or self.request.method == 'PATCH' or self.request.method == 'DELETE': return NodeContributorDetailSerializer elif self.request.method == 'POST': - return NodeContributorsCreateSerializer + return ResourceContributorsCreateSerializer else: return 
NodeContributorsSerializer diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py index c0e867510a5..6455dfb8328 100644 --- a/api/preprints/serializers.py +++ b/api/preprints/serializers.py @@ -29,7 +29,7 @@ NodeLicenseSerializer, NodeContributorsSerializer, NodeStorageProviderSerializer, - NodeContributorsCreateSerializer, + ResourceContributorsCreateSerializer, NodeContributorDetailSerializer, get_license_details, NodeTagField, @@ -588,7 +588,7 @@ def get_absolute_url(self, obj): ) -class PreprintContributorsCreateSerializer(NodeContributorsCreateSerializer, PreprintContributorsSerializer): +class PreprintContributorsCreateSerializer(ResourceContributorsCreateSerializer, PreprintContributorsSerializer): """ Overrides PreprintContributorsSerializer to add email, full_name, send_email, and non-required index and users field. diff --git a/api/registrations/serializers.py b/api/registrations/serializers.py index 786d76ddccb..10a96a3735b 100644 --- a/api/registrations/serializers.py +++ b/api/registrations/serializers.py @@ -24,7 +24,7 @@ NodeLinksSerializer, NodeLicenseSerializer, NodeContributorDetailSerializer, - NodeContributorsCreateSerializer, + ResourceContributorsCreateSerializer, RegistrationProviderRelationshipField, get_license_details, ) @@ -934,7 +934,7 @@ def update(self, instance, validated_data): ) -class RegistrationContributorsCreateSerializer(NodeContributorsCreateSerializer, RegistrationContributorsSerializer): +class RegistrationContributorsCreateSerializer(ResourceContributorsCreateSerializer, RegistrationContributorsSerializer): """ Overrides RegistrationContributorsSerializer to add email, full_name, send_email, and non-required index and users field. diff --git a/api_tests/nodes/views/test_node_contributors_list.py b/api_tests/nodes/views/test_node_contributors_list.py index 6983307b1fc..9a85bfddad2 100644 --- a/api_tests/nodes/views/test_node_contributors_list.py +++ b/api_tests/nodes/views/test_node_contributors_list.py @@ -4,7 +4,7 @@ import random from api.base.settings.defaults import API_BASE -from api.nodes.serializers import NodeContributorsCreateSerializer +from api.nodes.serializers import ResourceContributorsCreateSerializer from framework.auth.core import Auth from osf.models.notification_type import NotificationType from osf_tests.factories import ( @@ -1153,7 +1153,7 @@ class TestNodeContributorCreateValidation(NodeCRUDTestCase): @pytest.fixture() def create_serializer(self): - return NodeContributorsCreateSerializer + return ResourceContributorsCreateSerializer @pytest.fixture() def validate_data(self, create_serializer): diff --git a/api_tests/preprints/views/test_preprint_contributors_list.py b/api_tests/preprints/views/test_preprint_contributors_list.py index 4dbbea685f9..26716899a61 100644 --- a/api_tests/preprints/views/test_preprint_contributors_list.py +++ b/api_tests/preprints/views/test_preprint_contributors_list.py @@ -5,7 +5,7 @@ from django.utils import timezone from api.base.settings.defaults import API_BASE -from api.nodes.serializers import NodeContributorsCreateSerializer +from api.nodes.serializers import ResourceContributorsCreateSerializer from framework.auth.core import Auth from osf.models import PreprintLog, NotificationType from osf_tests.factories import ( @@ -1294,19 +1294,19 @@ class TestPreprintContributorCreateValidation(NodeCRUDTestCase): @pytest.fixture() def validate_data(self): - return NodeContributorsCreateSerializer.validate_data + return ResourceContributorsCreateSerializer.validate_data def 
test_add_contributor_validation(self, preprint_published, validate_data): # test_add_contributor_validation_user_id validate_data( - NodeContributorsCreateSerializer(), + ResourceContributorsCreateSerializer(), preprint_published, user_id='abcde') # test_add_contributor_validation_user_id_fullname validate_data( - NodeContributorsCreateSerializer(), + ResourceContributorsCreateSerializer(), preprint_published, user_id='abcde', full_name='Kanye') @@ -1314,7 +1314,7 @@ def test_add_contributor_validation(self, preprint_published, validate_data): # test_add_contributor_validation_user_id_email with pytest.raises(exceptions.ValidationError): validate_data( - NodeContributorsCreateSerializer(), + ResourceContributorsCreateSerializer(), preprint_published, user_id='abcde', email='kanye@west.com') @@ -1322,7 +1322,7 @@ def test_add_contributor_validation(self, preprint_published, validate_data): # test_add_contributor_validation_user_id_fullname_email with pytest.raises(exceptions.ValidationError): validate_data( - NodeContributorsCreateSerializer(), + ResourceContributorsCreateSerializer(), preprint_published, user_id='abcde', full_name='Kanye', @@ -1330,20 +1330,20 @@ def test_add_contributor_validation(self, preprint_published, validate_data): # test_add_contributor_validation_fullname validate_data( - NodeContributorsCreateSerializer(), + ResourceContributorsCreateSerializer(), preprint_published, full_name='Kanye') # test_add_contributor_validation_email with pytest.raises(exceptions.ValidationError): validate_data( - NodeContributorsCreateSerializer(), + ResourceContributorsCreateSerializer(), preprint_published, email='kanye@west.com') # test_add_contributor_validation_fullname_email validate_data( - NodeContributorsCreateSerializer(), + ResourceContributorsCreateSerializer(), preprint_published, full_name='Kanye', email='kanye@west.com') @@ -1421,7 +1421,7 @@ def test_add_contributor_signal_if_preprint( ) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT + assert notifications[0]['type'] == NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT def test_add_unregistered_contributor_sends_email( self, app, user, url_preprint_contribs): @@ -1440,7 +1440,7 @@ def test_add_unregistered_contributor_sends_email( auth=user.auth ) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT + assert notifications[0]['type'] == NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT assert res.status_code == 201 def test_add_unregistered_contributor_signal_if_preprint(self, app, user, url_preprint_contribs): @@ -1460,7 +1460,7 @@ def test_add_unregistered_contributor_signal_if_preprint(self, app, user, url_pr ) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT + assert notifications[0]['type'] == NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT def test_add_contributor_invalid_send_email_param(self, app, user, url_preprint_contribs): url = f'{url_preprint_contribs}?send_email=true' @@ -1541,22 +1541,23 @@ def test_contributor_added_signal_not_specified(self, app, user, url_preprint_co ) assert res.status_code == 201 assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_CONTRIBUTOR_ADDED_PREPRINT + assert notifications[0]['type'] == 
NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT - def test_contributor_added_not_sent_if_unpublished( - self, app, user, preprint_unpublished): - url = f'/{API_BASE}preprints/{preprint_unpublished._id}/contributors/?send_email=preprint' - payload = { - 'data': { - 'type': 'contributors', - 'attributes': { - 'full_name': 'Kanye West', - 'email': 'kanye@west.com' - } - } - } + def test_contributor_added_not_sent_if_unpublished(self, app, user, preprint_unpublished): with capture_notifications() as notifications: - res = app.post_json_api(url, payload, auth=user.auth) + res = app.post_json_api( + f'/{API_BASE}preprints/{preprint_unpublished._id}/contributors/?send_email=preprint', + { + 'data': { + 'type': 'contributors', + 'attributes': { + 'full_name': 'Jalen Hurt', + 'email': 'one@eagles.com' + } + } + }, + auth=user.auth + ) assert not notifications assert res.status_code == 201 diff --git a/osf/models/mixins.py b/osf/models/mixins.py index d224c61ac7c..0dc9f1c1361 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -1428,7 +1428,10 @@ def add_contributor( if isinstance(self, AbstractNode): notification_type = NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT elif isinstance(self, Preprint): - notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT + if self.is_published: + notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT + else: + notification_type = False elif isinstance(self, DraftRegistration): notification_type = NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT diff --git a/osf/models/registrations.py b/osf/models/registrations.py index 1ef8689643f..5663ccae063 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -653,7 +653,10 @@ def retract_registration(self, user, justification=None, save=True, moderator_in f'User {user} does not have moderator privileges on Provider {self.provider}') retraction = self._initiate_retraction( - user, justification, moderator_initiated=moderator_initiated) + user, + justification, + moderator_initiated=moderator_initiated + ) self.retraction = retraction self.registered_from.add_log( action=NodeLog.RETRACTION_INITIATED, From c1c12bd07adbb7498021f916bd4eca1a34688098 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 29 Jul 2025 13:24:36 -0400 Subject: [PATCH 138/176] clean up file notification events --- addons/base/views.py | 34 +++--- notifications.yaml | 65 ++++++----- osf/models/notification_type.py | 16 +-- tests/test_events.py | 154 ++++++++++---------------- website/mails/mails.py | 10 -- website/notifications/emails.py | 104 +---------------- website/notifications/events/base.py | 17 +-- website/notifications/events/files.py | 6 +- website/notifications/events/utils.py | 141 ----------------------- website/notifications/utils.py | 8 +- website/project/views/comment.py | 3 +- 11 files changed, 137 insertions(+), 421 deletions(-) delete mode 100644 website/notifications/events/utils.py diff --git a/addons/base/views.py b/addons/base/views.py index 4547112e44b..4c3d01bacdf 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -611,25 +611,24 @@ def create_waterbutler_log(payload, **kwargs): file_signals.file_updated.send( target=node, user=user, - event_type=action, payload=payload ) - match f'node_{action}': - case NotificationType.Type.NODE_FILE_ADDED: - notification = NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_ADDED) - case NotificationType.Type.NODE_FILE_REMOVED: - notification = 
NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_REMOVED) - case NotificationType.Type.NODE_FILE_UPDATED: - notification = NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) - case NotificationType.Type.NODE_ADDON_FILE_RENAMED: - notification = NotificationType.objects.get(name=NotificationType.Type.NODE_ADDON_FILE_RENAMED) - case NotificationType.Type.NODE_ADDON_FILE_COPIED: - notification = NotificationType.objects.get(name=NotificationType.Type.NODE_ADDON_FILE_COPIED) - case NotificationType.Type.NODE_ADDON_FILE_REMOVED: - notification = NotificationType.objects.get(name=NotificationType.Type.NODE_ADDON_FILE_REMOVED) - case NotificationType.Type.NODE_ADDON_FILE_MOVED: - notification = NotificationType.objects.get(name=NotificationType.Type.NODE_ADDON_FILE_MOVED) + match action: + case NotificationType.Type.FILE_ADDED: + notification = NotificationType.objects.get(name=NotificationType.Type.FILE_ADDED) + case NotificationType.Type.FILE_REMOVED: + notification = NotificationType.objects.get(name=NotificationType.Type.FILE_REMOVED) + case NotificationType.Type.FILE_UPDATED: + notification = NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED) + case NotificationType.Type.ADDON_FILE_RENAMED: + notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_RENAMED) + case NotificationType.Type.ADDON_FILE_COPIED: + notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_COPIED) + case NotificationType.Type.ADDON_FILE_REMOVED: + notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_REMOVED) + case NotificationType.Type.ADDON_FILE_MOVED: + notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_MOVED) case _: raise NotImplementedError(f'action {action} not implemented') @@ -647,12 +646,13 @@ def create_waterbutler_log(payload, **kwargs): @file_signals.file_updated.connect -def addon_delete_file_node(self, target, user, event_type, payload): +def addon_delete_file_node(self, target, user, payload): """ Get addon BaseFileNode(s), move it into the TrashedFileNode collection and remove it from StoredFileNode. Required so that the guids of deleted addon files are not re-pointed when an addon file or folder is moved or renamed. """ + event_type = payload['action'] if event_type == 'file_removed' and payload.get('provider', None) != 'osfstorage': provider = payload['provider'] path = payload['metadata']['path'] diff --git a/notifications.yaml b/notifications.yaml index c296c9e41a4..62f636b8546 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -243,7 +243,7 @@ notification_types: template: 'website/templates/emails/reviews_resubmission_confirmation.html.mako' #### NODE - - name: node_file_updated + - name: node_wiki_updated __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' @@ -251,34 +251,6 @@ notification_types: __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' - - name: node_file_added - __docs__: ... - object_content_type_model_name: abstractnode - template: 'website/templates/emails/file_updated.html.mako' - - name: node_file_removed - __docs__: ... - object_content_type_model_name: abstractnode - template: 'website/templates/emails/file_updated.html.mako' - - name: node_addon_file_renamed - __docs__: ... 
- object_content_type_model_name: abstractnode - template: 'website/templates/emails/file_updated.html.mako' - - name: node_addon_file_copied - __docs__: ... - object_content_type_model_name: abstractnode - template: 'website/templates/emails/file_updated.html.mako' - - name: node_addon_file_moved - __docs__: ... - object_content_type_model_name: abstractnode - template: 'website/templates/emails/file_updated.html.mako' - - name: node_addon_file_removed - __docs__: ... - object_content_type_model_name: abstractnode - template: 'website/templates/emails/file_updated.html.mako' - - name: node_wiki_updated - __docs__: ... - object_content_type_model_name: abstractnode - template: 'website/templates/emails/file_updated.html.mako' - name: node_institutional_access_request __docs__: ... object_content_type_model_name: abstractnode @@ -308,10 +280,12 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/project_affiliation_changed.html.mako' - name: node_request_access_denied + subject: 'Your access request to an OSF project has been declined' __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/access_request_rejected.html.mako' - name: node_access_request_submitted + subject: 'An OSF user has requested access to your ${node.project_or_component}' __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/access_request_submitted.html.mako' @@ -446,3 +420,36 @@ notification_types: __docs__: ... object_content_type_model_name: draftregistration template: 'website/templates/emails/contributor_added_draft_registration.html.mako' +### Files + - name: file_updated + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' + - name: file_added + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' + - name: file_removed + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' + - name: addon_file_renamed + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' + - name: addon_file_copied + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' + - name: addon_file_moved + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' + - name: addon_file_removed + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' + - name: folder_created + __docs__: ... 
+ object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 6cbb3f1d2df..55fe70883df 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -98,18 +98,20 @@ class Type(str, Enum): NODE_PENDING_REGISTRATION_ADMIN = 'node_pending_registration_admin' NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' NODE_PENDING_EMBARGO_TERMINATION_ADMIN = 'node_pending_embargo_termination_admin' - NODE_FILE_UPDATED = 'node_file_updated' - NODE_FILE_ADDED = 'node_file_added' - NODE_FILE_REMOVED = 'node_file_removed' - NODE_ADDON_FILE_COPIED = 'node_addon_file_copied' - NODE_ADDON_FILE_RENAMED = 'node_addon_file_renamed' - NODE_ADDON_FILE_MOVED = 'node_addon_file_moved' - NODE_ADDON_FILE_REMOVED = 'node_addon_file_removed' NODE_SCHEMA_RESPONSE_REJECTED = 'node_schema_response_rejected' NODE_SCHEMA_RESPONSE_APPROVED = 'node_schema_response_approved' NODE_SCHEMA_RESPONSE_SUBMITTED = 'node_schema_response_submitted' NODE_SCHEMA_RESPONSE_INITIATED = 'node_schema_response_initiated' + FILE_UPDATED = 'file_updated' + FILE_ADDED = 'file_added' + FILE_REMOVED = 'file_removed' + ADDON_FILE_COPIED = 'addon_file_copied' + ADDON_FILE_RENAMED = 'addon_file_renamed' + ADDON_FILE_MOVED = 'addon_file_moved' + ADDON_FILE_REMOVED = 'addon_file_removed' + FOLDER_CREATED = 'folder_created' + # Provider notifications PROVIDER_NEW_PENDING_SUBMISSIONS = 'provider_new_pending_submissions' PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS = 'provider_new_pending_withdraw_requests' diff --git a/tests/test_events.py b/tests/test_events.py index bd79036b384..cef8987f113 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -11,7 +11,6 @@ FileAdded, FileRemoved, FolderCreated, FileUpdated, AddonFileCopied, AddonFileMoved, AddonFileRenamed, ) -from website.notifications.events import utils from addons.base import signals from framework.auth import Auth from osf_tests import factories @@ -58,9 +57,6 @@ def setUp(self): ] } - def test_list_of_files(self): - assert ['e', 'f', 'c', 'd'] == utils.list_of_files(self.tree) - class TestEventExists(OsfTestCase): # Add all possible called events here to ensure that the Event class can @@ -112,21 +108,6 @@ def test_get_file_renamed(self): assert isinstance(event, AddonFileRenamed) -class TestSignalEvent(OsfTestCase): - def setUp(self): - super().setUp() - self.user = factories.UserFactory() - self.auth = Auth(user=self.user) - self.node = factories.ProjectFactory(creator=self.user) - - @mock.patch('website.notifications.events.files.FileAdded.perform') - def test_event_signal(self, mock_perform): - signals.file_updated.send( - user=self.user, target=self.node, event_type='file_added', payload=file_payload - ) - assert mock_perform.called - - class TestFileUpdated(OsfTestCase): def setUp(self): super().setUp() @@ -138,7 +119,7 @@ def setUp(self): self.sub = factories.NotificationSubscriptionFactory( object_id=self.project.id, content_type=ContentType.objects.get_for_model(self.project), - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) + notification_type=NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED) ) self.sub.save() self.event = event_registry['file_updated'](self.user_2, self.project, 'file_updated', payload=file_payload) @@ -148,11 +129,11 @@ def test_info_formed_correct(self): assert f'updated file "{materialized.lstrip("/")}".' 
== self.event.html_message assert f'updated file "{materialized.lstrip("/")}".' == self.event.text_message - @mock.patch('website.notifications.emails.notify') - def test_file_updated(self, mock_notify): - self.event.perform() - # notify('exd', 'file_updated', 'user', self.project, timezone.now()) - assert mock_notify.called + def test_file_updated(self): + with capture_notifications() as notifications: + self.event.perform() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.FILE_UPDATED class TestFileAdded(OsfTestCase): @@ -164,7 +145,7 @@ def setUp(self): self.project_subscription = factories.NotificationSubscriptionFactory( object_id=self.project.id, content_type=ContentType.objects.get_for_model(self.project), - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) + notification_type=NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED) ) self.project_subscription.save() self.user2 = factories.UserFactory() @@ -175,11 +156,11 @@ def test_info_formed_correct(self): assert f'added file "{materialized.lstrip("/")}".' == self.event.html_message assert f'added file "{materialized.lstrip("/")}".' == self.event.text_message - @mock.patch('website.notifications.emails.notify') - def test_file_added(self, mock_notify): - self.event.perform() - # notify('exd', 'file_updated', 'user', self.project, timezone.now()) - assert mock_notify.called + def test_file_added(self): + with capture_notifications() as notification: + self.event.perform() + assert len(notification) == 1 + assert notification[0]['type'] == NotificationType.Type.FILE_ADDED class TestFileRemoved(OsfTestCase): @@ -191,7 +172,7 @@ def setUp(self): self.project_subscription = factories.NotificationSubscriptionFactory( object_id=self.project.id, content_type=ContentType.objects.get_for_model(self.project), - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_REMOVED) + notification_type=NotificationType.objects.get(name=NotificationType.Type.FILE_REMOVED) ) self.project_subscription.object_id = self.project.id self.project_subscription.content_type = ContentType.objects.get_for_model(self.project) @@ -202,21 +183,21 @@ def setUp(self): ) def test_info_formed_correct_file(self): - assert NotificationType.Type.NODE_FILE_UPDATED == self.event.event_type + assert NotificationType.Type.FILE_UPDATED == self.event.event_type assert f'removed file "{materialized.lstrip("/")}".' == self.event.html_message assert f'removed file "{materialized.lstrip("/")}".' == self.event.text_message def test_info_formed_correct_folder(self): - assert NotificationType.Type.NODE_FILE_UPDATED == self.event.event_type + assert NotificationType.Type.FILE_UPDATED == self.event.event_type self.event.payload['metadata']['materialized'] += '/' assert f'removed folder "{materialized.lstrip("/")}/".' == self.event.html_message assert f'removed folder "{materialized.lstrip("/")}/".' 
== self.event.text_message - @mock.patch('website.notifications.emails.notify') - def test_file_removed(self, mock_notify): - self.event.perform() - # notify('exd', 'file_updated', 'user', self.project, timezone.now()) - assert mock_notify.called + def test_file_removed(self): + with capture_notifications() as notifications: + self.event.perform() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.FILE_REMOVED class TestFolderCreated(OsfTestCase): @@ -227,7 +208,7 @@ def setUp(self): self.project = factories.ProjectFactory() self.project_subscription = factories.NotificationSubscriptionFactory( user=self.user, - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED), + notification_type=NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED), ) self.project_subscription.save() self.user2 = factories.UserFactory() @@ -236,14 +217,15 @@ def setUp(self): ) def test_info_formed_correct(self): - assert NotificationType.Type.NODE_FILE_UPDATED == self.event.event_type + assert NotificationType.Type.FILE_UPDATED == self.event.event_type assert 'created folder "Three/".' == self.event.html_message assert 'created folder "Three/".' == self.event.text_message - @mock.patch('website.notifications.emails.notify') - def test_folder_added(self, mock_notify): - self.event.perform() - assert mock_notify.called + def test_folder_added(self): + with capture_notifications() as notifications: + self.event.perform() + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.FOLDER_CREATED class TestFolderFileRenamed(OsfTestCase): @@ -311,14 +293,14 @@ def setUp(self): self.sub = factories.NotificationSubscriptionFactory( object_id=self.project.id, content_type=ContentType.objects.get_for_model(self.project), - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) + notification_type=NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED) ) self.sub.save() # for private node self.private_sub = factories.NotificationSubscriptionFactory( object_id=self.private_node.id, content_type=ContentType.objects.get_for_model(self.private_node), - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) + notification_type=NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED) ) self.private_sub.save() # for file subscription @@ -338,51 +320,53 @@ def test_info_formed_correct(self): def test_user_performing_action_no_email(self): # Move Event: Makes sure user who performed the action is not # included in the notifications - # self.sub.email_digest.add(self.user_2) + self.sub.user = self.user_2 self.sub.save() with capture_notifications() as notifications: self.event.perform() - assert not notifications + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.ADDON_FILE_MOVED + assert notifications[0]['kwargs']['user'] == self.user_2 def test_perform_store_called_once(self): - # self.sub.email_transactional.add(self.user_1) + self.sub.user = self.user_1 self.sub.save() with capture_notifications() as notifications: self.event.perform() assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_ADDON_FILE_MOVED + assert notifications[0]['type'] == NotificationType.Type.ADDON_FILE_MOVED def test_perform_store_one_of_each(self): # Move Event: Tests that store_emails is called 3 times, one in # each category - # 
self.sub.email_transactional.add(self.user_1) + self.sub.user = self.user_1 + self.sub.save() self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.project.save() self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.private_node.save() - # self.sub.email_digest.add(self.user_3) + self.sub.user = self.user_3 self.sub.save() self.project.add_contributor(self.user_4, permissions=WRITE, auth=self.auth) self.project.save() - # self.file_sub.email_digest.add(self.user_4) + self.sub.user = self.user_4 + self.sub.save() self.file_sub.save() with capture_notifications() as notifications: self.event.perform() - assert len(notifications) == 3 - assert notifications[0]['type'] == NotificationType.Type.NODE_FILE_UPDATED - assert notifications[1]['type'] == NotificationType.Type.NODE_FILE_UPDATED - assert notifications[2]['type'] == NotificationType.Type.NODE_FILE_UPDATED + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.ADDON_FILE_MOVED def test_remove_user_sent_once(self): # Move Event: Tests removed user is removed once. Regression self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.project.save() - # self.file_sub.email_digest.add(self.user_3) + self.file_sub.user = self.user_3 self.file_sub.save() with capture_notifications() as notifications: self.event.perform() assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_ADDON_FILE_MOVED + assert notifications[0]['type'] == NotificationType.Type.ADDON_FILE_MOVED class TestFileCopied(OsfTestCase): @@ -407,14 +391,14 @@ def setUp(self): self.sub = factories.NotificationSubscriptionFactory( object_id=self.project.id, content_type=ContentType.objects.get_for_model(self.project), - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) + notification_type=NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED) ) self.sub.save() # for private node self.private_sub = factories.NotificationSubscriptionFactory( object_id=self.private_node.id, content_type=ContentType.objects.get_for_model(self.private_node), - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) + notification_type=NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED) ) self.private_sub.save() # for file subscription @@ -436,33 +420,34 @@ def test_info_correct(self): ' Storage in Consolidate.') == self.event.text_message def test_copied_one_of_each(self): - # Copy Event: Tests that store_emails is called 2 times, two with + # Copy Event: Tests that emit is called 2 times, two with # permissions, one without - # self.sub.email_transactional.add(self.user_1) + self.sub.user = self.user_1 + self.sub.save() self.project.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.project.save() self.private_node.add_contributor(self.user_3, permissions=WRITE, auth=self.auth) self.private_node.save() - # self.sub.email_digest.add(self.user_3) + self.sub.user = self.user_3 self.sub.save() self.project.add_contributor(self.user_4, permissions=WRITE, auth=self.auth) self.project.save() - # self.file_sub.email_digest.add(self.user_4) + self.file_sub.user = self.user_4 self.file_sub.save() with capture_notifications() as notifications: self.event.perform() - assert len(notifications) == 2 - assert notifications[0]['type'] == NotificationType.Type.NODE_FILE_UPDATED - assert notifications[1]['type'] == 
NotificationType.Type.NODE_FILE_UPDATED + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.ADDON_FILE_COPIED def test_user_performing_action_no_email(self): # Move Event: Makes sure user who performed the action is not # included in the notifications - # self.sub.email_digest.add(self.user_2) + self.sub.user = self.user_2 self.sub.save() with capture_notifications() as notifications: self.event.perform() - assert not notifications + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.ADDON_FILE_COPIED class TestSubscriptionManipulations(OsfTestCase): @@ -495,33 +480,6 @@ def setUp(self): self.dup_1_3 = {email_transactional: ['e1234', 'f1234'], 'none': ['h1234', 'g1234'], 'email_digest': ['a1234', 'c1234']} - def test_subscription_user_difference(self): - result = utils.subscriptions_users_difference(self.emails_1, self.emails_3) - assert self.diff_1_3 == result - - def test_subscription_user_union(self): - result = utils.subscriptions_users_union(self.emails_1, self.emails_2) - assert set(self.union_1_2['email_transactional']) == set(result['email_transactional']) - assert set(self.union_1_2['none']) == set(result['none']) - assert set(self.union_1_2['email_digest']) == set(result['email_digest']) - - def test_remove_duplicates(self): - result = utils.subscriptions_users_remove_duplicates( - self.emails_1, self.emails_4, remove_same=False - ) - assert set(self.dup_1_3['email_transactional']) == set(result['email_transactional']) - assert set(self.dup_1_3['none']) == set(result['none']) - assert set(self.dup_1_3['email_digest']) == set(result['email_digest']) - - def test_remove_duplicates_true(self): - result = utils.subscriptions_users_remove_duplicates( - self.emails_1, self.emails_1, remove_same=True - ) - - assert set(result['none']) == {'h1234', 'g1234', 'i1234'} - assert result['email_digest'] == [] - assert result['email_transactional'] == [] - wb_path = '5581cb50a24f710b0f4623f9' materialized = '/One/Paper13.txt' diff --git a/website/mails/mails.py b/website/mails/mails.py index 033f23fc819..db684f7e84f 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -245,16 +245,6 @@ def get_english_article(word): subject='Confirmation of your submission to ${provider_name}' ) -ACCESS_REQUEST_SUBMITTED = Mail( - 'access_request_submitted', - subject='An OSF user has requested access to your ${node.project_or_component}' -) - -ACCESS_REQUEST_DENIED = Mail( - 'access_request_rejected', - subject='Your access request to an OSF project has been declined' -) - CROSSREF_ERROR = Mail( 'crossref_doi_error', subject='There was an error creating a DOI for preprint(s). 
batch_id: ${batch_id}' diff --git a/website/notifications/emails.py b/website/notifications/emails.py index 7a22ba8954c..aee02dfc0e7 100644 --- a/website/notifications/emails.py +++ b/website/notifications/emails.py @@ -1,113 +1,11 @@ -from django.apps import apps - from babel import dates, core, Locale from django.contrib.contenttypes.models import ContentType -from osf.models import AbstractNode, NotificationSubscription, NotificationType -from osf.models.notifications import NotificationDigest +from osf.models import AbstractNode, NotificationSubscription from osf.utils.permissions import READ -from website import mails from website.notifications import constants -from website.notifications import utils from website.util import web_url_for - -def notify(event, user, node, timestamp, **context): - """Retrieve appropriate ***subscription*** and passe user list -website/notifications/u - :param event: event that triggered the notification - :param user: user who triggered notification - :param node: instance of Node - :param timestamp: time event happened - :param context: optional variables specific to templates - target_user: used with comment_replies - :return: List of user ids notifications were sent to - """ - if event.endswith('_file_updated'): - NotificationType.objects.get( - name=NotificationType.Type.NODE_FILE_ADDED - ).emit( - user=user, - subscribed_object=node, - event_context=context - ) - -def store_emails(recipient_ids, notification_type, event, user, node, timestamp, abstract_provider=None, template=None, **context): - """Store notification emails - - Emails are sent via celery beat as digests - :param recipient_ids: List of user ids to send mail to. - :param notification_type: from constants.Notification_types - :param event: event that triggered notification - :param user: user who triggered the notification - :param node: instance of Node - :param timestamp: time event happened - :param context: - :return: -- - """ - OSFUser = apps.get_model('osf', 'OSFUser') - - if notification_type == 'none': - return - - # If `template` is not specified, default to using a template with name `event` - template = f'{template or event}.html.mako' - - # user whose action triggered email sending - context['user_fullname'] = user.fullname - node_lineage_ids = get_node_lineage(node) if node else [] - - for recipient_id in recipient_ids: - if recipient_id == user._id: - continue - recipient = OSFUser.load(recipient_id) - if recipient.is_disabled: - continue - context['localized_timestamp'] = localize_timestamp(timestamp, recipient) - context['recipient_fullname'] = recipient.fullname - message = mails.render_message(template, **context) - digest = NotificationDigest( - timestamp=timestamp, - send_type=notification_type, - event=event, - user=recipient, - message=message, - node_lineage=node_lineage_ids, - provider=abstract_provider - ) - digest.save() - - -def compile_subscriptions(node, event_type, event=None, level=0): - """Recurse through node and parents for subscriptions. - - :param node: current node - :param event_type: Generally node_subscriptions_available - :param event: Particular event such a file_updated that has specific file subs - :param level: How deep the recursion is - :return: a dict of notification types with lists of users. 
- """ - subscriptions = check_node(node, event_type) - if event: - subscriptions = check_node(node, event) # Gets particular event subscriptions - parent_subscriptions = compile_subscriptions(node, event_type, level=level + 1) # get node and parent subs - elif getattr(node, 'parent_id', False): - parent_subscriptions = \ - compile_subscriptions(AbstractNode.load(node.parent_id), event_type, level=level + 1) - else: - parent_subscriptions = check_node(None, event_type) - for notification_type in parent_subscriptions: - p_sub_n = parent_subscriptions[notification_type] - p_sub_n.extend(subscriptions[notification_type]) - for nt in subscriptions: - if notification_type != nt: - p_sub_n = list(set(p_sub_n).difference(set(subscriptions[nt]))) - if level == 0: - p_sub_n, removed = utils.separate_users(node, p_sub_n) - parent_subscriptions[notification_type] = p_sub_n - return parent_subscriptions - - def check_node(node, event): """Return subscription for a particular node and event.""" node_subscriptions = {key: [] for key in constants.NOTIFICATION_TYPES} diff --git a/website/notifications/events/base.py b/website/notifications/events/base.py index 7378c8ced43..9cf225ddffe 100644 --- a/website/notifications/events/base.py +++ b/website/notifications/events/base.py @@ -2,7 +2,7 @@ from django.utils import timezone -from website.notifications import emails +from osf.models import NotificationType event_registry = {} @@ -32,14 +32,15 @@ def __init__(self, user, node, action): def perform(self): """Call emails.notify to notify users of an action""" - emails.notify( - event=self.event_type, + print(self.action) + NotificationType.objects.get( + name=self.action + ).emit( user=self.user, - node=self.node, - timestamp=self.timestamp, - message=self.html_message, - profile_image_url=self.profile_image_url, - url=self.url + event_context={ + 'profile_image_url': self.profile_image_url, + 'action': self.action, + } ) @property diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index aa8aca2f32b..95685b10b01 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -64,7 +64,7 @@ def text_message(self): @property def event_type(self): """Most basic event type.""" - return 'node_file_updated' + return 'file_updated' @property def waterbutler_id(self): @@ -234,7 +234,7 @@ def perform(self): return NotificationType.objects.get( - name=NotificationType.Type.NODE_ADDON_FILE_MOVED, + name=NotificationType.Type.ADDON_FILE_MOVED, ).emit( user=self.user, event_context={ @@ -260,7 +260,7 @@ def perform(self): return NotificationType.objects.get( - name=NotificationType.Type.NODE_ADDON_FILE_MOVED, + name=NotificationType.Type.ADDON_FILE_MOVED, ).emit( user=self.user, event_context={ diff --git a/website/notifications/events/utils.py b/website/notifications/events/utils.py deleted file mode 100644 index 83e4c79bce4..00000000000 --- a/website/notifications/events/utils.py +++ /dev/null @@ -1,141 +0,0 @@ -from itertools import product - -from website.notifications.emails import compile_subscriptions -from website.notifications import utils, constants - - -def get_file_subs_from_folder(addon, user, kind, path, name): - """Find the file tree under a specified folder.""" - folder = dict(kind=kind, path=path, name=name) - file_tree = addon._get_file_tree(filenode=folder, user=user, version='latest-published') - return list_of_files(file_tree) - - -def list_of_files(file_object): - files = [] - if file_object['kind'] == 'file': - return 
[file_object['path']] - else: - for child in file_object['children']: - files.extend(list_of_files(child)) - return files - - -def compile_user_lists(files, user, source_node, node): - """Take multiple file ids and compiles them. - - :param files: List of WaterButler paths - :param user: User who initiated action/event - :param source_node: Node instance from - :param node: Node instance to - :return: move, warn, and remove dicts - """ - # initialise subscription dictionaries - move = {key: [] for key in constants.NOTIFICATION_TYPES} - warn = {key: [] for key in constants.NOTIFICATION_TYPES} - remove = {key: [] for key in constants.NOTIFICATION_TYPES} - # get the node subscription - if len(files) == 0: - move, warn, remove = categorize_users( - user, 'file_updated', source_node, 'file_updated', node - ) - # iterate through file subscriptions - for file_path in files: - path = file_path.strip('/') - t_move, t_warn, t_remove = categorize_users( - user, path + '_file_updated', source_node, - path + '_file_updated', node - ) - # Add file subs to overall list of subscriptions - for notification in constants.NOTIFICATION_TYPES: - move[notification] = list(set(move[notification]).union(set(t_move[notification]))) - warn[notification] = list(set(warn[notification]).union(set(t_warn[notification]))) - remove[notification] = list(set(remove[notification]).union(set(t_remove[notification]))) - return move, warn, remove - - -def categorize_users(user, source_event, source_node, event, node): - """Categorize users from a file subscription into three categories. - - Puts users in one of three bins: - - Moved: User has permissions on both nodes, subscribed to both - - Warned: User has permissions on both, not subscribed to destination - - Removed: Does not have permission on destination node - :param user: User instance who started the event - :param source_event: _event_name - :param source_node: node from where the event happened - :param event: new guid event name - :param node: node where event ends up - :return: Moved, to be warned, and removed users. - """ - remove = utils.users_to_remove(source_event, source_node, node) - source_node_subs = compile_subscriptions(source_node, utils.find_subscription_type(source_event)) - new_subs = compile_subscriptions(node, utils.find_subscription_type(source_event), event) - - # Moves users into the warn bucket or the move bucket - move = subscriptions_users_union(source_node_subs, new_subs) - warn = subscriptions_users_difference(source_node_subs, new_subs) - - # Removes users without permissions - warn, remove = subscriptions_node_permissions(node, warn, remove) - - # Remove duplicates - warn = subscriptions_users_remove_duplicates(warn, new_subs, remove_same=False) - move = subscriptions_users_remove_duplicates(move, new_subs, remove_same=False) - - # Remove duplicates between move and warn; and move and remove - move = subscriptions_users_remove_duplicates(move, warn, remove_same=True) - move = subscriptions_users_remove_duplicates(move, remove, remove_same=True) - - for notifications in constants.NOTIFICATION_TYPES: - # Remove the user who started this whole thing. 
- user_id = user._id - if user_id in warn[notifications]: - warn[notifications].remove(user_id) - if user_id in move[notifications]: - move[notifications].remove(user_id) - if user_id in remove[notifications]: - remove[notifications].remove(user_id) - - return move, warn, remove - - -def subscriptions_node_permissions(node, warn_subscription, remove_subscription): - for notification in constants.NOTIFICATION_TYPES: - subbed, removed = utils.separate_users(node, warn_subscription[notification]) - warn_subscription[notification] = subbed - remove_subscription[notification].extend(removed) - remove_subscription[notification] = list(set(remove_subscription[notification])) - return warn_subscription, remove_subscription - - -def subscriptions_users_union(emails_1, emails_2): - return { - notification: - list( - set(emails_1[notification]).union(set(emails_2[notification])) - ) - for notification in constants.NOTIFICATION_TYPES.keys() - } - - -def subscriptions_users_difference(emails_1, emails_2): - return { - notification: - list( - set(emails_1[notification]).difference(set(emails_2[notification])) - ) - for notification in constants.NOTIFICATION_TYPES.keys() - } - - -def subscriptions_users_remove_duplicates(emails_1, emails_2, remove_same=False): - emails_list = dict(emails_1) - product_list = product(constants.NOTIFICATION_TYPES, repeat=2) - for notification_1, notification_2 in product_list: - if notification_2 == notification_1 and not remove_same or notification_2 == 'none': - continue - emails_list[notification_1] = list( - set(emails_list[notification_1]).difference(set(emails_2[notification_2])) - ) - return emails_list diff --git a/website/notifications/utils.py b/website/notifications/utils.py index 7ccfcf88ede..fc565610777 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -41,7 +41,7 @@ def find_subscription_type(subscription): """ subs_available = constants.USER_SUBSCRIPTIONS_AVAILABLE subs_available.extend(list({ - 'node_file_updated': 'Files updated' + 'file_updated': 'Files updated' }.keys())) for available in subs_available: if available in subscription: @@ -262,7 +262,7 @@ def format_data(user, nodes): if can_read: subscriptions = NotificationSubscription.objects.filter( user=user, - notification_type__name='node_file_updated', + notification_type__name='file_updated', user__isnull=True, object_id=node.id, content_type=ContentType.objects.get_for_model(node) @@ -331,7 +331,7 @@ def format_file_subscription(user, node_id, path, provider): return serialize_event(user, node=node, event_description='file_updated') -all_subs = ['node_file_updated'] +all_subs = ['file_updated'] all_subs += constants.USER_SUBSCRIPTIONS_AVAILABLE def serialize_event(user, subscription=None, node=None, event_description=None): @@ -429,7 +429,7 @@ def subscribe_user_to_notifications(node, user): ) NotificationSubscription.objects.get_or_create( user=user, - notification_type__name=NotificationType.Type.NODE_FILE_UPDATED, + notification_type__name=NotificationType.Type.FILE_UPDATED, object_id=node.id, content_type=ContentType.objects.get_for_model(node) ) diff --git a/website/project/views/comment.py b/website/project/views/comment.py index 5e274052f18..968f8cb7c2e 100644 --- a/website/project/views/comment.py +++ b/website/project/views/comment.py @@ -14,7 +14,8 @@ @file_updated.connect -def update_file_guid_referent(self, target, event_type, payload, user=None): +def update_file_guid_referent(self, target, payload, user=None): + event_type = payload['action'] if 
event_type not in ('addon_file_moved', 'addon_file_renamed'): return # Nothing to do From 7c792223fec4a4f3f41c2a642716cf34d93b250f Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 29 Jul 2025 14:14:45 -0400 Subject: [PATCH 139/176] fix contributor email notifications with new throttle --- ...ontributor_added_email_records_and_more.py | 25 +++++++++ osf/models/user.py | 14 ----- osf_tests/test_merging_users.py | 1 - tests/test_adding_contributor_views.py | 37 ++++++------- website/notifications/events/base.py | 1 - website/project/views/contributor.py | 53 ++++--------------- 6 files changed, 51 insertions(+), 80 deletions(-) create mode 100644 osf/migrations/0035_remove_osfuser_contributor_added_email_records_and_more.py diff --git a/osf/migrations/0035_remove_osfuser_contributor_added_email_records_and_more.py b/osf/migrations/0035_remove_osfuser_contributor_added_email_records_and_more.py new file mode 100644 index 00000000000..48fd5f258da --- /dev/null +++ b/osf/migrations/0035_remove_osfuser_contributor_added_email_records_and_more.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2.13 on 2025-07-29 17:41 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0034_remove_abstractnode_child_node_subscriptions'), + ] + + operations = [ + migrations.RemoveField( + model_name='osfuser', + name='contributor_added_email_records', + ), + migrations.RemoveField( + model_name='osfuser', + name='group_connected_email_records', + ), + migrations.RemoveField( + model_name='osfuser', + name='member_added_email_records', + ), + ] diff --git a/osf/models/user.py b/osf/models/user.py index fc3526d71f1..8dfaff58a44 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -226,20 +226,6 @@ class OSFUser(DirtyFieldsMixin, GuidMixin, BaseModel, AbstractBaseUser, Permissi # ... # } - # Time of last sent notification email to newly added contributors - # Format : { - # : { - # 'last_sent': time.time() - # } - # ... 
- # } - contributor_added_email_records = DateTimeAwareJSONField(default=dict, blank=True) - - # Tracks last email sent where user was added to an OSF Group - member_added_email_records = DateTimeAwareJSONField(default=dict, blank=True) - # Tracks last email sent where an OSF Group was connected to a node - group_connected_email_records = DateTimeAwareJSONField(default=dict, blank=True) - # The user into which this account was merged merged_by = models.ForeignKey('self', null=True, blank=True, related_name='merger', on_delete=models.CASCADE) diff --git a/osf_tests/test_merging_users.py b/osf_tests/test_merging_users.py index e51e922ec62..9317260fb1b 100644 --- a/osf_tests/test_merging_users.py +++ b/osf_tests/test_merging_users.py @@ -138,7 +138,6 @@ def is_mrm_field(value): 'username', 'verification_key', 'verification_key_v2', - 'contributor_added_email_records', 'requested_deactivation', ] diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 62e84e916fc..bb59a2eeef5 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -3,7 +3,6 @@ import pytest from django.core.exceptions import ValidationError -from pytest import approx from rest_framework import status as http_status from framework import auth @@ -28,6 +27,7 @@ ) from tests.utils import capture_notifications from website.profile.utils import add_contributor_json, serialize_unregistered +from website import settings from website.project.views.contributor import ( deserialize_contributors, notify_added_contributor, @@ -189,10 +189,8 @@ def test_add_contributors_post_only_sends_one_email_to_unreg_user(self, mock_sen assert self.project.can_edit(user=self.creator) with capture_notifications() as noitification: self.app.post(url, json=payload, auth=self.creator.auth) - assert len(noitification) == 3 + assert len(noitification) == 1 assert noitification[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - assert noitification[1]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - assert noitification[2]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_add_contributors_post_only_sends_one_email_to_registered_user(self): # Project has components @@ -218,10 +216,8 @@ def test_add_contributors_post_only_sends_one_email_to_registered_user(self): assert self.project.can_edit(user=self.creator) with capture_notifications() as notifications: self.app.post(url, json=payload, auth=self.creator.auth) - assert len(notifications) == 3 + assert len(notifications) == 1 assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - assert notifications[1]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - assert notifications[2]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self): @@ -250,9 +246,8 @@ def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_nod self.app.post(url, json=payload, auth=self.creator.auth) # send_mail is called for both the project and the sub-component - assert len(notifications) == 2 + assert len(notifications) == 1 assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - assert notifications[1]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT @mock.patch('website.project.views.contributor.send_claim_email') @@ -288,8 +283,6 @@ def test_email_sent_when_reg_user_is_added(self): 
project.save() assert len(notifications) == 1 assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - contributor.refresh_from_db() - assert contributor.contributor_added_email_records[project._id]['last_sent'] == approx(int(time.time()), rel=1) def test_contributor_added_email_sent_to_unreg_user(self): unreg_user = UnregUserFactory() @@ -345,17 +338,17 @@ def test_notify_contributor_email_sends_after_throttle_expires(self): contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) - with capture_notifications() as notifications: - notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth, throttle=throttle) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - - time.sleep(1) # throttle period expires - with capture_notifications() as notifications: - notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth, throttle=throttle) - assert len(notifications) == 2 - assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - assert notifications[1]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + with mock.patch.object(settings, 'CONTRIBUTOR_ADDED_EMAIL_THROTTLE', 1): + with capture_notifications() as notifications: + notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth, throttle=throttle) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + + time.sleep(settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE) # throttle period expires + with capture_notifications() as notifications: + notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth, throttle=throttle) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_add_contributor_to_fork_sends_email(self): contributor = UserFactory() diff --git a/website/notifications/events/base.py b/website/notifications/events/base.py index 9cf225ddffe..00e93b46ed1 100644 --- a/website/notifications/events/base.py +++ b/website/notifications/events/base.py @@ -32,7 +32,6 @@ def __init__(self, user, node, action): def perform(self): """Call emails.notify to notify users of an action""" - print(self.action) NotificationType.objects.get( name=self.action ).emit( diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 715044063e8..d0a217d1cc1 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -438,9 +438,6 @@ def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 360 'osf_contact_email': settings.OSF_CONTACT_EMAIL, } ) - referrer.contributor_added_email_records = {node._id: {'last_sent': get_timestamp()}} - referrer.save() - # Send mail to claimer, telling them to wait for referrer NotificationType.objects.get( name=NotificationType.Type.USER_PENDING_VERIFICATION_REGISTERED @@ -457,19 +454,6 @@ def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 360 } ) -def check_email_throttle_claim_email(node, contributor): - contributor_record = contributor.contributor_added_email_records.get(node._id, {}) - if contributor_record: - timestamp = contributor_record.get('last_sent', None) - if timestamp: - if not throttle_period_expired( - 
timestamp, - settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE - ): - return True - else: - contributor.contributor_added_email_records[node._id] = {} - def send_claim_email( email, unclaimed_user, @@ -568,7 +552,7 @@ def send_claim_email( ) -def check_email_throttle(node, contributor, throttle=None): +def check_email_throttle(node, contributor, notification_type): """ Check whether a 'contributor added' notification was sent recently (within the throttle period) for the given node and contributor. @@ -576,36 +560,22 @@ def check_email_throttle(node, contributor, throttle=None): Args: node (AbstractNode): The node to check. contributor (OSFUser): The contributor being notified. - throttle (int, optional): Throttle period in seconds (defaults to CONTRIBUTOR_ADDED_EMAIL_THROTTLE setting). + notification_type (str, optional): What type of notification to check for. Returns: bool: True if throttled (email was sent recently), False otherwise. """ - from osf.models import Notification, NotificationType, NotificationSubscription + from osf.models import Notification, NotificationSubscription from website import settings - throttle = throttle or settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE - - try: - notification_type = NotificationType.objects.get( - name=NotificationType.Type.NODE_COMMENT.value - ) - except NotificationType.DoesNotExist: - return False # Fail-safe: if the notification type isn't set up, don't throttle - from django.contrib.contenttypes.models import ContentType from datetime import timedelta - # Check for an active subscription for this contributor and this node - subscription = NotificationSubscription.objects.filter( + subscription, create = NotificationSubscription.objects.get_or_create( user=contributor, - notification_type=notification_type, - content_type=ContentType.objects.get_for_model(node), - object_id=node.id - ).first() - - if not subscription: + notification_type__name=notification_type, + ) + if create: return False # No subscription means no previous notifications, so no throttling - # Check the most recent Notification for this subscription last_notification = Notification.objects.filter( subscription=subscription, @@ -613,7 +583,7 @@ def check_email_throttle(node, contributor, throttle=None): ).order_by('-sent').first() if last_notification and last_notification.sent: - cutoff_time = timezone.now() - timedelta(seconds=throttle) + cutoff_time = timezone.now() - timedelta(seconds=settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE) return last_notification.sent > cutoff_time return False # No previous sent notification, not throttled @@ -633,16 +603,12 @@ def notify_added_contributor(node, contributor, notification_type, auth=None, *a auth (Auth, optional): Authorization context. notification_type (str, optional): Template identifier. 
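
A hypothetical usage sketch of the rewritten check_email_throttle guard; the event_context keys are placeholders, and the real guard is the call added to notify_added_contributor below:

# Hypothetical sketch -- the production guard lives in notify_added_contributor.
from osf.models import NotificationType
from website.project.views.contributor import check_email_throttle


def notify_unless_throttled(node, contributor):
    notification_type = NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT
    # True means a notification for this subscription was already sent within
    # settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE seconds, so stay quiet.
    if check_email_throttle(node, contributor, notification_type):
        return
    NotificationType.objects.get(name=notification_type).emit(
        user=contributor,
        event_context={'node': node._id},  # placeholder context, not from the patch
    )
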
""" - if check_email_throttle_claim_email(node, contributor): - return if not notification_type: return - # Default values notification_type = notification_type or NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT logo = settings.OSF_LOGO - # Use match for notification type/logic if notification_type == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT: pass elif notification_type == NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT: @@ -659,6 +625,9 @@ def notify_added_contributor(node, contributor, notification_type, auth=None, *a else: raise NotImplementedError(f'notification_type: {notification_type} not implemented.') + if check_email_throttle(node, contributor, notification_type): + return + NotificationType.objects.get(name=notification_type).emit( user=contributor, event_context={ From e04d901fd60e01c33348291bfc528e0eafd5f9e7 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 29 Jul 2025 14:50:32 -0400 Subject: [PATCH 140/176] remove old notification routes and routes --- addons/base/views.py | 3 +- .../0036_delete_notificationdigest.py | 16 + osf/models/__init__.py | 2 +- osf/models/mixins.py | 2 +- osf/models/notification_subscription.py | 5 - osf/models/notifications.py | 71 +--- osf/models/validators.py | 4 +- osf_tests/factories.py | 15 +- osf_tests/test_comment.py | 38 +- tests/test_events.py | 3 - website/notifications/constants.py | 26 -- website/notifications/emails.py | 98 ----- website/notifications/events/base.py | 4 - website/notifications/events/files.py | 3 +- website/notifications/exceptions.py | 7 - website/notifications/listeners.py | 30 +- website/notifications/utils.py | 398 +----------------- website/notifications/views.py | 106 ----- website/routes.py | 26 -- 19 files changed, 69 insertions(+), 788 deletions(-) create mode 100644 osf/migrations/0036_delete_notificationdigest.py delete mode 100644 website/notifications/emails.py delete mode 100644 website/notifications/exceptions.py delete mode 100644 website/notifications/views.py diff --git a/addons/base/views.py b/addons/base/views.py index 4c3d01bacdf..f91ae0ce2ce 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -56,7 +56,6 @@ from osf.metrics import PreprintView, PreprintDownload from osf.utils import permissions from osf.external.gravy_valet import request_helpers -from website.notifications.emails import localize_timestamp from website.profile.utils import get_profile_image_url from website.project import decorators from website.project.decorators import must_be_contributor_or_public, must_be_valid_project, check_contributor_auth @@ -636,7 +635,7 @@ def create_waterbutler_log(payload, **kwargs): user=user, event_context={ 'profile_image_url': user.profile_image_url(), - 'localized_timestamp': localize_timestamp(timezone.now(), user), + 'localized_timestamp': timezone.now(), 'user_fullname': user.fullname, 'url': node.absolute_url, } diff --git a/osf/migrations/0036_delete_notificationdigest.py b/osf/migrations/0036_delete_notificationdigest.py new file mode 100644 index 00000000000..8ab718d12d6 --- /dev/null +++ b/osf/migrations/0036_delete_notificationdigest.py @@ -0,0 +1,16 @@ +# Generated by Django 4.2.13 on 2025-07-29 18:25 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('osf', '0035_remove_osfuser_contributor_added_email_records_and_more'), + ] + + operations = [ + migrations.DeleteModel( + name='NotificationDigest', + ), + ] diff --git a/osf/models/__init__.py b/osf/models/__init__.py index 
669059d9c4c..7e02185c4ff 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -62,7 +62,7 @@ from .node_relation import NodeRelation from .nodelog import NodeLog from .notable_domain import NotableDomain, DomainReference -from .notifications import NotificationDigest, NotificationSubscriptionLegacy +from .notifications import NotificationSubscriptionLegacy from .notification_subscription import NotificationSubscription from .notification_type import NotificationType from .notification import Notification diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 0dc9f1c1361..0bcf35330b3 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -1110,7 +1110,7 @@ def remove_user_from_subscription(self, user, subscription): user=user ) if subscriptions: - subscriptions.get().remove_user_from_subscription() + subscriptions.get().delete() class TaxonomizableMixin(models.Model): diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py index 7dc79047a13..41b88ba9ea2 100644 --- a/osf/models/notification_subscription.py +++ b/osf/models/notification_subscription.py @@ -99,8 +99,3 @@ def _id(self): return f'{self.user._id}_global' case _: raise NotImplementedError() - - def remove_user_from_subscription(self): - """ - """ - self.delete() diff --git a/osf/models/notifications.py b/osf/models/notifications.py index 80703f1620f..be89d26248f 100644 --- a/osf/models/notifications.py +++ b/osf/models/notifications.py @@ -1,13 +1,6 @@ -from django.contrib.postgres.fields import ArrayField from django.db import models -from website.notifications.constants import NOTIFICATION_TYPES -from .node import Node -from .user import OSFUser -from .base import BaseModel, ObjectIDMixin -from .validators import validate_subscription_type -from osf.utils.fields import NonNaiveDateTimeField -from website.util import api_v2_url +from .base import BaseModel class NotificationSubscriptionLegacy(BaseModel): @@ -31,65 +24,3 @@ class Meta: # Both PreprintProvider and RegistrationProvider default instances use "osf" as their `_id` unique_together = ('_id', 'provider') db_table = 'osf_notificationsubscription_legacy' - - @classmethod - def load(cls, q): - # modm doesn't throw exceptions when loading things that don't exist - try: - return cls.objects.get(_id=q) - except cls.DoesNotExist: - return None - - @property - def owner(self): - # ~100k have owner==user - if self.user is not None: - return self.user - # ~8k have owner=Node - elif self.node is not None: - return self.node - - @owner.setter - def owner(self, value): - if isinstance(value, OSFUser): - self.user = value - elif isinstance(value, Node): - self.node = value - - @property - def absolute_api_v2_url(self): - path = f'/subscriptions/{self._id}/' - return api_v2_url(path) - - def add_user_to_subscription(self, user, notification_type, save=True): - for nt in NOTIFICATION_TYPES: - if getattr(self, nt).filter(id=user.id).exists(): - if nt != notification_type: - getattr(self, nt).remove(user) - else: - if nt == notification_type: - getattr(self, nt).add(user) - - if save: - # Do not clean legacy objects - self.save(clean=False) - - def remove_user_from_subscription(self, user, save=True): - for notification_type in NOTIFICATION_TYPES: - try: - getattr(self, notification_type, []).remove(user) - except ValueError: - pass - - if save: - self.save() - -class NotificationDigest(ObjectIDMixin, BaseModel): - user = models.ForeignKey('OSFUser', null=True, blank=True, on_delete=models.CASCADE) - provider = 
models.ForeignKey('AbstractProvider', null=True, blank=True, on_delete=models.CASCADE) - timestamp = NonNaiveDateTimeField() - send_type = models.CharField(max_length=50, db_index=True, validators=[validate_subscription_type, ]) - event = models.CharField(max_length=50) - message = models.TextField() - # TODO: Could this be a m2m with or without an order field? - node_lineage = ArrayField(models.CharField(max_length=31)) diff --git a/osf/models/validators.py b/osf/models/validators.py index 87f00f826a6..29ee184b66e 100644 --- a/osf/models/validators.py +++ b/osf/models/validators.py @@ -8,8 +8,6 @@ from django.utils.deconstruct import deconstructible from rest_framework import exceptions -from website.notifications.constants import NOTIFICATION_TYPES - from osf.utils.registrations import FILE_VIEW_URL_REGEX from osf.utils.sanitize import strip_html from osf.exceptions import ValidationError, ValidationValueError, reraise_django_validation_errors, BlockedEmailError @@ -54,7 +52,7 @@ def string_required(value): def validate_subscription_type(value): - if value not in NOTIFICATION_TYPES: + if value not in ['email_transactional', 'email_digest', 'none']: raise ValidationValueError diff --git a/osf_tests/factories.py b/osf_tests/factories.py index d1c7e640250..cced02e978d 100644 --- a/osf_tests/factories.py +++ b/osf_tests/factories.py @@ -6,7 +6,7 @@ from unittest import mock from factory import SubFactory -from factory.fuzzy import FuzzyDateTime, FuzzyAttribute, FuzzyChoice +from factory.fuzzy import FuzzyDateTime, FuzzyChoice from unittest.mock import patch, Mock import pytz @@ -20,7 +20,6 @@ from django.db.utils import IntegrityError from faker import Factory, Faker from waffle.models import Flag, Sample, Switch -from website.notifications.constants import NOTIFICATION_TYPES from osf.utils import permissions from website.archiver import ARCHIVER_SUCCESS from website.settings import FAKE_EMAIL_NAME, FAKE_EMAIL_DOMAIN @@ -1064,18 +1063,6 @@ def make_node_lineage(): return [node1._id, node2._id, node3._id, node4._id] - -class NotificationDigestFactory(DjangoModelFactory): - timestamp = FuzzyDateTime(datetime.datetime(1970, 1, 1, tzinfo=pytz.UTC)) - node_lineage = FuzzyAttribute(fuzzer=make_node_lineage) - user = factory.SubFactory(UserFactory) - send_type = FuzzyChoice(choices=NOTIFICATION_TYPES.keys()) - message = fake.text(max_nb_chars=2048) - event = fake.text(max_nb_chars=50) - class Meta: - model = models.NotificationDigest - - class ConferenceFactory(DjangoModelFactory): class Meta: model = models.Conference diff --git a/osf_tests/test_comment.py b/osf_tests/test_comment.py index bb11d34591c..62a295367fd 100644 --- a/osf_tests/test_comment.py +++ b/osf_tests/test_comment.py @@ -500,7 +500,7 @@ def test_comments_move_on_file_rename(self, project, user): } self._create_file_with_comment(node=source['node'], path=source['path'], user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_renamed', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) assert self.guid._id == file_node.get_guid()._id @@ -521,7 +521,7 @@ def test_comments_move_on_folder_rename(self, project, user): file_name = 'file.txt' 
self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_renamed', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) @@ -543,7 +543,7 @@ def test_comments_move_on_subfolder_file_when_parent_folder_is_renamed(self, pro file_path = 'sub-subfolder/file.txt' self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_path), user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_renamed', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_path), file_id=self.file._id)) @@ -564,7 +564,7 @@ def test_comments_move_when_file_moved_to_subfolder(self, project, user): } self._create_file_with_comment(node=source['node'], path=source['path'], user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) @@ -585,7 +585,7 @@ def test_comments_move_when_file_moved_from_subfolder_to_root(self, project, use } self._create_file_with_comment(node=source['node'], path=source['path'], user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) @@ -606,7 +606,7 @@ def test_comments_move_when_file_moved_from_project_to_component(self, project, } self._create_file_with_comment(node=source['node'], path=source['path'], user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) @@ -628,7 +628,7 @@ def test_comments_move_when_file_moved_from_component_to_project(self, project, } self._create_file_with_comment(node=source['node'], path=source['path'], user=user) payload = self._create_payload('move', user, 
source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) @@ -651,7 +651,7 @@ def test_comments_move_when_folder_moved_to_subfolder(self, user, project): file_name = 'file.txt' self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) @@ -673,7 +673,7 @@ def test_comments_move_when_folder_moved_from_subfolder_to_root(self, project, u file_name = 'file.txt' self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) @@ -695,7 +695,7 @@ def test_comments_move_when_folder_moved_from_project_to_component(self, project file_name = 'file.txt' self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) @@ -717,7 +717,7 @@ def test_comments_move_when_folder_moved_from_component_to_project(self, project file_name = 'file.txt' self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) @@ -751,7 +751,7 @@ def test_comments_move_when_file_moved_to_osfstorage(self, project, user): } self._create_file_with_comment(node=source['node'], path=source['path'], user=user) payload = 
self._create_payload('move', user, source, destination, self.file._id, destination_file_id=destination['path'].strip('/')) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class('osfstorage', BaseFileNode.FILE).get_or_create(destination['node'], destination['path']) @@ -792,7 +792,7 @@ def test_comments_move_when_folder_moved_to_osfstorage(self, project, user): file_name = 'file.txt' self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) payload = self._create_payload('move', user, source, destination, self.file._id, destination_file_id=osf_file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class('osfstorage', BaseFileNode.FILE).get_or_create(destination['node'], osf_file._id) @@ -827,7 +827,7 @@ def test_comments_move_when_file_moved_to_different_provider(self, destination_p } self._create_file_with_comment(node=source['node'], path=source['path'], user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(destination_provider, BaseFileNode.FILE).get_or_create(destination['node'], destination['path']) @@ -868,7 +868,7 @@ def test_comments_move_when_folder_moved_to_different_provider(self, destination file_name = 'file.txt' self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(destination_provider, BaseFileNode.FILE).get_or_create(destination['node'], destination_path) @@ -919,7 +919,7 @@ def test_comments_move_when_file_moved_from_project_to_component(self, project, self._create_file_with_comment(node=source['node'], path=source['path'], user=user) self.file.move_under(destination['node'].get_addon(self.provider).get_root()) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) @@ -942,7 +942,7 @@ def test_comments_move_when_file_moved_from_component_to_project(self, project, self._create_file_with_comment(node=source['node'], path=source['path'], user=user) self.file.move_under(destination['node'].get_addon(self.provider).get_root()) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, 
target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) @@ -966,7 +966,7 @@ def test_comments_move_when_folder_moved_from_project_to_component(self, project self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) self.file.move_under(destination['node'].get_addon(self.provider).get_root()) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) @@ -989,7 +989,7 @@ def test_comments_move_when_folder_moved_from_component_to_project(self, project self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) self.file.move_under(destination['node'].get_addon(self.provider).get_root()) payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], event_type='addon_file_moved', payload=payload) + update_file_guid_referent(self=None, target=destination['node'], payload=payload) self.guid.reload() file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) diff --git a/tests/test_events.py b/tests/test_events.py index cef8987f113..e2f81f81a21 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -1,7 +1,5 @@ from collections import OrderedDict -from unittest import mock - from django.contrib.contenttypes.models import ContentType from osf.models import NotificationType @@ -11,7 +9,6 @@ FileAdded, FileRemoved, FolderCreated, FileUpdated, AddonFileCopied, AddonFileMoved, AddonFileRenamed, ) -from addons.base import signals from framework.auth import Auth from osf_tests import factories from osf.utils.permissions import WRITE diff --git a/website/notifications/constants.py b/website/notifications/constants.py index 3b0b81d6823..6e05855582b 100644 --- a/website/notifications/constants.py +++ b/website/notifications/constants.py @@ -1,32 +1,6 @@ -USER_SUBSCRIPTIONS_AVAILABLE = [ - 'user_file_updated', - 'user_reviews' -] - -PROVIDER_SUBSCRIPTIONS_AVAILABLE = { - 'provider_new_pending_submissions': 'New preprint submissions for moderators to review.' 
-} - # Note: the python value None mean inherit from parent NOTIFICATION_TYPES = { 'email_transactional': 'Email when a change occurs', 'email_digest': 'Daily email digest of all changes to this project', 'none': 'None' } - -# Formatted file provider names for notification emails -PROVIDERS = { - 'osfstorage': 'OSF Storage', - 'boa': 'Boa', - 'box': 'Box', - 'dataverse': 'Dataverse', - 'dropbox': 'Dropbox', - 'figshare': 'figshare', - 'github': 'GitHub', - 'gitlab': 'GitLab', - 'bitbucket': 'Bitbucket', - 'googledrive': 'Google Drive', - 'owncloud': 'ownCloud', - 'onedrive': 'Microsoft OneDrive', - 's3': 'Amazon S3' -} diff --git a/website/notifications/emails.py b/website/notifications/emails.py deleted file mode 100644 index aee02dfc0e7..00000000000 --- a/website/notifications/emails.py +++ /dev/null @@ -1,98 +0,0 @@ -from babel import dates, core, Locale -from django.contrib.contenttypes.models import ContentType - -from osf.models import AbstractNode, NotificationSubscription -from osf.utils.permissions import READ -from website.notifications import constants -from website.util import web_url_for - -def check_node(node, event): - """Return subscription for a particular node and event.""" - node_subscriptions = {key: [] for key in constants.NOTIFICATION_TYPES} - if node: - subscription = NotificationSubscription.objects.filter( - object_id=node.id, - content_type=ContentType.objects.get_for_model(node), - notification_type__name=event - ) - for notification_type in node_subscriptions: - users = getattr(subscription, notification_type, []) - if users: - for user in users.exclude(date_disabled__isnull=False): - if node.has_permission(user, READ): - node_subscriptions[notification_type].append(user._id) - return node_subscriptions - - -def get_user_subscriptions(user, event): - if user.is_disabled: - return {} - user_subscription, _ = NotificationSubscription.objects.get_or_create( - user=user, - notification_type__name=event - ) - return user_subscription - - -def get_node_lineage(node): - """ Get a list of node ids in order from the node to top most project - e.g. [parent._id, node._id] - """ - from osf.models import Preprint - lineage = [node._id] - if isinstance(node, Preprint): - return lineage - - while node.parent_id: - node = node.parent_node - lineage = [node._id] + lineage - - return lineage - - -def get_settings_url(uid, user): - if uid == user._id: - return web_url_for('user_notifications', _absolute=True) - - node = AbstractNode.load(uid) - assert node, 'get_settings_url recieved an invalid Node id' - return node.web_url_for('node_setting', _guid=True, _absolute=True) - -def fix_locale(locale): - """Atempt to fix a locale to have the correct casing, e.g. de_de -> de_DE - - This is NOT guaranteed to return a valid locale identifier. - """ - try: - language, territory = locale.split('_', 1) - except ValueError: - return locale - else: - return '_'.join([language, territory.upper()]) - -def localize_timestamp(timestamp, user): - try: - user_timezone = dates.get_timezone(user.timezone) - except LookupError: - user_timezone = dates.get_timezone('Etc/UTC') - - try: - user_locale = Locale(user.locale) - except core.UnknownLocaleError: - user_locale = Locale('en') - - # Do our best to find a valid locale - try: - user_locale.date_formats - except OSError: # An IOError will be raised if locale's casing is incorrect, e.g. de_de vs. de_DE - # Attempt to fix the locale, e.g. 
de_de -> de_DE - try: - user_locale = Locale(fix_locale(user.locale)) - user_locale.date_formats - except (core.UnknownLocaleError, OSError): - user_locale = Locale('en') - - formatted_date = dates.format_date(timestamp, format='full', locale=user_locale) - formatted_time = dates.format_time(timestamp, format='short', tzinfo=user_timezone, locale=user_locale) - - return f'{formatted_time} on {formatted_date}' diff --git a/website/notifications/events/base.py b/website/notifications/events/base.py index 00e93b46ed1..2d36e74ba15 100644 --- a/website/notifications/events/base.py +++ b/website/notifications/events/base.py @@ -64,7 +64,3 @@ def event_type(self): Examples: _file_updated""" raise NotImplementedError - - -class RegistryError(TypeError): - pass diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index 95685b10b01..d88cf3441e4 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -13,7 +13,6 @@ register, Event, event_registry, - RegistryError, ) from osf.models import AbstractNode, NodeLog, Preprint, NotificationType from addons.base.signals import file_updated as signal @@ -24,7 +23,7 @@ def file_updated(self, target=None, user=None, event_type=None, payload=None): if isinstance(target, Preprint): return if event_type not in event_registry: - raise RegistryError + raise NotImplementedError(f' {event_type} not in {event_registry}') event = event_registry[event_type](user, target, event_type, payload=payload) event.perform() diff --git a/website/notifications/exceptions.py b/website/notifications/exceptions.py deleted file mode 100644 index 573a58164d3..00000000000 --- a/website/notifications/exceptions.py +++ /dev/null @@ -1,7 +0,0 @@ -from osf.exceptions import OSFError - -class InvalidSubscriptionError(OSFError): - """Raised if an invalid subscription is attempted. e.g. attempt to - subscribe to an invalid target: institution, bookmark, deleted project etc. 
- """ - pass diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index ca9fdcd6807..2ed837308bb 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -1,7 +1,9 @@ import logging from django.apps import apps +from django.contrib.contenttypes.models import ContentType +from osf.models import NotificationSubscription, NotificationType from website.project.signals import contributor_added, project_created from framework.auth.signals import user_confirmed @@ -11,18 +13,36 @@ def subscribe_creator(resource): if resource.is_collection or resource.is_deleted: return None - from website.notifications.utils import subscribe_user_to_notifications - subscribe_user_to_notifications(resource, resource.creator) + user = resource.creator + if user.is_registered: + NotificationSubscription.objects.get_or_create( + user=user, + notification_type__name=NotificationType.Type.USER_FILE_UPDATED, + ) + NotificationSubscription.objects.get_or_create( + user=user, + notification_type__name=NotificationType.Type.FILE_UPDATED, + object_id=resource.id, + content_type=ContentType.objects.get_for_model(resource) + ) @contributor_added.connect def subscribe_contributor(resource, contributor, auth=None, *args, **kwargs): - from website.notifications.utils import subscribe_user_to_notifications from osf.models import Node - if isinstance(resource, Node): if resource.is_collection or resource.is_deleted: return None - subscribe_user_to_notifications(resource, contributor) + if contributor.is_registered: + NotificationSubscription.objects.get_or_create( + user=contributor, + notification_type__name=NotificationType.Type.USER_FILE_UPDATED, + ) + NotificationSubscription.objects.get_or_create( + user=contributor, + notification_type__name=NotificationType.Type.FILE_UPDATED, + object_id=resource.id, + content_type=ContentType.objects.get_for_model(resource) + ) @user_confirmed.connect def subscribe_confirmed_user(user): diff --git a/website/notifications/utils.py b/website/notifications/utils.py index fc565610777..1cb8f485866 100644 --- a/website/notifications/utils.py +++ b/website/notifications/utils.py @@ -1,66 +1,13 @@ -import collections - from django.apps import apps from django.contrib.contenttypes.models import ContentType from framework.postcommit_tasks.handlers import run_postcommit -from osf.models import NotificationSubscription, NotificationType -from osf.utils.permissions import READ -from website.notifications import constants -from website.notifications.exceptions import InvalidSubscriptionError +from osf.models import NotificationSubscription from website.project import signals from framework.celery_tasks import app -class NotificationsDict(dict): - def __init__(self): - super().__init__() - self.update(messages=[], children=collections.defaultdict(NotificationsDict)) - - def add_message(self, keys, messages): - """ - :param keys: ordered list of project ids from parent to node (e.g. ['parent._id', 'node._id']) - :param messages: built email message for an event that occurred on the node - :return: nested dict with project/component ids as the keys with the message at the appropriate level - """ - d_to_use = self - - for key in keys: - d_to_use = d_to_use['children'][key] - - if not isinstance(messages, list): - messages = [messages] - - d_to_use['messages'].extend(messages) - - -def find_subscription_type(subscription): - """Find subscription type string within specific subscription. 
- Essentially removes extraneous parts of the string to get the type. - """ - subs_available = constants.USER_SUBSCRIPTIONS_AVAILABLE - subs_available.extend(list({ - 'file_updated': 'Files updated' - }.keys())) - for available in subs_available: - if available in subscription: - return available - - -def to_subscription_key(uid, event): - """Build the Subscription primary key for the given guid and event""" - return f'{uid}_{event}' - - -def from_subscription_key(key): - parsed_key = key.split('_', 1) - return { - 'uid': parsed_key[0], - 'event': parsed_key[1] - } - - @signals.contributor_removed.connect def remove_contributor_from_subscriptions(node, user): """ Remove contributor from node subscriptions unless the user is an @@ -85,7 +32,7 @@ def remove_contributor_from_subscriptions(node, user): ) for subscription in node_subscriptions: - subscription.remove_user_from_subscription() + subscription.delete() @signals.node_deleted.connect @@ -118,344 +65,3 @@ def remove_supplemental_node_from_preprints(node_id): if preprint.node is not None: preprint.node = None preprint.save() - - -def separate_users(node, user_ids): - """Separates users into ones with permissions and ones without given a list. - :param node: Node to separate based on permissions - :param user_ids: List of ids, will also take and return User instances - :return: list of subbed, list of removed user ids - """ - OSFUser = apps.get_model('osf.OSFUser') - removed = [] - subbed = [] - for user_id in user_ids: - try: - user = OSFUser.load(user_id) - except TypeError: - user = user_id - if node.has_permission(user, READ): - subbed.append(user_id) - else: - removed.append(user_id) - return subbed, removed - - -def users_to_remove(source_event, source_node, new_node): - """Find users that do not have permissions on new_node. - :param source_event: such as _file_updated - :param source_node: Node instance where a subscription currently resides - :param new_node: Node instance where a sub or new sub will be. 
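
A hedged sketch of the subscription rows the rewritten listeners above are expected to leave behind; it calls subscribe_creator directly instead of relying on the project_created signal, and the ORM assertions are illustrative rather than part of the patch:

# Hypothetical sketch of the rows created by the new subscribe_creator listener.
from django.contrib.contenttypes.models import ContentType
from osf.models import NotificationSubscription, NotificationType
from osf_tests.factories import ProjectFactory, UserFactory
from website.notifications.listeners import subscribe_creator

creator = UserFactory()
project = ProjectFactory(creator=creator)
subscribe_creator(project)

# One global, user-level subscription for the creator...
assert NotificationSubscription.objects.filter(
    user=creator,
    notification_type__name=NotificationType.Type.USER_FILE_UPDATED,
).exists()
# ...and one bound to the new node through the generic foreign key.
assert NotificationSubscription.objects.filter(
    user=creator,
    notification_type__name=NotificationType.Type.FILE_UPDATED,
    object_id=project.id,
    content_type=ContentType.objects.get_for_model(project),
).exists()
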
- :return: Dict of notification type lists with user_ids - """ - removed_users = {key: [] for key in constants.NOTIFICATION_TYPES} - if source_node == new_node: - return removed_users - sub = NotificationSubscription.objects.get( - object_id=source_node.id, - content_type=ContentType.objects.get_for_model(source_node), - notification_type__name=source_event - ) - for notification_type in constants.NOTIFICATION_TYPES: - users = [] - if hasattr(sub, notification_type): - users += list(getattr(sub, notification_type).values_list('guids___id', flat=True)) - return removed_users - - -def move_subscription(remove_users, source_event, source_node, new_event, new_node): - """Moves subscription from old_node to new_node - :param remove_users: dictionary of lists of users to remove from the subscription - :param source_event: A specific guid event _file_updated - :param source_node: Instance of Node - :param new_event: A specific guid event - :param new_node: Instance of Node - :return: Returns a NOTIFICATION_TYPES list of removed users without permissions - """ - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - OSFUser = apps.get_model('osf.OSFUser') - if source_node == new_node: - return - old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event)) - if not old_sub: - return - elif old_sub: - old_sub._id = to_subscription_key(new_node._id, new_event) - old_sub.event_name = new_event - old_sub.owner = new_node - new_sub = old_sub - new_sub.save() - # Remove users that don't have permission on the new node. - for notification_type in constants.NOTIFICATION_TYPES: - if new_sub: - for user_id in remove_users[notification_type]: - related_manager = getattr(new_sub, notification_type, None) - subscriptions = related_manager.all() if related_manager else [] - if user_id in subscriptions: - user = OSFUser.load(user_id) - new_sub.remove_user_from_subscription(user) - - -def get_configured_projects(user): - """Filter all user subscriptions for ones that are on parent projects - and return the node objects. - :param user: OSFUser object - :return: list of node objects for projects with no parent - """ - configured_projects = set() - user_subscriptions = NotificationSubscription.objects.filter( - user=user - ) - - for subscription in user_subscriptions: - # If the user has opted out of emails skip - node = subscription.owner - - if ( - (subscription.none.filter(id=user.id).exists() and not node.parent_id) or - node._id not in user.notifications_configured - ): - continue - - root = node.root - - if not root.is_deleted: - configured_projects.add(root) - - return sorted(configured_projects, key=lambda n: n.title.lower()) - -def check_project_subscriptions_are_all_none(user, node): - node_subscriptions = NotificationSubscription.objects.filter( - user=user, - user__isnull=True, - object_id=node.id, - content_type=ContentType.objects.get_for_model(node) - ) - - for s in node_subscriptions: - if not s.none.filter(id=user.id).exists(): - return False - return True - -def format_data(user, nodes): - """ Format subscriptions data for project settings page - :param user: OSFUser object - :param nodes: list of parent project node objects - :return: treebeard-formatted data - """ - items = [] - - for node in nodes: - assert node, f'{node._id} is not a valid Node.' 
- - can_read = node.has_permission(user, READ) - can_read_children = node.has_permission_on_children(user, READ) - - if not can_read and not can_read_children: - continue - - children = node.get_nodes(**{'is_deleted': False, 'is_node_link': False}) - children_tree = [] - # List project/node if user has at least READ permissions (contributor or admin viewer) or if - # user is contributor on a component of the project/node - - if can_read: - subscriptions = NotificationSubscription.objects.filter( - user=user, - notification_type__name='file_updated', - user__isnull=True, - object_id=node.id, - content_type=ContentType.objects.get_for_model(node) - ) - - for subscription in subscriptions: - children_tree.append( - serialize_event(user, subscription=subscription, node=node) - ) - for node_sub in subscriptions: - children_tree.append(serialize_event(user, node=node, event_description=node_sub)) - children_tree.sort(key=lambda s: s['event']['title']) - - children_tree.extend(format_data(user, children)) - - item = { - 'node': { - 'id': node._id, - 'url': node.url if can_read else '', - 'title': node.title if can_read else 'Private Project', - }, - 'children': children_tree, - 'kind': 'folder' if not node.parent_node or not node.parent_node.has_permission(user, READ) else 'node', - 'nodeType': node.project_or_component, - 'category': node.category, - 'permissions': { - 'view': can_read, - }, - } - - items.append(item) - - return items - - -def format_user_subscriptions(user): - """ Format user-level subscriptions (e.g. comment replies across the OSF) for user settings page""" - user_subs_available = constants.USER_SUBSCRIPTIONS_AVAILABLE - subscriptions = [ - serialize_event( - user, subscription, - event_description=user_subs_available.pop(user_subs_available.index(getattr(subscription, 'event_name'))) - ) - for subscription in NotificationSubscription.objects.get(user=user) - if subscription is not None and getattr(subscription, 'event_name') in user_subs_available - ] - subscriptions.extend([serialize_event(user, event_description=sub) for sub in user_subs_available]) - return subscriptions - - -def format_file_subscription(user, node_id, path, provider): - """Format a single file event""" - AbstractNode = apps.get_model('osf.AbstractNode') - node = AbstractNode.load(node_id) - wb_path = path.lstrip('/') - subscriptions = NotificationSubscription.objects.filter( - user=user, - user__isnull=True, - object_id=node.id, - content_type=ContentType.objects.get_for_model(node) - ) - - for subscription in subscriptions: - if wb_path in getattr(subscription, 'event_name'): - return serialize_event(user, subscription, node) - return serialize_event(user, node=node, event_description='file_updated') - - -all_subs = ['file_updated'] -all_subs += constants.USER_SUBSCRIPTIONS_AVAILABLE - -def serialize_event(user, subscription=None, node=None, event_description=None): - """ - :param user: OSFUser object - :param subscription: Subscription object, use if parsing particular subscription - :param node: Node object, use if node is known - :param event_description: use if specific subscription is known - :return: treebeard-formatted subscription event - """ - if not event_description: - event_description = getattr(subscription, 'event_name') - # Looks at only the types available. Deals with pre-pending file names. 
- for sub_type in all_subs: - if sub_type in event_description: - event_type = sub_type - else: - event_type = event_description - if node and node.parent_node: - notification_type = 'adopt_parent' - elif event_type.startswith('global_'): - notification_type = 'email_transactional' - else: - notification_type = 'none' - if subscription: - for n_type in constants.NOTIFICATION_TYPES: - if getattr(subscription, n_type).filter(id=user.id).exists(): - notification_type = n_type - return { - 'event': { - 'title': event_description, - 'description': all_subs[event_type], - 'notificationType': notification_type, - 'parent_notification_type': get_parent_notification_type(node, event_type, user) - }, - 'kind': 'event', - 'children': [] - } - - -def get_parent_notification_type(node, event, user): - """ - Given an event on a node (e.g. comment on node 'xyz'), find the user's notification - type on the parent project for the same event. - :param obj node: event owner (Node or User object) - :param str event: notification event (e.g. 'comment_replies') - :param obj user: OSFUser object - :return: str notification type (e.g. 'email_transactional') - """ - AbstractNode = apps.get_model('osf.AbstractNode') - - if node and isinstance(node, AbstractNode) and node.parent_node and node.parent_node.has_permission(user, READ): - parent = node.parent_node - key = to_subscription_key(parent._id, event) - try: - subscription = NotificationSubscription.objects.get(_id=key) - except NotificationSubscription.DoesNotExist: - return get_parent_notification_type(parent, event, user) - - for notification_type in constants.NOTIFICATION_TYPES: - if getattr(subscription, notification_type).filter(id=user.id).exists(): - return notification_type - else: - return get_parent_notification_type(parent, event, user) - else: - return None - - -def get_global_notification_type(global_subscription, user): - """ - Given a global subscription (e.g. NotificationSubscription object with event_type - 'global_file_updated'), find the user's notification type. - :param obj global_subscription: NotificationSubscription object - :param obj user: OSFUser object - :return: str notification type (e.g. 'email_transactional') - """ - for notification_type in constants.NOTIFICATION_TYPES: - # TODO Optimize me - if getattr(global_subscription, notification_type).filter(id=user.id).exists(): - return notification_type - - -def subscribe_user_to_notifications(node, user): - """ Update the notification settings for the creator or contributors - :param user: User to subscribe to notifications - """ - - if getattr(node, 'is_registration', False): - raise InvalidSubscriptionError('Registrations are invalid targets for subscriptions') - - if user.is_registered: - NotificationSubscription.objects.get_or_create( - user=user, - notification_type__name=NotificationType.Type.USER_FILE_UPDATED, - ) - NotificationSubscription.objects.get_or_create( - user=user, - notification_type__name=NotificationType.Type.FILE_UPDATED, - object_id=node.id, - content_type=ContentType.objects.get_for_model(node) - ) - - -def format_user_and_project_subscriptions(user): - """ Format subscriptions data for user settings page. """ - return [ - { - 'node': { - 'id': user._id, - 'title': 'Default Notification Settings', - 'help': 'These are default settings for new projects you create ' + - 'or are added to. Modifying these settings will not ' + - 'modify settings on existing projects.' 
- }, - 'kind': 'heading', - 'children': format_user_subscriptions(user) - }, - { - 'node': { - 'id': '', - 'title': 'Project Notifications', - 'help': 'These are settings for each of your projects. Modifying ' + - 'these settings will only modify the settings for the selected project.' - }, - 'kind': 'heading', - 'children': format_data(user, get_configured_projects(user)) - }] diff --git a/website/notifications/views.py b/website/notifications/views.py deleted file mode 100644 index 09fb59a1260..00000000000 --- a/website/notifications/views.py +++ /dev/null @@ -1,106 +0,0 @@ -from rest_framework import status as http_status - -from flask import request - -from framework import sentry -from framework.auth.decorators import must_be_logged_in -from framework.exceptions import HTTPError - -from osf.models import AbstractNode, Registration, NotificationSubscription -from osf.utils.permissions import READ -from website.notifications import utils -from website.notifications.constants import NOTIFICATION_TYPES -from website.project.decorators import must_be_valid_project - - -@must_be_logged_in -def get_subscriptions(auth): - return utils.format_user_and_project_subscriptions(auth.user) - - -@must_be_logged_in -@must_be_valid_project -def get_node_subscriptions(auth, **kwargs): - node = kwargs.get('node') or kwargs['project'] - return utils.format_data(auth.user, [node]) - - -@must_be_logged_in -def get_file_subscriptions(auth, **kwargs): - node_id = request.args.get('node_id') - path = request.args.get('path') - provider = request.args.get('provider') - return utils.format_file_subscription(auth.user, node_id, path, provider) - - -@must_be_logged_in -def configure_subscription(auth): - user = auth.user - json_data = request.get_json() - target_id = json_data.get('id') - event = json_data.get('event') - notification_type = json_data.get('notification_type') - path = json_data.get('path') - provider = json_data.get('provider') - - if not event or (notification_type not in NOTIFICATION_TYPES and notification_type != 'adopt_parent'): - raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data=dict( - message_long='Must provide an event and notification type for subscription.') - ) - - node = AbstractNode.load(target_id) - if 'file_updated' in event and path is not None and provider is not None: - wb_path = path.lstrip('/') - event = wb_path + '_file_updated' - event_id = utils.to_subscription_key(target_id, event) - - if not node: - # if target_id is not a node it currently must be the current user - if not target_id == user._id: - sentry.log_message( - '{!r} attempted to subscribe to either a bad ' - 'id or non-node non-self id, {}'.format(user, target_id) - ) - raise HTTPError(http_status.HTTP_404_NOT_FOUND) - - if notification_type == 'adopt_parent': - sentry.log_message( - f'{user!r} attempted to adopt_parent of a none node id, {target_id}' - ) - raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - else: - if not node.has_permission(user, READ): - sentry.log_message(f'{user!r} attempted to subscribe to private node, {target_id}') - raise HTTPError(http_status.HTTP_403_FORBIDDEN) - - if isinstance(node, Registration): - sentry.log_message( - f'{user!r} attempted to subscribe to registration, {target_id}' - ) - raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - - if 'file_updated' in event and len(event) > len('file_updated'): - pass - else: - parent = node.parent_node - if not parent: - sentry.log_message( - '{!r} attempted to adopt_parent of ' - 'the parentless project, {!r}'.format(user, node) - ) 
- raise HTTPError(http_status.HTTP_400_BAD_REQUEST) - - subscription, _ = NotificationSubscription.objects.get_or_create( - user=user, - subscribed_object=node, - notification_type__name=event - ) - subscription.save() - - if node and node._id not in user.notifications_configured: - user.notifications_configured[node._id] = True - user.save() - - subscription.save() - - return {'message': f'Successfully subscribed to {notification_type} list on {event_id}'} diff --git a/website/routes.py b/website/routes.py index 1d03f538c31..7d728ea866c 100644 --- a/website/routes.py +++ b/website/routes.py @@ -56,7 +56,6 @@ from website.registries import views as registries_views from website.reviews import views as reviews_views from website.institutions import views as institution_views -from website.notifications import views as notification_views from website.ember_osf_web import views as ember_osf_web_views from website.closed_challenges import views as closed_challenges_views from website.identifiers import views as identifier_views @@ -1707,23 +1706,6 @@ def make_url_map(app): json_renderer, ), - Rule( - '/subscriptions/', - 'get', - notification_views.get_subscriptions, - json_renderer, - ), - - Rule( - [ - '/project//subscriptions/', - '/project//node//subscriptions/' - ], - 'get', - notification_views.get_node_subscriptions, - json_renderer, - ), - Rule( [ '/project//tree/', @@ -1733,14 +1715,6 @@ def make_url_map(app): project_views.node.get_node_tree, json_renderer, ), - - Rule( - '/subscriptions/', - 'post', - notification_views.configure_subscription, - json_renderer, - ), - Rule( [ '/project//settings/addons/', From b0d052117bbd8695d50ca28df9fc0c369a76ba06 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 29 Jul 2025 16:12:21 -0400 Subject: [PATCH 141/176] fix addons logs --- addons/base/views.py | 2 +- website/notifications/events/files.py | 16 ++++++++++++---- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/addons/base/views.py b/addons/base/views.py index f91ae0ce2ce..af9fc52eeef 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -635,7 +635,7 @@ def create_waterbutler_log(payload, **kwargs): user=user, event_context={ 'profile_image_url': user.profile_image_url(), - 'localized_timestamp': timezone.now(), + 'localized_timestamp': str(timezone.now()), 'user_fullname': user.fullname, 'url': node.absolute_url, } diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index d88cf3441e4..c067865c0d2 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -19,12 +19,20 @@ @signal.connect -def file_updated(self, target=None, user=None, event_type=None, payload=None): +def file_updated(self, target=None, user=None, payload=None): + notification_type = { + 'rename': NotificationType.Type.ADDON_FILE_RENAMED, + 'copy': NotificationType.Type.ADDON_FILE_COPIED, + 'create': NotificationType.Type.FILE_UPDATED, + 'move': NotificationType.Type.ADDON_FILE_MOVED, + 'delete': NotificationType.Type.FILE_REMOVED, + }[payload.get('action')] if isinstance(target, Preprint): return - if event_type not in event_registry: - raise NotImplementedError(f' {event_type} not in {event_registry}') - event = event_registry[event_type](user, target, event_type, payload=payload) + + if notification_type not in event_registry: + raise NotImplementedError(f' {notification_type} not in {event_registry}') + event = event_registry[notification_type](user, target, notification_type, payload=payload) event.perform() From 
d77eed27875e0852347138cf3df69fa9b350a204 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 11:16:42 -0400 Subject: [PATCH 142/176] clean up claim new user email throttle --- osf/models/notification_subscription.py | 23 +++++++++----- osf/models/notification_type.py | 34 ++++++-------------- tests/test_addons.py | 14 +++------ tests/test_claim_views.py | 16 ++++++---- website/notifications/events/files.py | 14 ++++++--- website/project/views/contributor.py | 42 +++++++++++++++---------- 6 files changed, 75 insertions(+), 68 deletions(-) diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py index 41b88ba9ea2..12e427b9e30 100644 --- a/osf/models/notification_subscription.py +++ b/osf/models/notification_subscription.py @@ -24,7 +24,6 @@ class NotificationSubscription(BaseModel): max_length=500, null=True ) - content_type = models.ForeignKey(ContentType, null=True, blank=True, on_delete=models.CASCADE) object_id = models.CharField(max_length=255, null=True, blank=True) subscribed_object = GenericForeignKey('content_type', 'object_id') @@ -52,19 +51,29 @@ class Meta: verbose_name = 'Notification Subscription' verbose_name_plural = 'Notification Subscriptions' - def emit(self, event_context=None): + def emit( + self, + event_context=None, + destination_address=None, + email_context=None, + ): """Emit a notification to a user by creating Notification and NotificationSubscription objects. Args: - user (OSFUser): The recipient of the notification. - subscribed_object (optional): The object the subscription is related to. - event_context (dict, optional): Context for rendering the notification template. + event_context (dict, optional): Context used to render the notification template + destination_address (optional): overrides the user's email address for the notification.
Good for sending + to a test address or OSF desk support' + email_context (dict, optional): Context for sending the email bcc, reply_to header etc """ if self.message_frequency == 'instantly': - Notification.objects.create( + notification = Notification.objects.create( subscription=self, event_context=event_context - ).send() + ) + notification.send( + destination_address=destination_address, + email_context=email_context, + ) else: Notification.objects.create( subscription=self, diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 55fe70883df..6d9fe407d93 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -2,21 +2,8 @@ from django.contrib.postgres.fields import ArrayField from django.contrib.contenttypes.models import ContentType -from osf.models.notification import Notification from enum import Enum - -class FrequencyChoices(Enum): - NONE = 'none' - INSTANTLY = 'instantly' - DAILY = 'daily' - WEEKLY = 'weekly' - MONTHLY = 'monthly' - - @classmethod - def choices(cls): - return [(key.value, key.name.capitalize()) for key in cls] - def get_default_frequency_choices(): DEFAULT_FREQUENCY_CHOICES = ['none', 'instantly', 'daily', 'weekly', 'monthly'] return DEFAULT_FREQUENCY_CHOICES.copy() @@ -225,18 +212,17 @@ def emit( subscription, created = NotificationSubscription.objects.get_or_create( notification_type=self, user=user, - content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, - object_id=subscribed_object.pk if subscribed_object else None, - defaults={'message_frequency': message_frequency}, + defaults={ + 'object_id': subscribed_object.pk if subscribed_object else None, + 'message_frequency': message_frequency, + 'content_type': ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, + }, + ) + subscription.emit( + destination_address=destination_address, + event_context=event_context, + email_context=email_context, ) - if subscription.message_frequency == 'instantly': - Notification.objects.create( - subscription=subscription, - event_context=event_context - ).send( - destination_address=destination_address, - email_context=email_context - ) def add_user_to_subscription(self, user, *args, **kwargs): """ diff --git a/tests/test_addons.py b/tests/test_addons.py index f8421f2bd74..aaf4de9cc6c 100644 --- a/tests/test_addons.py +++ b/tests/test_addons.py @@ -350,18 +350,13 @@ def build_payload_with_dest(self, destination, **kwargs): 'signature': signature, } - @mock.patch('website.notifications.events.files.FileAdded.perform') - def test_add_log(self, mock_perform): - path = 'pizza' + def test_add_log(self): url = self.node.api_url_for('create_waterbutler_log') - payload = self.build_payload(metadata={'nid': self.node._id, 'path': path}) + payload = self.build_payload(metadata={'nid': self.node._id, 'path': 'pizza'}) nlogs = self.node.logs.count() self.app.put(url, json=payload) self.node.reload() assert self.node.logs.count() == nlogs + 1 - # # Mocking form_message and perform so that the payload need not be exact. 
- # assert mock_form_message.called, "form_message not called" - assert mock_perform.called, 'perform not called' def test_add_log_missing_args(self): path = 'pizza' @@ -1542,13 +1537,14 @@ def test_resolve_folder_raise(self): def test_delete_action_creates_trashed_file_node(self): file_node = self.get_test_file() payload = { + 'action': 'file_removed', 'provider': file_node.provider, 'metadata': { 'path': '/test/Test', 'materialized': '/test/Test' } } - views.addon_delete_file_node(self=None, target=self.project, user=self.user, event_type='file_removed', payload=payload) + views.addon_delete_file_node(self=None, target=self.project, user=self.user, payload=payload) assert not GithubFileNode.load(file_node._id) assert TrashedFileNode.load(file_node._id) @@ -1568,7 +1564,7 @@ def test_delete_action_for_folder_deletes_subfolders_and_creates_trashed_file_no 'materialized': '/test/' } } - views.addon_delete_file_node(self=None, target=self.project, user=self.user, event_type='file_removed', payload=payload) + views.addon_delete_file_node(self=None, target=self.project, user=self.user, payload=payload) assert not GithubFileNode.load(subfolder._id) assert TrashedFileNode.load(file_node._id) diff --git a/tests/test_claim_views.py b/tests/test_claim_views.py index 025aa1a53eb..8d8986bbd10 100644 --- a/tests/test_claim_views.py +++ b/tests/test_claim_views.py @@ -228,13 +228,17 @@ def test_send_claim_registered_email_before_throttle_expires(self): unclaimed_user=self.user, node=self.project, ) - # second call raises error because it was called before throttle period + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.USER_FORWARD_INVITE_REGISTERED + assert notifications[1]['type'] == NotificationType.Type.USER_PENDING_VERIFICATION_REGISTERED + # second call raises error because it was called before throttle period + with capture_notifications() as notifications: with pytest.raises(HTTPError): - send_claim_registered_email( - claimer=reg_user, - unclaimed_user=self.user, - node=self.project, - ) + send_claim_registered_email( + claimer=reg_user, + unclaimed_user=self.user, + node=self.project, + ) assert not notifications @mock.patch('website.project.views.contributor.send_claim_registered_email') diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index c067865c0d2..869a3d9c53d 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -20,20 +20,24 @@ @signal.connect def file_updated(self, target=None, user=None, payload=None): + if isinstance(target, Preprint): + return notification_type = { 'rename': NotificationType.Type.ADDON_FILE_RENAMED, 'copy': NotificationType.Type.ADDON_FILE_COPIED, 'create': NotificationType.Type.FILE_UPDATED, 'move': NotificationType.Type.ADDON_FILE_MOVED, 'delete': NotificationType.Type.FILE_REMOVED, + 'update': NotificationType.Type.FILE_UPDATED, }[payload.get('action')] - if isinstance(target, Preprint): - return - if notification_type not in event_registry: raise NotImplementedError(f' {notification_type} not in {event_registry}') - event = event_registry[notification_type](user, target, notification_type, payload=payload) - event.perform() + event_registry[notification_type]( + user, + target, + notification_type, + payload=payload + ).perform() class FileEvent(Event): diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index d0a217d1cc1..76b0dc938fb 100644 --- a/website/project/views/contributor.py +++ 
b/website/project/views/contributor.py @@ -210,14 +210,20 @@ def deserialize_contributors(node, user_dicts, auth, validate=False): @unreg_contributor_added.connect -def finalize_invitation(node, contributor, auth, email_template='default'): +def finalize_invitation(node, contributor, auth, notification_type='default'): try: record = contributor.get_unclaimed_record(node._primary_key) except ValueError: pass else: if record['email']: - send_claim_email(record['email'], contributor, node, notify=True, email_template=email_template) + send_claim_email( + record['email'], + contributor, + node, + notify=True, + notification_type=notification_type + ) @must_be_valid_project @@ -404,8 +410,11 @@ def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 360 unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key) # check throttle - timestamp = unclaimed_record.get('last_sent') - if not throttle_period_expired(timestamp, throttle): + if check_email_throttle( + contributor=claimer, + notification_type=NotificationType.Type.USER_FORWARD_INVITE_REGISTERED, + throttle=throttle + ): raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data=dict( message_long='User account can only be claimed with an existing user once every 24 hours' )) @@ -552,13 +561,16 @@ def send_claim_email( ) -def check_email_throttle(node, contributor, notification_type): +def check_email_throttle( + contributor, + notification_type, + throttle=settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE +): """ Check whether a 'contributor added' notification was sent recently (within the throttle period) for the given node and contributor. Args: - node (AbstractNode): The node to check. contributor (OSFUser): The contributor being notified. notification_type (str, optional): What type of notification to check for. @@ -566,25 +578,21 @@ def check_email_throttle(node, contributor, notification_type): bool: True if throttled (email was sent recently), False otherwise. 
""" from osf.models import Notification, NotificationSubscription - from website import settings from datetime import timedelta # Check for an active subscription for this contributor and this node subscription, create = NotificationSubscription.objects.get_or_create( user=contributor, - notification_type__name=notification_type, + notification_type=NotificationType.objects.get(name=notification_type), ) if create: return False # No subscription means no previous notifications, so no throttling # Check the most recent Notification for this subscription - last_notification = Notification.objects.filter( - subscription=subscription, - sent__isnull=False - ).order_by('-sent').first() - - if last_notification and last_notification.sent: - cutoff_time = timezone.now() - timedelta(seconds=settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE) - return last_notification.sent > cutoff_time + last_notification = Notification.objects.filter(subscription=subscription).last() + if last_notification: + cutoff_time = timezone.now() - timedelta(seconds=throttle) + if last_notification.sent: + return last_notification.sent > cutoff_time return False # No previous sent notification, not throttled @@ -625,7 +633,7 @@ def notify_added_contributor(node, contributor, notification_type, auth=None, *a else: raise NotImplementedError(f'notification_type: {notification_type} not implemented.') - if check_email_throttle(node, contributor, notification_type): + if check_email_throttle(contributor, notification_type): return NotificationType.objects.get(name=notification_type).emit( From 74f42bf0a9b76c4e5e29f7e41dabd5d72af43d22 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 13:46:01 -0400 Subject: [PATCH 143/176] clean-up contributor throttle tests --- tests/test_adding_contributor_views.py | 46 ++++++++++++++++++-------- website/project/views/contributor.py | 24 +++----------- 2 files changed, 38 insertions(+), 32 deletions(-) diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index bb59a2eeef5..a2f6d31c33e 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -324,12 +324,22 @@ def test_notify_contributor_email_does_not_send_before_throttle_expires(self): project = ProjectFactory() auth = Auth(project.creator) with capture_notifications() as notifications: - notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth) + notify_added_contributor( + project, + contributor, + notification_type=NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, + auth=auth + ) assert len(notifications) == 1 # 2nd call does not send email because throttle period has not expired with capture_notifications() as notifications: - notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth) + notify_added_contributor( + project, + contributor, + notification_type=NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, + auth=auth + ) assert not notifications def test_notify_contributor_email_sends_after_throttle_expires(self): @@ -338,17 +348,27 @@ def test_notify_contributor_email_sends_after_throttle_expires(self): contributor = UserFactory() project = ProjectFactory() auth = Auth(project.creator) - with mock.patch.object(settings, 'CONTRIBUTOR_ADDED_EMAIL_THROTTLE', 1): - with capture_notifications() as notifications: - notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth, throttle=throttle) - assert 
len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT - - time.sleep(settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE) # throttle period expires - with capture_notifications() as notifications: - notify_added_contributor(project, contributor, NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, auth, throttle=throttle) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + with capture_notifications() as notifications: + notify_added_contributor( + project, + contributor, + NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, + auth, + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + + time.sleep(2) # throttle period expires + with capture_notifications() as notifications: + notify_added_contributor( + project, + contributor, + NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT, + auth, + throttle=1 + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT def test_add_contributor_to_fork_sends_email(self): contributor = UserFactory() diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 76b0dc938fb..d6f4072a5de 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -588,11 +588,11 @@ def check_email_throttle( if create: return False # No subscription means no previous notifications, so no throttling # Check the most recent Notification for this subscription - last_notification = Notification.objects.filter(subscription=subscription).last() + last_notification = Notification.objects.filter(subscription=subscription).order_by('created').last() + if last_notification: cutoff_time = timezone.now() - timedelta(seconds=throttle) - if last_notification.sent: - return last_notification.sent > cutoff_time + return last_notification.created > cutoff_time return False # No previous sent notification, not throttled @@ -614,26 +614,12 @@ def notify_added_contributor(node, contributor, notification_type, auth=None, *a if not notification_type: return - notification_type = notification_type or NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT logo = settings.OSF_LOGO - - if notification_type == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT: - pass - elif notification_type == NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT: - pass - elif notification_type == NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT: - pass - elif notification_type == NotificationType.Type.USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST: - pass - elif notification_type == NotificationType.Type.NODE_INSTITUTIONAL_ACCESS_REQUEST: - pass - elif getattr(node, 'has_linked_published_preprints', None): + if getattr(node, 'has_linked_published_preprints', None): notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF logo = settings.OSF_PREPRINTS_LOGO - else: - raise NotImplementedError(f'notification_type: {notification_type} not implemented.') - if check_email_throttle(contributor, notification_type): + if check_email_throttle(contributor, notification_type, throttle=kwargs.get('throttle')): return NotificationType.objects.get(name=notification_type).emit( From 6eacd4d498eb8a2d18d51b920b43ada392259cb0 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 13:51:44 -0400 Subject: [PATCH 144/176] fix reporter and 
preprint tests --- notifications.yaml | 9 +++------ .../reporters/test_institutional_users_reporter.py | 2 ++ website/project/views/contributor.py | 3 ++- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index 62f636b8546..8537e269fa5 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -220,10 +220,6 @@ notification_types: __docs__: ... object_content_type_model_name: abstractprovider template: 'website/templates/emails/contributor_added_preprints.html.mako' - - name: provider_reviews_submission_confirmation - __docs__: ... - object_content_type_model_name: abstractprovider - template: 'website/templates/emails/reviews_submission_confirmation.html.mako' - name: provider_confirm_email_moderation subject: 'OSF Account Verification, {provider.name}' __docs__: ... @@ -318,10 +314,11 @@ notification_types: template: 'website/templates/emails/updates_rejected.html.mako' #### PREPRINT - - name: pending_retraction_admin + - name: preprint_contributor_added_preprint_node_from_osf + subject: 'You have been added as a contributor to an OSF project.' __docs__: ... object_content_type_model_name: preprint - template: 'website/templates/emails/pending_retraction_admin.html.mako' + template: 'website/templates/emails/contributor_added_preprint_node_from_osf.html.mako' - name: preprint_request_withdrawal_approved __docs__: ... object_content_type_model_name: preprint diff --git a/osf_tests/metrics/reporters/test_institutional_users_reporter.py b/osf_tests/metrics/reporters/test_institutional_users_reporter.py index 275fcb1e8a1..e399d848396 100644 --- a/osf_tests/metrics/reporters/test_institutional_users_reporter.py +++ b/osf_tests/metrics/reporters/test_institutional_users_reporter.py @@ -7,6 +7,7 @@ from api_tests.utils import create_test_file from osf import models as osfdb +from osf.management.commands.populate_notification_types import populate_notification_types from osf.metrics.reports import InstitutionalUserReport from osf.metrics.reporters import InstitutionalUsersReporter from osf.metrics.utils import YearMonth @@ -28,6 +29,7 @@ def _patch_now(fakenow: datetime.datetime): class TestInstiUsersReporter(TestCase): @classmethod def setUpTestData(cls): + populate_notification_types() cls._yearmonth = YearMonth(2012, 7) cls._now = datetime.datetime( cls._yearmonth.year, diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index d6f4072a5de..80ac1e2cb21 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -619,7 +619,8 @@ def notify_added_contributor(node, contributor, notification_type, auth=None, *a notification_type = NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF logo = settings.OSF_PREPRINTS_LOGO - if check_email_throttle(contributor, notification_type, throttle=kwargs.get('throttle')): + throttle = kwargs.get('throttle', settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE) + if check_email_throttle(contributor, notification_type, throttle=throttle): return NotificationType.objects.get(name=notification_type).emit( From 59c5c2f98793adf22d2a745c118501cb4dd2eb53 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 16:40:08 -0400 Subject: [PATCH 145/176] fix throttle --- api_tests/users/views/test_user_claim.py | 13 +++++++-- website/project/views/contributor.py | 37 +++++++++++++----------- 2 files changed, 30 insertions(+), 20 deletions(-) diff --git a/api_tests/users/views/test_user_claim.py 
b/api_tests/users/views/test_user_claim.py index ddd7cfad4e5..01079aff6c9 100644 --- a/api_tests/users/views/test_user_claim.py +++ b/api_tests/users/views/test_user_claim.py @@ -1,5 +1,4 @@ import pytest -from django.utils import timezone from api.base.settings.defaults import API_BASE from api.users.views import ClaimUser @@ -217,8 +216,16 @@ def test_claim_auth_failure(self, app, url, claimer, wrong_preprint, project, un assert res.status_code == 403 def test_claim_auth_throttle_error(self, app, url, claimer, unreg_user, project): - unreg_user.unclaimed_records[project._id]['last_sent'] = timezone.now() - unreg_user.save() + with capture_notifications() as notifications: + app.post_json_api( + url.format(unreg_user._id), + self.payload(id=project._id), + auth=claimer.auth, + expect_errors=True + ) + assert len(notifications) == 2 + assert notifications[0]['type'] == NotificationType.Type.USER_FORWARD_INVITE_REGISTERED + assert notifications[1]['type'] == NotificationType.Type.USER_PENDING_VERIFICATION_REGISTERED with capture_notifications() as notifications: res = app.post_json_api( url.format(unreg_user._id), diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 80ac1e2cb21..e86d4bcd7ca 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -408,16 +408,7 @@ def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 360 """ unclaimed_record = unclaimed_user.get_unclaimed_record(node._primary_key) - # check throttle - if check_email_throttle( - contributor=claimer, - notification_type=NotificationType.Type.USER_FORWARD_INVITE_REGISTERED, - throttle=throttle - ): - raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data=dict( - message_long='User account can only be claimed with an existing user once every 24 hours' - )) # roll the valid token for each email, thus user cannot change email and approve a different email address verification_key = generate_verification_key(verification_type='claim') @@ -434,6 +425,17 @@ def send_claim_registered_email(claimer, unclaimed_user, node, throttle=24 * 360 token=unclaimed_record['token'], _absolute=True, ) + if check_email_throttle( + referrer, + notification_type=NotificationType.Type.USER_FORWARD_INVITE_REGISTERED, + throttle=throttle + ): + raise HTTPError( + http_status.HTTP_400_BAD_REQUEST, + data=dict( + message_long='User account can only be claimed with an existing user once every 24 hours' + ) + ) # Send mail to referrer, telling them to forward verification link to claimer NotificationType.objects.get( @@ -562,7 +564,7 @@ def send_claim_email( def check_email_throttle( - contributor, + user, notification_type, throttle=settings.CONTRIBUTOR_ADDED_EMAIL_THROTTLE ): @@ -571,25 +573,24 @@ def check_email_throttle( (within the throttle period) for the given node and contributor. Args: - contributor (OSFUser): The contributor being notified. + user (OSFUser): The contributor being notified. notification_type (str, optional): What type of notification to check for. Returns: bool: True if throttled (email was sent recently), False otherwise. 
""" from osf.models import Notification, NotificationSubscription - from datetime import timedelta # Check for an active subscription for this contributor and this node - subscription, create = NotificationSubscription.objects.get_or_create( - user=contributor, + subscription = NotificationSubscription.objects.filter( + user=user, notification_type=NotificationType.objects.get(name=notification_type), ) - if create: + if not subscription: return False # No subscription means no previous notifications, so no throttling # Check the most recent Notification for this subscription + subscription = subscription.get() last_notification = Notification.objects.filter(subscription=subscription).order_by('created').last() - if last_notification: cutoff_time = timezone.now() - timedelta(seconds=throttle) return last_notification.created > cutoff_time @@ -623,7 +624,9 @@ def notify_added_contributor(node, contributor, notification_type, auth=None, *a if check_email_throttle(contributor, notification_type, throttle=throttle): return - NotificationType.objects.get(name=notification_type).emit( + NotificationType.objects.get( + name=notification_type + ).emit( user=contributor, event_context={ 'user': contributor.id, From 3c6dfa80b4b64df1518c72d38a49f6e401c3fae7 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 17:22:51 -0400 Subject: [PATCH 146/176] fix addons and campaign tests --- framework/auth/campaigns.py | 28 +++++++++++++++++++--------- framework/auth/views.py | 3 ++- osf/models/notification_type.py | 7 +++++++ tests/test_addons.py | 1 + 4 files changed, 29 insertions(+), 10 deletions(-) diff --git a/framework/auth/campaigns.py b/framework/auth/campaigns.py index 74445e6c259..6b484e9ae18 100644 --- a/framework/auth/campaigns.py +++ b/framework/auth/campaigns.py @@ -3,8 +3,8 @@ from django.utils import timezone -from website import mails, settings -from osf.models import PreprintProvider +from website import settings +from osf.models import PreprintProvider, NotificationType from website.settings import DOMAIN, CAMPAIGN_REFRESH_THRESHOLD from website.util.metrics import OsfSourceTags, OsfClaimedTags, CampaignSourceTags, CampaignClaimedTags, provider_source_tag from framework.utils import throttle_period_expired @@ -26,7 +26,7 @@ def get_campaigns(): 'erpc': { 'system_tag': CampaignSourceTags.ErpChallenge.value, 'redirect_url': furl(DOMAIN).add(path='erpc/').url, - 'confirmation_email_template': mails.CONFIRM_EMAIL_ERPC, + 'confirmation_email_template': NotificationType.Type.USER_CAMPAIGN_CONFIRM_EMAIL_ERPC, 'login_type': 'native', }, } @@ -44,12 +44,13 @@ def get_campaigns(): preprint_providers = PreprintProvider.objects.all() for provider in preprint_providers: if provider._id == 'osf': - template = 'osf' + confirmation_email_template = NotificationType.Type.USER_CAMPAIGN_CONFIRM_PREPRINTS_OSF name = 'OSF' url_path = 'preprints/' external_url = None else: - template = 'branded' + confirmation_email_template = NotificationType.Type.USER_CAMPAIGN_CONFIRM_PREPRINTS_BRANDED + name = provider.name url_path = f'preprints/{provider._id}' external_url = provider.domain @@ -60,7 +61,7 @@ def get_campaigns(): 'system_tag': system_tag, 'redirect_url': furl(DOMAIN).add(path=url_path).url, 'external_url': external_url, - 'confirmation_email_template': mails.CONFIRM_EMAIL_PREPRINTS(template, name), + 'confirmation_email_template': confirmation_email_template, 'login_type': 'proxy', 'provider': name, 'logo': provider._id if name != 'OSF' else settings.OSF_PREPRINTS_LOGO, @@ -73,7 +74,7 @@ def 
get_campaigns(): 'osf-registries': { 'system_tag': provider_source_tag('osf', 'registry'), 'redirect_url': furl(DOMAIN).add(path='registries/').url, - 'confirmation_email_template': mails.CONFIRM_EMAIL_REGISTRIES_OSF, + 'confirmation_email_template': None, 'login_type': 'proxy', 'provider': 'osf', 'logo': settings.OSF_REGISTRIES_LOGO @@ -84,18 +85,27 @@ def get_campaigns(): 'osf-registered-reports': { 'system_tag': CampaignSourceTags.OsfRegisteredReports.value, 'redirect_url': furl(DOMAIN).add(path='rr/').url, - 'confirmation_email_template': mails.CONFIRM_EMAIL_REGISTRIES_OSF, + 'confirmation_email_template': NotificationType.Type.USER_CAMPAIGN_CONFIRM_EMAIL_REGISTRIES_OSF, 'login_type': 'proxy', 'provider': 'osf', 'logo': settings.OSF_REGISTRIES_LOGO } }) + newest_campaigns.update({ + 'agu_conference_2023': { + 'system_tag': CampaignSourceTags.AguConference2023.value, + 'redirect_url': furl(DOMAIN).add(path='dashboard/').url, + 'confirmation_email_template': NotificationType.Type.USER_CAMPAIGN_CONFIRM_EMAIL_AGU_CONFERENCE_2023, + 'login_type': 'native', + } + }) + newest_campaigns.update({ 'agu_conference': { 'system_tag': CampaignSourceTags.AguConference.value, 'redirect_url': furl(DOMAIN).add(path='dashboard/').url, - 'confirmation_email_template': mails.CONFIRM_EMAIL_AGU_CONFERENCE, + 'confirmation_email_template': NotificationType.Type.USER_CAMPAIGN_CONFIRM_EMAIL_AGU_CONFERENCE, 'login_type': 'native', } }) diff --git a/framework/auth/views.py b/framework/auth/views.py index 81b362532e9..35e913949c9 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -844,6 +844,8 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte notification_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_CREATE elif user.external_identity[external_id_provider][external_id] == 'LINK': notification_type = NotificationType.Type.USER_EXTERNAL_LOGIN_CONFIRM_EMAIL_LINK + else: + raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={}) elif merge_target: # Merge account confirmation notification_type = NotificationType.Type.USER_CONFIRM_MERGE @@ -857,7 +859,6 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte # Account creation confirmation: from OSF notification_type = NotificationType.Type.USER_INITIAL_CONFIRM_EMAIL - print(notification_type) NotificationType.objects.get(name=notification_type).emit( user=user, event_context={ diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 6d9fe407d93..134809c63b4 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -67,6 +67,13 @@ class Type(str, Enum): USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'user_contributor_added_access_request' USER_ARCHIVE_JOB_UNCAUGHT_ERROR = 'user_archive_job_uncaught_error' + USER_CAMPAIGN_CONFIRM_PREPRINTS_BRANDED = 'user_campaign_confirm_preprint_branded' + USER_CAMPAIGN_CONFIRM_PREPRINTS_OSF = 'user_campaign_confirm_preprint_osf' + USER_CAMPAIGN_CONFIRM_EMAIL_AGU_CONFERENCE = 'user_campaign_confirm_email_agu_conference' + USER_CAMPAIGN_CONFIRM_EMAIL_AGU_CONFERENCE_2023 = 'user_campaign_confirm_email_agu_conference_2023' + USER_CAMPAIGN_CONFIRM_EMAIL_REGISTRIES_OSF = 'user_campaign_confirm_email_registries_osf' + USER_CAMPAIGN_CONFIRM_EMAIL_ERPC = 'user_campaign_confirm_email_erpc' + # Node notifications NODE_COMMENT = 'node_comments' NODE_FILES_UPDATED = 'node_files_updated' diff --git a/tests/test_addons.py b/tests/test_addons.py index aaf4de9cc6c..5ba35b6c760 100644 --- 
a/tests/test_addons.py +++ b/tests/test_addons.py @@ -1558,6 +1558,7 @@ def test_delete_action_for_folder_deletes_subfolders_and_creates_trashed_file_no ) subfolder.save() payload = { + 'action': 'file_removed', 'provider': file_node.provider, 'metadata': { 'path': '/test/', From 24c503b607cafac160999d43804ba8c85f6e9d29 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 18:02:20 -0400 Subject: [PATCH 147/176] fix schema response tests --- osf_tests/test_schema_responses.py | 32 ++++++++++++++++++------------ osf_tests/utils.py | 18 ----------------- tests/test_spam_mixin.py | 4 ++-- 3 files changed, 21 insertions(+), 33 deletions(-) diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index f3f831224c6..c924aebcd17 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -254,10 +254,13 @@ def test_create_from_previous_response_notification( with capture_notifications() as notifications: schema_response.SchemaResponse.create_from_previous_response( - previous_response=initial_response, initiator=admin_user + previous_response=initial_response, + initiator=admin_user ) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert len(notifications) == len(notification_recipients) + assert all(notification['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_INITIATED + for notification in notifications) + assert all(notification['kwargs']['user'].username in notification_recipients for notification in notifications) @pytest.mark.parametrize( 'invalid_response_state', @@ -580,8 +583,8 @@ def test_submit_response_notification( with capture_notifications() as notifications: revised_response.submit(user=admin_user, required_approvers=[admin_user]) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert len(notifications) == 3 + assert any(notification['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_SUBMITTED for notification in notifications) def test_no_submit_notification_on_initial_response(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.IN_PROGRESS) @@ -681,8 +684,8 @@ def test_approve_response_notification( assert not notifications # Should only send email on final approval with capture_notifications() as notifications: revised_response.approve(user=alternate_user) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert len(notifications) == 3 + assert all(notification['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED for notification in notifications) def test_no_approve_notification_on_initial_response(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -749,8 +752,9 @@ def test_reject_response_notification( with capture_notifications() as notifications: revised_response.reject(user=admin_user) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert len(notifications) == 3 + assert all(notification['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_REJECTED + for notification in notifications) def test_no_reject_notification_on_initial_response(self, initial_response, admin_user): initial_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -909,8 +913,9 @@ def 
test_moderator_accept_notification( with capture_notifications() as notifications: revised_response.accept(user=moderator) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert len(notifications) == 3 + assert all(notification['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_INITIATED + for notification in notifications) def test_no_moderator_accept_notification_on_initial_response( self, initial_response, moderator): @@ -949,8 +954,9 @@ def test_moderator_reject_notification( with capture_notifications() as notifications: revised_response.reject(user=moderator) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert len(notifications) == 3 + assert all(notification['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_REJECTED + for notification in notifications) def test_no_moderator_reject_notification_on_initial_response( self, initial_response, moderator): diff --git a/osf_tests/utils.py b/osf_tests/utils.py index 884c4249de9..adb00482168 100644 --- a/osf_tests/utils.py +++ b/osf_tests/utils.py @@ -218,21 +218,3 @@ def get_default_test_schema(): create_schema_blocks_for_atomic_schema(test_schema) return test_schema - -def assert_notification_correctness(send_mail_mock, expected_template, expected_recipients): - '''Confirms that a mocked send_mail function contains the appropriate calls.''' - assert send_mail_mock.call_count == len(expected_recipients) - - recipients = set() - templates = set() - for _, call_kwargs in send_mail_mock.call_args_list: - recipients.add(call_kwargs['to_addr']) - templates.add(call_kwargs['mail']) - - assert recipients == expected_recipients - - try: - assert templates == {expected_template} - except AssertionError: # the non-static subject attributes mean we need a different comparison - assert {template.tpl_prefix for template in list(templates)} == {expected_template.tpl_prefix} - assert {template._subject for template in list(templates)} == {expected_template._subject} diff --git a/tests/test_spam_mixin.py b/tests/test_spam_mixin.py index af509272425..59b04ec1fa9 100644 --- a/tests/test_spam_mixin.py +++ b/tests/test_spam_mixin.py @@ -26,8 +26,8 @@ def test_throttled_autoban(): proj.flag_spam() proj.save() projects.append(proj) - assert len(notifications) == 7 - assert notifications[0]['type'] == NotificationType.Type.USER_CONFIRM_EMAIL + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.USER_SPAM_BANNED user.reload() assert user.is_disabled for project in projects: From 94734fdf02848b4749e21c784dc77a4e785e475e Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 18:50:38 -0400 Subject: [PATCH 148/176] clean-up sanction code --- notifications.yaml | 13 +++++++ osf/models/notification_type.py | 1 + osf/models/sanctions.py | 57 +++++++++--------------------- website/notifications/listeners.py | 22 ++++++++++++ 4 files changed, 52 insertions(+), 41 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index 8537e269fa5..d9a8d173e88 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -206,6 +206,11 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/archive_uncaught_error_user.html.mako' + - name: user_new_public_project + subject: 'Problem Registering' + __docs__: ... 
+ object_content_type_model_name: osfuser + template: 'website/templates/emails/archive_uncaught_error_user.html.mako' #### PROVIDER - name: provider_new_pending_submissions @@ -271,6 +276,14 @@ notification_types: __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_embargo_non_admin.html.mako' + - name: node_pending_embargo_termination_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/pending_embargo_termination_admin.html.mako' + - name: node_pending_embargo_termination_non_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/pending_embargo_termination_non_admin.html.mako' - name: node_affiliation_changed __docs__: ... object_content_type_model_name: abstractnode diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 134809c63b4..08435b7441e 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -66,6 +66,7 @@ class Type(str, Enum): USER_CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = 'user_contributor_added_preprint_node_from_osf' USER_CONTRIBUTOR_ADDED_ACCESS_REQUEST = 'user_contributor_added_access_request' USER_ARCHIVE_JOB_UNCAUGHT_ERROR = 'user_archive_job_uncaught_error' + USER_NEW_PUBLIC_PROJECT = 'user_new_public_project' USER_CAMPAIGN_CONFIRM_PREPRINTS_BRANDED = 'user_campaign_confirm_preprint_branded' USER_CAMPAIGN_CONFIRM_PREPRINTS_OSF = 'user_campaign_confirm_preprint_osf' diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index a5b19f3a917..c3e76a5dddf 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -345,8 +345,6 @@ class Meta: class EmailApprovableSanction(TokenApprovableSanction): - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = None - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = None VIEW_URL_TEMPLATE = '' APPROVE_URL_TEMPLATE = '' @@ -375,12 +373,6 @@ def _format_or_empty(template, context): return template.format(**context) return '' - def _get_authoriser_notification_type(self): - return None - - def _get_non_authoriser_notification_type(self): - return None - def _view_url(self, user_id, node): return self._format_or_empty(self.VIEW_URL_TEMPLATE, self._view_url_context(user_id, node)) @@ -414,22 +406,20 @@ def _email_template_context(self, user, node, is_authorizer=False): return {} def _notify_authorizer(self, authorizer, node): - if notification_type := self._get_authoriser_notification_type(): - notification_type.emit( + return NotificationType.objects.get(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE).emit( + user=authorizer, + event_context=self._email_template_context( authorizer, - event_context=self._email_template_context( - authorizer, - node, - is_authorizer=True - ) + node, + is_authorizer=True ) + ) def _notify_non_authorizer(self, user, node): - if notification_type := self._get_authoriser_notification_type(): - notification_type.emit( - user, - event_context=self._email_template_context(user, node) - ) + return NotificationType.objects.get(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE).emit( + user=user, + event_context=self._email_template_context(user, node) + ) def ask(self, group): """ @@ -478,8 +468,8 @@ class Embargo(SanctionCallbackMixin, EmailApprovableSanction): DISPLAY_NAME = 'Embargo' SHORT_NAME = 'embargo' - AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_embargo_admin' - NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_embargo_non_admin' + AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_EMBARGO_ADMIN + 
NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_EMBARGO_NON_ADMIN VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -513,12 +503,6 @@ def embargo_end_date(self): def pending_registration(self): return not self.for_existing_registration and self.is_pending_approval - def _get_authoriser_notification_type(self): - return NotificationType.objects.get(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE) - - def _get_non_authoriser_notification_type(self): - return NotificationType.objects.get(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE) - def _get_registration(self): return self.registrations.first() @@ -785,11 +769,8 @@ class RegistrationApproval(SanctionCallbackMixin, EmailApprovableSanction): DISPLAY_NAME = 'Approval' SHORT_NAME = 'registration_approval' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_REGISTRATION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_REGISTRATION_NON_ADMIN - - AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_pending_registration_admin' - NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = 'node_pending_registration_non_admin' + AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_REGISTRATION_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_REGISTRATION_NON_ADMIN VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' @@ -809,12 +790,6 @@ def find_approval_backlog(): guid=models.F('_id') ).order_by('-initiation_date') - def _get_authoriser_notification_type(self): - return NotificationType.objects.get(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE) - - def _get_non_authoriser_notification_type(self): - return NotificationType.objects.get(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE) - def _get_registration(self): return self.registrations.first() @@ -961,8 +936,8 @@ class EmbargoTerminationApproval(EmailApprovableSanction): DISPLAY_NAME = 'Embargo Termination Request' SHORT_NAME = 'embargo_termination_approval' - AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_EMBARGO_TERMINATION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = NotificationType.Type.NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_EMBARGO_TERMINATION_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index 2ed837308bb..1395b606592 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -6,6 +6,8 @@ from osf.models import NotificationSubscription, NotificationType from website.project.signals import contributor_added, project_created from framework.auth.signals import user_confirmed +from website.project.signals import privacy_set_public +from website import settings logger = logging.getLogger(__name__) @@ -56,3 +58,23 @@ def subscribe_confirmed_user(user): user=user, notification_type=NotificationType.objects.get(name=NotificationType.Type.USER_REVIEWS) ) + + +@privacy_set_public.connect +def queue_first_public_project_email(user, node): + """Queue and email after user has made their first + non-OSF4M project public. 
+ """ + NotificationType.objects.get( + name=NotificationType.Type.USER_NEW_PUBLIC_PROJECT, + ).emit( + user=user, + event_context={ + 'node': node, + 'user': user, + 'nid': node._id, + 'fullname': user.fullname, + 'project_title': node.title, + 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + } + ) From 51df115072b15ea9e202619b215cb448cbd033d6 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 18:56:14 -0400 Subject: [PATCH 149/176] remove mails and copy over subject lines --- notifications.yaml | 7 ++++++- osf/models/node.py | 2 +- osf/models/sanctions.py | 3 +++ tests/test_preprints.py | 2 +- tests/test_resend_confirmation.py | 2 +- website/mails/mails.py | 24 ------------------------ website/notifications/listeners.py | 4 +--- 7 files changed, 13 insertions(+), 31 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index d9a8d173e88..952dd62be67 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -34,6 +34,7 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_draft_registration.html.mako' - name: user_contributor_added_osf_preprint + subject: 'You have been added as a contributor to an OSF preprint.' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/contributor_added_preprint_node_from_osf.html.mako' @@ -67,6 +68,7 @@ notification_types: object_content_type_model_name: osfuser template: 'website/templates/emails/institution_deactivation.html.mako' - name: user_invite_preprints_osf + subject: 'You have been added as a contributor to an OSF preprint.' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_preprints_osf.html.mako' @@ -74,11 +76,13 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_preprints.html.mako' - - name: invite_draft_registration + - name: user_invite_draft_registration + subject: 'You have a new registration draft' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_draft_registration.html.mako' - name: user_invite_default + subject: 'You have been added as a contributor to an OSF project.' __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/invite_default.html.mako' @@ -257,6 +261,7 @@ notification_types: object_content_type_model_name: abstractnode template: 'website/templates/emails/node_request_institutional_access_request.html.mako' - name: node_contributor_added_default + subject: 'You have been added as a contributor to an OSF project.' __docs__: This email notifies the user that they have been added as a contributor to a node. 
object_content_type_model_name: abstractnode template: 'website/templates/emails/contributor_added_default.html.mako' diff --git a/osf/models/node.py b/osf/models/node.py index d6b35c81335..6ee777037da 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -1245,7 +1245,7 @@ def set_privacy(self, permissions, auth=None, log=True, save=True, meeting_creat if save: self.save() if auth and permissions == 'public': - project_signals.privacy_set_public.send(auth.user, node=self, meeting_creation=meeting_creation) + project_signals.privacy_set_public.send(auth.user, node=self) return True @property diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index c3e76a5dddf..9f072eaaeb4 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -650,6 +650,9 @@ class Retraction(EmailApprovableSanction): DISPLAY_NAME = 'Retraction' SHORT_NAME = 'retraction' + AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_REGISTRATION_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_REGISTRATION_NON_ADMIN + VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' REJECT_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' diff --git a/tests/test_preprints.py b/tests/test_preprints.py index 6f1eda5876b..b3c97ece060 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -999,7 +999,7 @@ def test_check_spam_on_private_preprint_bans_new_spam_user(self, preprint, user) @mock.patch('website.mailchimp_utils.unsubscribe_mailchimp') @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) - def test_check_spam_on_private_preprint_does_not_ban_existing_user(self, preprint, user): + def test_check_spam_on_private_preprint_does_not_ban_existing_user(self, mock_mailchimp, preprint, user): preprint.is_public = False preprint.save() with mock.patch('osf.models.Preprint._get_spam_content', mock.Mock(return_value='some content!')): diff --git a/tests/test_resend_confirmation.py b/tests/test_resend_confirmation.py index 95609e5ad76..53fd2ba25f2 100644 --- a/tests/test_resend_confirmation.py +++ b/tests/test_resend_confirmation.py @@ -63,7 +63,7 @@ def test_cannot_receive_resend_confirmation_email_2(self): with capture_notifications() as notifications: res = form.submit(self.app) # check email, request and response - assert notifications + assert not notifications assert res.status_code == 200 assert res.request.path == self.post_url assert_in_html('If there is an OSF account', res.text) diff --git a/website/mails/mails.py b/website/mails/mails.py index db684f7e84f..4d47b52b5cb 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -100,38 +100,14 @@ def get_english_article(word): # Contributor added confirmation emails -INVITE_DEFAULT = Mail( - 'invite_default', - subject='You have been added as a contributor to an OSF project.' -) -INVITE_OSF_PREPRINT = Mail( - 'invite_preprints_osf', - subject='You have been added as a contributor to an OSF preprint.' -) INVITE_PREPRINT = lambda provider: Mail( 'invite_preprints', subject=f'You have been added as a contributor to {get_english_article(provider.name)} {provider.name} {provider.preprint_word}.' 
) -INVITE_DRAFT_REGISTRATION = Mail( - 'invite_draft_registration', - subject='You have a new registration draft' -) -CONTRIBUTOR_ADDED_DEFAULT = Mail( - 'contributor_added_default', - subject='You have been added as a contributor to an OSF project.' -) -CONTRIBUTOR_ADDED_OSF_PREPRINT = Mail( - 'contributor_added_preprints_osf', - subject='You have been added as a contributor to an OSF preprint.' -) CONTRIBUTOR_ADDED_PREPRINT = lambda provider: Mail( 'contributor_added_preprints', subject=f'You have been added as a contributor to {get_english_article(provider.name)} {provider.name} {provider.preprint_word}.' ) -CONTRIBUTOR_ADDED_PREPRINT_NODE_FROM_OSF = Mail( - 'contributor_added_preprint_node_from_osf', - subject='You have been added as a contributor to an OSF project.' -) MODERATOR_ADDED = lambda provider: Mail( 'moderator_added', subject=f'You have been added as a moderator for {provider.name}' diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index 1395b606592..871d6d56792 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -70,11 +70,9 @@ def queue_first_public_project_email(user, node): ).emit( user=user, event_context={ - 'node': node, - 'user': user, 'nid': node._id, 'fullname': user.fullname, 'project_title': node.title, - 'osf_support_email': settings.OSF_SUPPORT_EMAIL, + 'osf_url': settings.DOMAIN, } ) From 5ec04a074cba829bd9d7a2bb6360ed0ca9c7cc3b Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 19:56:02 -0400 Subject: [PATCH 150/176] remove old comment based notification tests --- notifications.yaml | 82 ++- osf/models/notification_type.py | 1 + osf/models/sanctions.py | 34 +- osf_tests/test_comment.py | 685 +------------------ osf_tests/test_node.py | 4 +- tests/test_preprints.py | 2 +- tests/test_registrations/test_retractions.py | 4 +- website/mails/mails.py | 4 - 8 files changed, 97 insertions(+), 719 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index 952dd62be67..0a8b559f10f 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -168,10 +168,6 @@ notification_types: __docs__: ... object_content_type_model_name: osfuser template: 'website/templates/emails/external_confirm_create.html.mako' - - name: user_primary_email_changed - __docs__: ... - object_content_type_model_name: osfuser - template: 'website/templates/emails/primary_email_changed.html.mako' - name: user_spam_banned __docs__: ... object_content_type_model_name: osfuser @@ -215,6 +211,42 @@ notification_types: __docs__: ... 
object_content_type_model_name: osfuser template: 'website/templates/emails/archive_uncaught_error_user.html.mako' + - name: user_confirm_email_erpc + subject: 'OSF Account Verification, Election Research Preacceptance Competition' + object_content_type_model_name: osfuser + template: 'website/templates/emails/confirm_erpc.html.mako' + - name: user_confirm_email_agu_conference + subject: 'OSF Account Verification, from the American Geophysical Union Conference' + object_content_type_model_name: osfuser + template: 'website/templates/emails/confirm_agu_conference.html.mako' + - name: user_confirm_email_registries_osf + subject: 'OSF Account Verification, OSF Registries' + object_content_type_model_name: osfuser + template: 'website/templates/emails/confirm_registries_osf.html.mako' + - name: user_confirm_merge + subject: 'Confirm account merge' + object_content_type_model_name: osfuser + template: 'website/templates/emails/confirm_merge.html.mako' + - name: user_primary_email_changed + subject: 'Primary email changed' + object_content_type_model_name: osfuser + template: 'website/templates/emails/primary_email_changed.html.mako' + - name: user_spam_files_detected + subject: '[auto] Spam files audit' + object_content_type_model_name: osfuser + template: 'website/templates/emails/spam_files_detected.html.mako' + - name: user_crossref_doi_pending + subject: 'There are ${pending_doi_count} preprints with crossref DOI pending.' + object_content_type_model_name: osfuser + template: 'website/templates/emails/crossref_doi_pending.html.mako' + - name: user_terms_of_use_updated + subject: 'Updated Terms of Use for COS Websites and Services' + object_content_type_model_name: osfuser + template: 'website/templates/emails/tou_notif.html.mako' + - name: user_registration_bulk_upload_product_owner + subject: 'Registry Could Not Bulk Upload Registrations' + object_content_type_model_name: osfuser + template: 'website/templates/emails/registration_bulk_upload_product_owner.html.mako' #### PROVIDER - name: provider_new_pending_submissions @@ -273,11 +305,19 @@ notification_types: __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_registration_admin.html.mako' - - name: node_embargo_admin + - name: node_pending_retraction_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/pending_retraction_admin.html.mako' + - name: node_pending_retraction_non_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/pending_retraction_non_admin.html.mako' + - name: node_pending_embargo_admin __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_embargo_admin.html.mako' - - name: node_embargo_nonadmin + - name: node_pending_embargo_non_admin __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_embargo_non_admin.html.mako' @@ -330,6 +370,26 @@ notification_types: __docs__: ... 
object_content_type_model_name: abstractnode template: 'website/templates/emails/updates_rejected.html.mako' + - name: node_archive_file_not_found_desk + subject: 'Problem registering ${unescape_entities(src.title)}' + object_content_type_model_name: abstractnode + template: 'website/templates/emails/archive_file_not_found_desk.html.mako' + - name: node_archive_file_not_found_user + subject: 'Registration failed because of altered files' + object_content_type_model_name: abstractnode + template: 'website/templates/emails/archive_file_not_found_user.html.mako' + - name: node_archive_uncaught_error_desk + subject: 'Problem registering ${unescape_entities(src.title)}' + object_content_type_model_name: abstractnode + template: 'website/templates/emails/archive_uncaught_error_desk.html.mako' + - name: node_archive_registration_stuck_desk + subject: '[auto] Stuck registrations audit' + object_content_type_model_name: abstractnode + template: 'website/templates/emails/archive_registration_stuck_desk.html.mako' + - name: node_archive_success + subject: 'Registration of ${unescape_entities(src.title)} complete' + object_content_type_model_name: abstractnode + template: 'website/templates/emails/archive_success.html.mako' #### PREPRINT - name: preprint_contributor_added_preprint_node_from_osf @@ -349,6 +409,16 @@ notification_types: __docs__: ... object_content_type_model_name: preprint template: 'website/templates/emails/contributor_added_preprints.html.mako' + - name: preprint_withdrawal_request_granted + subject: 'Your ${document_type} has been withdrawn' + object_content_type_model_name: preprint + template: 'website/templates/emails/withdrawal_request_granted.html.mako' + - name: preprint_withdrawal_request_declined + subject: 'Your withdrawal request has been declined' + object_content_type_model_name: preprint + template: 'website/templates/emails/withdrawal_request_declined.html.mako' + + #### SUPPORT - name: crossref_error __docs__: ... 
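
Note on usage: the YAML entries above only declare a notification's name, subject, and template; sending one goes through the NotificationType.emit() path reworked earlier in this series (PATCH 142). Below is a minimal sketch, not part of the patch, of how such an entry is consumed at runtime. The helper name, the event_context keys, and the support address are illustrative assumptions; only the type name and the emit() keyword arguments are taken from the diffs in this series.

    from osf.models import NotificationType

    def notify_contributor_added(contributor, node, support_address=None):
        # Look the type up by the name declared in notifications.yaml and emit it for one user.
        NotificationType.objects.get(
            name=NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT,
        ).emit(
            user=contributor,
            # Assumed context keys; they must match what the mako template expects.
            event_context={'user': contributor.id, 'node': node._id},
            # Optional overrides described in NotificationSubscription.emit (PATCH 142):
            # redirect delivery to a test/support address and set extra email headers.
            destination_address=support_address,
            email_context={'reply_to': support_address} if support_address else None,
        )
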
diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 08435b7441e..48f82cbb685 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -89,6 +89,7 @@ class Type(str, Enum): NODE_PENDING_EMBARGO_ADMIN = 'node_pending_embargo_admin' NODE_PENDING_EMBARGO_NON_ADMIN = 'node_pending_embargo_non_admin' NODE_PENDING_RETRACTION_NON_ADMIN = 'node_pending_retraction_non_admin' + NODE_PENDING_RETRACTION_ADMIN = 'node_pending_retraction_admin' NODE_PENDING_REGISTRATION_NON_ADMIN = 'node_pending_registration_non_admin' NODE_PENDING_REGISTRATION_ADMIN = 'node_pending_registration_admin' NODE_PENDING_EMBARGO_TERMINATION_NON_ADMIN = 'node_pending_embargo_termination_non_admin' diff --git a/osf/models/sanctions.py b/osf/models/sanctions.py index 9f072eaaeb4..a0f12bcc6d9 100644 --- a/osf/models/sanctions.py +++ b/osf/models/sanctions.py @@ -405,22 +405,6 @@ def _send_approval_request_email(self, user, template, context): def _email_template_context(self, user, node, is_authorizer=False): return {} - def _notify_authorizer(self, authorizer, node): - return NotificationType.objects.get(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE).emit( - user=authorizer, - event_context=self._email_template_context( - authorizer, - node, - is_authorizer=True - ) - ) - - def _notify_non_authorizer(self, user, node): - return NotificationType.objects.get(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE).emit( - user=user, - event_context=self._email_template_context(user, node) - ) - def ask(self, group): """ :param list group: List of (user, node) tuples containing contributors to notify about the @@ -430,9 +414,19 @@ def ask(self, group): return for contrib, node in group: if contrib._id in self.approval_state: - self._notify_authorizer(contrib, node) + return NotificationType.objects.get(name=self.AUTHORIZER_NOTIFY_EMAIL_TYPE).emit( + user=contrib, + event_context=self._email_template_context( + contrib, + node, + is_authorizer=True + ) + ) else: - self._notify_non_authorizer(contrib, node) + return NotificationType.objects.get(name=self.NON_AUTHORIZER_NOTIFY_EMAIL_TYPE).emit( + user=contrib, + event_context=self._email_template_context(contrib, node) + ) def add_authorizer(self, user, node, **kwargs): super().add_authorizer(user, node, **kwargs) @@ -650,8 +644,8 @@ class Retraction(EmailApprovableSanction): DISPLAY_NAME = 'Retraction' SHORT_NAME = 'retraction' - AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_REGISTRATION_ADMIN - NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_REGISTRATION_NON_ADMIN + AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_RETRACTION_ADMIN + NON_AUTHORIZER_NOTIFY_EMAIL_TYPE = NotificationType.Type.NODE_PENDING_RETRACTION_NON_ADMIN VIEW_URL_TEMPLATE = VIEW_PROJECT_URL_TEMPLATE APPROVE_URL_TEMPLATE = osf_settings.DOMAIN + 'token_action/{node_id}/?token={token}' diff --git a/osf_tests/test_comment.py b/osf_tests/test_comment.py index 62a295367fd..27cd5bced2a 100644 --- a/osf_tests/test_comment.py +++ b/osf_tests/test_comment.py @@ -3,20 +3,10 @@ import pytest import datetime from django.utils import timezone -from collections import OrderedDict - -from addons.box.models import BoxFile -from addons.dropbox.models import DropboxFile -from addons.github.models import GithubFile -from addons.googledrive.models import GoogleDriveFile -from addons.osfstorage.models import OsfStorageFile -from addons.s3.models import S3File from website import settings -from addons.osfstorage import settings as 
osfstorage_settings -from website.project.views.comment import update_file_guid_referent from framework.exceptions import PermissionsError from tests.base import capture_signals -from osf.models import Comment, NodeLog, Guid, BaseFileNode +from osf.models import Comment, NodeLog, Guid from osf.utils import permissions from framework.auth.core import Auth from .factories import ( @@ -395,676 +385,3 @@ def test_find_unread_does_not_include_deleted_comments(self): CommentFactory(node=project, user=project.creator, is_deleted=True) n_unread = Comment.find_n_unread(user=user, node=project, page='node') assert n_unread == 0 - - -# copied from tests/test_comments.py -class FileCommentMoveRenameTestMixin: - id_based_providers = ['osfstorage'] - - @pytest.fixture() - def project(self, user): - p = ProjectFactory(creator=user) - p_settings = p.get_or_add_addon(self.provider, Auth(user)) - p_settings.folder = '/Folder1' - p_settings.save() - p.save() - return p - - @pytest.fixture() - def component(self, user, project): - c = NodeFactory(parent=project, creator=user) - c_settings = c.get_or_add_addon(self.provider, Auth(user)) - c_settings.folder = '/Folder2' - c_settings.save() - c.save() - return c - - @property - def provider(self): - raise NotImplementedError - - @property - def ProviderFile(self): - raise NotImplementedError - - @classmethod - def _format_path(cls, path, file_id=None): - return path - - def _create_source_payload(self, path, node, provider, file_id=None): - return OrderedDict([('materialized', path), - ('name', path.split('/')[-1]), - ('nid', node._id), - ('path', self._format_path(path, file_id)), - ('provider', provider), - ('url', '/project/{}/files/{}/{}/'.format(node._id, provider, path.strip('/'))), - ('node', {'url': f'/{node._id}/', '_id': node._id, 'title': node.title}), - ('addon', provider)]) - - def _create_destination_payload(self, path, node, provider, file_id, children=None): - destination_path = PROVIDER_CLASS.get(provider)._format_path(path=path, file_id=file_id) - destination = OrderedDict([('contentType', ''), - ('etag', 'abcdefghijklmnop'), - ('extra', OrderedDict([('revisionId', '12345678910')])), - ('kind', 'file'), - ('materialized', path), - ('modified', 'Tue, 02 Feb 2016 17:55:48 +0000'), - ('name', path.split('/')[-1]), - ('nid', node._id), - ('path', destination_path), - ('provider', provider), - ('size', 1000), - ('url', '/project/{}/files/{}/{}/'.format(node._id, provider, path.strip('/'))), - ('node', {'url': f'/{node._id}/', '_id': node._id, 'title': node.title}), - ('addon', provider)]) - if children: - destination_children = [self._create_destination_payload(child['path'], child['node'], child['provider'], file_id) for child in children] - destination.update({'children': destination_children}) - return destination - - def _create_payload(self, action, user, source, destination, file_id, destination_file_id=None): - return OrderedDict([ - ('action', action), - ('auth', OrderedDict([('email', user.username), ('id', user._id), ('name', user.fullname)])), - ('destination', self._create_destination_payload(path=destination['path'], - node=destination['node'], - provider=destination['provider'], - file_id=destination_file_id or file_id, - children=destination.get('children', []))), - ('source', self._create_source_payload(source['path'], source['node'], source['provider'], file_id=file_id)), - ('time', 100000000), - ('node', source['node']), - ('project', None) - ]) - - def _create_file_with_comment(self, node, path, user): - self.file = 
self.ProviderFile.create( - target=node, - path=path, - name=path.strip('/'), - materialized_path=path) - self.file.save() - self.guid = self.file.get_guid(create=True) - self.comment = CommentFactory(user=user, node=node, target=self.guid) - - def test_comments_move_on_file_rename(self, project, user): - source = { - 'path': '/file.txt', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/file_renamed.txt', - 'node': project, - 'provider': self.provider - } - self._create_file_with_comment(node=source['node'], path=source['path'], user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_on_folder_rename(self, project, user): - source = { - 'path': '/subfolder1/', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/subfolder2/', - 'node': project, - 'provider': self.provider - } - file_name = 'file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_on_subfolder_file_when_parent_folder_is_renamed(self, project, user): - source = { - 'path': '/subfolder1/', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/subfolder2/', - 'node': project, - 'provider': self.provider - } - file_path = 'sub-subfolder/file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_path), user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_path), file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_file_moved_to_subfolder(self, project, user): - source = { - 'path': '/file.txt', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/subfolder/file.txt', - 'node': project, - 'provider': self.provider - } - self._create_file_with_comment(node=source['node'], path=source['path'], user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node 
= BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_file_moved_from_subfolder_to_root(self, project, user): - source = { - 'path': '/subfolder/file.txt', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/file.txt', - 'node': project, - 'provider': self.provider - } - self._create_file_with_comment(node=source['node'], path=source['path'], user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_file_moved_from_project_to_component(self, project, component, user): - source = { - 'path': '/file.txt', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/file.txt', - 'node': component, - 'provider': self.provider - } - self._create_file_with_comment(node=source['node'], path=source['path'], user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - assert self.guid.referent.target._id == destination['node']._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_file_moved_from_component_to_project(self, project, component, user): - source = { - 'path': '/file.txt', - 'node': component, - 'provider': self.provider - } - destination = { - 'path': '/file.txt', - 'node': project, - 'provider': self.provider - } - self._create_file_with_comment(node=source['node'], path=source['path'], user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - assert self.guid.referent.target._id == destination['node']._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_folder_moved_to_subfolder(self, user, project): - source = { - 'path': '/subfolder/', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/subfolder2/subfolder/', - 'node': project, - 'provider': self.provider - } - file_name = 'file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) - payload = self._create_payload('move', 
user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_folder_moved_from_subfolder_to_root(self, project, user): - source = { - 'path': '/subfolder2/subfolder/', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/subfolder/', - 'node': project, - 'provider': self.provider - } - file_name = 'file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_folder_moved_from_project_to_component(self, project, component, user): - source = { - 'path': '/subfolder/', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/subfolder/', - 'node': component, - 'provider': self.provider - } - file_name = 'file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_folder_moved_from_component_to_project(self, project, component, user): - source = { - 'path': '/subfolder/', - 'node': component, - 'provider': self.provider - } - destination = { - 'path': '/subfolder/', - 'node': project, - 'provider': self.provider - } - file_name = 'file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_file_moved_to_osfstorage(self, project, user): - osfstorage = project.get_addon('osfstorage') - root_node = 
osfstorage.get_root() - osf_file = root_node.append_file('file.txt') - osf_file.create_version(user, { - 'object': '06d80e', - 'service': 'cloud', - osfstorage_settings.WATERBUTLER_RESOURCE: 'osf', - }, { - 'size': 1337, - 'contentType': 'img/png', - 'etag': 'abcdefghijklmnop' - }).save() - - source = { - 'path': '/file.txt', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': osf_file.path, - 'node': project, - 'provider': 'osfstorage' - } - self._create_file_with_comment(node=source['node'], path=source['path'], user=user) - payload = self._create_payload('move', user, source, destination, self.file._id, destination_file_id=destination['path'].strip('/')) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class('osfstorage', BaseFileNode.FILE).get_or_create(destination['node'], destination['path']) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_folder_moved_to_osfstorage(self, project, user): - osfstorage = project.get_addon('osfstorage') - root_node = osfstorage.get_root() - osf_folder = root_node.append_folder('subfolder') - osf_file = osf_folder.append_file('file.txt') - osf_file.create_version(user, { - 'object': '06d80e', - 'service': 'cloud', - osfstorage_settings.WATERBUTLER_RESOURCE: 'osf', - }, { - 'size': 1337, - 'contentType': 'img/png', - 'etag': '1234567890abcde' - }).save() - - source = { - 'path': '/subfolder/', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/subfolder/', - 'node': project, - 'provider': 'osfstorage', - 'children': [{ - 'path': '/subfolder/file.txt', - 'node': project, - 'provider': 'osfstorage' - }] - } - file_name = 'file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) - payload = self._create_payload('move', user, source, destination, self.file._id, destination_file_id=osf_file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class('osfstorage', BaseFileNode.FILE).get_or_create(destination['node'], osf_file._id) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - @pytest.mark.parametrize( - ['destination_provider', 'destination_path'], - [('box', '/1234567890'), ('dropbox', '/file.txt'), ('github', '/file.txt'), ('googledrive', '/file.txt'), ('s3', '/file.txt')] - ) - def test_comments_move_when_file_moved_to_different_provider(self, destination_provider, destination_path, project, user): - if self.provider == destination_provider: - assert True - return - - project.add_addon(destination_provider, auth=Auth(user)) - project.save() - self.addon_settings = project.get_addon(destination_provider) - self.addon_settings.folder = '/AddonFolder' - self.addon_settings.save() - - source = { - 'path': '/file.txt', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': destination_path, - 'node': project, - 'provider': destination_provider - } - self._create_file_with_comment(node=source['node'], path=source['path'], user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], 
payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(destination_provider, BaseFileNode.FILE).get_or_create(destination['node'], destination['path']) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - @pytest.mark.parametrize( - ['destination_provider', 'destination_path'], - [('box', '/1234567890'), ('dropbox', '/subfolder/file.txt'), ('github', '/subfolder/file.txt'), ('googledrive', '/subfolder/file.txt'), ('s3', '/subfolder/file.txt'), ] - ) - def test_comments_move_when_folder_moved_to_different_provider(self, destination_provider, destination_path, project, user): - if self.provider == destination_provider: - assert True - return - - project.add_addon(destination_provider, auth=Auth(user)) - project.save() - self.addon_settings = project.get_addon(destination_provider) - self.addon_settings.folder = '/AddonFolder' - self.addon_settings.save() - - source = { - 'path': '/', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/subfolder/', - 'node': project, - 'provider': destination_provider, - 'children': [{ - 'path': '/subfolder/file.txt', - 'node': project, - 'provider': destination_provider - }] - } - file_name = 'file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(destination_provider, BaseFileNode.FILE).get_or_create(destination['node'], destination_path) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - -# copied from tests/test_comments.py -class TestOsfstorageFileCommentMoveRename(FileCommentMoveRenameTestMixin): - - provider = 'osfstorage' - ProviderFile = OsfStorageFile - - @classmethod - def _format_path(cls, path, file_id=None): - super()._format_path(path) - return '/{}{}'.format(file_id, ('/' if path.endswith('/') else '')) - - def _create_file_with_comment(self, node, path, user): - osfstorage = node.get_addon(self.provider) - root_node = osfstorage.get_root() - self.file = root_node.append_file('file.txt') - self.file.create_version(user, { - 'object': '06d80e', - 'service': 'cloud', - osfstorage_settings.WATERBUTLER_RESOURCE: 'osf', - }, { - 'size': 1337, - 'contentType': 'img/png', - 'etag': 'abcdefghijklmnop' - }).save() - self.file.materialized_path = path - self.guid = self.file.get_guid(create=True) - self.comment = CommentFactory(user=user, node=node, target=self.guid) - - def test_comments_move_when_file_moved_from_project_to_component(self, project, component, user): - source = { - 'path': '/file.txt', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/file.txt', - 'node': component, - 'provider': self.provider - } - self._create_file_with_comment(node=source['node'], path=source['path'], user=user) - self.file.move_under(destination['node'].get_addon(self.provider).get_root()) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, 
BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - assert self.guid.referent.target._id == destination['node']._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_file_moved_from_component_to_project(self, project, component, user): - source = { - 'path': '/file.txt', - 'node': component, - 'provider': self.provider - } - destination = { - 'path': '/file.txt', - 'node': project, - 'provider': self.provider - } - self._create_file_with_comment(node=source['node'], path=source['path'], user=user) - self.file.move_under(destination['node'].get_addon(self.provider).get_root()) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path(destination['path'], file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - assert self.guid.referent.target._id == destination['node']._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_folder_moved_from_project_to_component(self, project, component, user): - source = { - 'path': '/subfolder/', - 'node': project, - 'provider': self.provider - } - destination = { - 'path': '/subfolder/', - 'node': component, - 'provider': self.provider - } - file_name = 'file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) - self.file.move_under(destination['node'].get_addon(self.provider).get_root()) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_folder_moved_from_component_to_project(self, project, component, user): - source = { - 'path': '/subfolder/', - 'node': component, - 'provider': self.provider - } - destination = { - 'path': '/subfolder/', - 'node': project, - 'provider': self.provider - } - file_name = 'file.txt' - self._create_file_with_comment(node=source['node'], path='{}{}'.format(source['path'], file_name), user=user) - self.file.move_under(destination['node'].get_addon(self.provider).get_root()) - payload = self._create_payload('move', user, source, destination, self.file._id) - update_file_guid_referent(self=None, target=destination['node'], payload=payload) - self.guid.reload() - - file_node = BaseFileNode.resolve_class(self.provider, BaseFileNode.FILE).get_or_create(destination['node'], self._format_path('{}{}'.format(destination['path'], file_name), file_id=self.file._id)) - assert self.guid._id == file_node.get_guid()._id - file_comments = Comment.objects.filter(root_target=self.guid.pk) - assert file_comments.count() == 1 - - def test_comments_move_when_file_moved_to_osfstorage(self): - # Already in 
OSFStorage - pass - - def test_comments_move_when_folder_moved_to_osfstorage(self): - # Already in OSFStorage - pass - -# copied from tests/test_comments.py -class TestBoxFileCommentMoveRename(FileCommentMoveRenameTestMixin): - - provider = 'box' - ProviderFile = BoxFile - - def _create_file_with_comment(self, node, path, user): - self.file = self.ProviderFile.create( - target=node, - path=self._format_path(path), - name=path.strip('/'), - materialized_path=path) - self.file.save() - self.guid = self.file.get_guid(create=True) - self.comment = CommentFactory(user=user, node=node, target=self.guid) - - @classmethod - def _format_path(cls, path, file_id=None): - super()._format_path(path) - return '/9876543210/' if path.endswith('/') else '/1234567890' - - -class TestDropboxFileCommentMoveRename(FileCommentMoveRenameTestMixin): - - provider = 'dropbox' - ProviderFile = DropboxFile - - def _create_file_with_comment(self, node, path, user): - self.file = self.ProviderFile.create( - target=node, - path=f'{node.get_addon(self.provider).folder}{path}', - name=path.strip('/'), - materialized_path=path) - self.file.save() - self.guid = self.file.get_guid(create=True) - self.comment = CommentFactory(user=user, node=node, target=self.guid) - - -class TestGoogleDriveFileCommentMoveRename(FileCommentMoveRenameTestMixin): - - provider = 'googledrive' - ProviderFile = GoogleDriveFile - -class TestGithubFileCommentMoveRename(FileCommentMoveRenameTestMixin): - - provider = 'github' - ProviderFile = GithubFile - -class TestS3FileCommentMoveRename(FileCommentMoveRenameTestMixin): - - provider = 's3' - ProviderFile = S3File - - -PROVIDER_CLASS = { - 'osfstorage': TestOsfstorageFileCommentMoveRename, - 'box': TestBoxFileCommentMoveRename, - 'dropbox': TestDropboxFileCommentMoveRename, - 'github': TestGithubFileCommentMoveRename, - 'googledrive': TestGoogleDriveFileCommentMoveRename, - 's3': TestS3FileCommentMoveRename - -} diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index 3b04ceba292..e6a34c31050 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -2131,14 +2131,14 @@ def test_set_privacy_sends_mail_default(self, node, auth): node.set_privacy('private', auth=auth) node.set_privacy('public', auth=auth) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + assert notifications[0]['type'] == NotificationType.Type.USER_NEW_PUBLIC_PROJECT def test_set_privacy_sends_mail(self, node, auth): with capture_notifications() as notifications: node.set_privacy('private', auth=auth) node.set_privacy('public', auth=auth, meeting_creation=False) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT + assert notifications[0]['type'] == NotificationType.Type.USER_NEW_PUBLIC_PROJECT def test_set_privacy_skips_mail_if_meeting(self, node, auth): with capture_notifications() as notifications: diff --git a/tests/test_preprints.py b/tests/test_preprints.py index b3c97ece060..b1920669a1f 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -996,9 +996,9 @@ def test_check_spam_on_private_preprint_bans_new_spam_user(self, preprint, user) preprint3.reload() assert preprint3.is_public is True - @mock.patch('website.mailchimp_utils.unsubscribe_mailchimp') @mock.patch.object(settings, 'SPAM_SERVICES_ENABLED', True) @mock.patch.object(settings, 'SPAM_ACCOUNT_SUSPENSION_ENABLED', True) + @mock.patch('website.mailchimp_utils.unsubscribe_mailchimp') def 
test_check_spam_on_private_preprint_does_not_ban_existing_user(self, mock_mailchimp, preprint, user): preprint.is_public = False preprint.save() diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index 5874fad6fa6..280b0efd2a3 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -805,7 +805,7 @@ def test_POST_retraction_does_not_send_email_to_unregistered_admins(self): auth=self.user.auth, ) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_PENDING_REGISTRATION_ADMIN def test_POST_pending_embargo_returns_HTTPError_HTTPOK(self): self.registration.embargo_registration( @@ -897,7 +897,7 @@ def test_valid_POST_calls_send_mail_with_username(self): auth=self.user.auth, ) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.NODE_PENDING_RETRACTION_ADMIN def test_non_contributor_GET_approval_returns_HTTPError_FORBIDDEN(self): non_contributor = AuthUserFactory() diff --git a/website/mails/mails.py b/website/mails/mails.py index 4d47b52b5cb..7ca0da552ad 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -133,10 +133,6 @@ def get_english_article(word): 'pending_retraction_non_admin', subject='Withdrawal pending for one of your registrations.' ) -PENDING_RETRACTION_NON_ADMIN = Mail( - 'pending_retraction_non_admin', - subject='Withdrawal pending for one of your projects.' -) # Embargo related Mail objects PENDING_EMBARGO_ADMIN = Mail( 'pending_embargo_admin', From f281e994e3e6d27379b019c1c138dfc6b4c0e51b Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 22:57:59 -0400 Subject: [PATCH 151/176] remove mails.py --- api_tests/mailhog/test_mailhog.py | 5 - conftest.py | 2 - notifications.yaml | 2 +- .../commands/populate_notification_types.py | 2 +- osf_tests/test_archiver.py | 3 - osf_tests/test_merging_users.py | 1 - scripts/create_fakes.py | 1 - tests/base.py | 1 - tests/test_auth.py | 1 - tests/test_auth_views.py | 1 - tests/test_misc_views.py | 1 - tests/test_preprints.py | 1 - tests/test_registrations/test_embargoes.py | 5 +- tests/test_registrations/test_retractions.py | 3 +- website/mails/__init__.py | 1 - website/mails/mails.py | 317 ------------------ website/settings/local-dist.py | 3 - 17 files changed, 5 insertions(+), 345 deletions(-) delete mode 100644 website/mails/__init__.py delete mode 100644 website/mails/mails.py diff --git a/api_tests/mailhog/test_mailhog.py b/api_tests/mailhog/test_mailhog.py index fb9b8fba771..d21a0f37fa9 100644 --- a/api_tests/mailhog/test_mailhog.py +++ b/api_tests/mailhog/test_mailhog.py @@ -12,7 +12,6 @@ fake ) from framework import auth -from unittest import mock from osf.models import OSFUser, NotificationType from tests.base import ( OsfTestCase, @@ -23,7 +22,6 @@ @pytest.mark.django_db class TestMailHog: - @mock.patch('website.mails.settings.ENABLE_TEST_EMAIL', True) def test_mailhog_received_mail(self): with override_switch(features.ENABLE_MAILHOG, active=True): mailhog_v1 = f'{settings.MAILHOG_API_HOST}/api/v1/messages' @@ -33,7 +31,6 @@ def test_mailhog_received_mail(self): NotificationType.objects.get( name=NotificationType.Type.USER_REGISTRATION_BULK_UPLOAD_FAILURE_ALL ).emit( - user=None, destination_address='to_addr@mail.com', event_context={ 'fullname': '', @@ 
-53,8 +50,6 @@ def test_mailhog_received_mail(self): @pytest.mark.django_db -@mock.patch('website.mails.settings.ENABLE_TEST_EMAIL', True) -@mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthMailhog(OsfTestCase): def setUp(self): diff --git a/conftest.py b/conftest.py index b30cb6271a1..79c23380e63 100644 --- a/conftest.py +++ b/conftest.py @@ -35,7 +35,6 @@ def pytest_configure(config): 'framework.auth.core', 'website.app', 'website.archiver.tasks', - 'website.mails', 'website.notifications.listeners', 'website.search.elastic_search', 'website.search_migration.migrate', @@ -66,7 +65,6 @@ def override_settings(): website_settings.SHARE_ENABLED = False # Set this here instead of in SILENT_LOGGERS, in case developers # call setLevel in local.py - logging.getLogger('website.mails.mails').setLevel(logging.CRITICAL) @pytest.fixture() diff --git a/notifications.yaml b/notifications.yaml index 0a8b559f10f..1cc60553ac9 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -218,7 +218,7 @@ notification_types: - name: user_confirm_email_agu_conference subject: 'OSF Account Verification, from the American Geophysical Union Conference' object_content_type_model_name: osfuser - template: 'website/templates/emails/confirm_agu_conference.html.mako' + template: 'website/templates/emails/confirm_erpc.html.mako' - name: user_confirm_email_registries_osf subject: 'OSF Account Verification, OSF Registries' object_content_type_model_name: osfuser diff --git a/osf/management/commands/populate_notification_types.py b/osf/management/commands/populate_notification_types.py index a65b3f081ff..4e5d8921e59 100644 --- a/osf/management/commands/populate_notification_types.py +++ b/osf/management/commands/populate_notification_types.py @@ -21,7 +21,7 @@ def populate_notification_types(*args, **kwargs): with open(settings.NOTIFICATION_TYPES_YAML) as stream: notification_types = yaml.safe_load(stream) for notification_type in notification_types['notification_types']: - notification_type.pop('__docs__') + notification_type.pop('__docs__', None) object_content_type_model_name = notification_type.pop('object_content_type_model_name') if object_content_type_model_name == 'desk': diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index 4d0491b21c9..bc5efc2c3f9 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -720,7 +720,6 @@ def test_archive_success_same_file_in_component(self): assert child_reg._id in question['extra'][0]['viewUrl'] -@mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverUtils(ArchiverTestCase): def test_handle_archive_fail(self): @@ -849,7 +848,6 @@ def test_get_file_map_memoization(self): archiver_utils.get_file_map(node) assert mock_get_file_tree.call_count == call_count -@mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverListeners(ArchiverTestCase): @mock.patch('website.archiver.tasks.archive') @@ -1082,7 +1080,6 @@ def test_find_failed_registrations(self): assert pk not in failed -@mock.patch('website.mails.settings.USE_CELERY', False) class TestArchiverBehavior(OsfTestCase): @mock.patch('osf.models.AbstractNode.update_search') diff --git a/osf_tests/test_merging_users.py b/osf_tests/test_merging_users.py index 9317260fb1b..ce2cd71cbdd 100644 --- a/osf_tests/test_merging_users.py +++ b/osf_tests/test_merging_users.py @@ -28,7 +28,6 @@ @pytest.mark.enable_implicit_clean @pytest.mark.enable_bookmark_creation -@mock.patch('website.mails.settings.USE_CELERY', False) class 
TestUserMerging(OsfTestCase): def setUp(self): super().setUp() diff --git a/scripts/create_fakes.py b/scripts/create_fakes.py index 8b4db177de7..379331f24bc 100644 --- a/scripts/create_fakes.py +++ b/scripts/create_fakes.py @@ -256,7 +256,6 @@ def science_text(cls, max_nb_chars=200): logger = logging.getLogger('create_fakes') SILENT_LOGGERS = [ 'factory', - 'website.mails', ] for logger_name in SILENT_LOGGERS: logging.getLogger(logger_name).setLevel(logging.CRITICAL) diff --git a/tests/base.py b/tests/base.py index 1eacefc066d..35ae00bb445 100644 --- a/tests/base.py +++ b/tests/base.py @@ -53,7 +53,6 @@ def get_default_metaschema(): 'framework.auth.core', 'website.app', 'website.archiver.tasks', - 'website.mails', 'website.notifications.listeners', 'website.search.elastic_search', 'website.search_migration.migrate', diff --git a/tests/test_auth.py b/tests/test_auth.py index 25068c024c8..05f6d243e33 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -43,7 +43,6 @@ logger = logging.getLogger(__name__) -@mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthUtils(OsfTestCase): def setUp(self): diff --git a/tests/test_auth_views.py b/tests/test_auth_views.py index 7f2b4c4136a..ca4476d17d3 100644 --- a/tests/test_auth_views.py +++ b/tests/test_auth_views.py @@ -43,7 +43,6 @@ pytestmark = pytest.mark.django_db -@mock.patch('website.mails.settings.USE_CELERY', False) class TestAuthViews(OsfTestCase): def setUp(self): diff --git a/tests/test_misc_views.py b/tests/test_misc_views.py index d9c735b97dd..78596d1eef2 100644 --- a/tests/test_misc_views.py +++ b/tests/test_misc_views.py @@ -361,7 +361,6 @@ def test_explore(self): assert res.status_code == 200 -@mock.patch('website.mails.settings.USE_CELERY', False) class TestExternalAuthViews(OsfTestCase): def setUp(self): diff --git a/tests/test_preprints.py b/tests/test_preprints.py index b1920669a1f..724dda3b0ae 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -1983,7 +1983,6 @@ def test_update_or_enqueue_on_preprint_doi_created(self): assert should_update_preprint_identifiers(self.private_preprint, {}) -@mock.patch('website.mails.settings.USE_CELERY', False) class TestPreprintConfirmationEmails(OsfTestCase): def setUp(self): super().setUp() diff --git a/tests/test_registrations/test_embargoes.py b/tests/test_registrations/test_embargoes.py index 7b06887c86b..7e2a7b71971 100644 --- a/tests/test_registrations/test_embargoes.py +++ b/tests/test_registrations/test_embargoes.py @@ -1060,7 +1060,6 @@ def test_GET_from_authorized_user_with_registration_rej_token_deleted_node(self) @pytest.mark.enable_bookmark_creation -@mock.patch('website.mails.settings.USE_CELERY', False) class RegistrationEmbargoViewsTestCase(OsfTestCase): def setUp(self): super().setUp() @@ -1156,8 +1155,8 @@ def test_embargoed_registration_set_privacy_sends_mail(self): for contributor in self.registration.contributors: if Contributor.objects.get(user_id=contributor.id, node_id=self.registration.id).permission == permissions.ADMIN: admin_contributors.append(contributor) - for admin in admin_contributors: - assert any([each['kwargs']['user'] == admin for each in notifications]) + + assert all([each['kwargs']['user'] in admin_contributors for each in notifications]) @mock.patch('osf.models.sanctions.EmailApprovableSanction.ask') def test_make_child_embargoed_registration_public_asks_all_admins_in_tree(self, mock_ask): diff --git a/tests/test_registrations/test_retractions.py b/tests/test_registrations/test_retractions.py index 
280b0efd2a3..a477baeaaed 100644 --- a/tests/test_registrations/test_retractions.py +++ b/tests/test_registrations/test_retractions.py @@ -752,7 +752,6 @@ def test_POST_retraction_to_subproject_component_returns_HTTPError_BAD_REQUEST(s @pytest.mark.enable_bookmark_creation @pytest.mark.usefixtures('mock_gravy_valet_get_verified_links') -@mock.patch('website.mails.settings.USE_CELERY', False) class RegistrationRetractionViewsTestCase(OsfTestCase): def setUp(self): super().setUp() @@ -805,7 +804,7 @@ def test_POST_retraction_does_not_send_email_to_unregistered_admins(self): auth=self.user.auth, ) assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_PENDING_REGISTRATION_ADMIN + assert notifications[0]['type'] == NotificationType.Type.NODE_PENDING_RETRACTION_ADMIN def test_POST_pending_embargo_returns_HTTPError_HTTPOK(self): self.registration.embargo_registration( diff --git a/website/mails/__init__.py b/website/mails/__init__.py deleted file mode 100644 index 1ed0bb2c90a..00000000000 --- a/website/mails/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .mails import * # noqa diff --git a/website/mails/mails.py b/website/mails/mails.py deleted file mode 100644 index 7ca0da552ad..00000000000 --- a/website/mails/mails.py +++ /dev/null @@ -1,317 +0,0 @@ -"""OSF mailing utilities. - -Email templates go in website/templates/emails -Templates must end in ``.txt.mako`` for plaintext emails or``.html.mako`` for html emails. - -You can then create a `Mail` object given the basename of the template and -the email subject. :: - - CONFIRM_EMAIL = Mail(tpl_prefix='confirm', subject="Confirm your email address") - -You can then use ``send_mail`` to send the email. - -Usage: :: - - from website import mails - ... - mails.send_mail('foo@bar.com', mails.CONFIRM_EMAIL, user=user) - -""" -import os -import logging - -from mako.lookup import TemplateLookup, Template - -from website import settings - -logger = logging.getLogger(__name__) - -EMAIL_TEMPLATES_DIR = os.path.join(settings.TEMPLATES_PATH, 'emails') - -_tpl_lookup = TemplateLookup( - directories=[EMAIL_TEMPLATES_DIR], -) - -HTML_EXT = '.html.mako' - -class Mail: - """An email object. - - :param str tpl_prefix: The template name prefix. - :param str subject: The subject of the email. - :param iterable categories: Categories to add to the email using SendGrid's - SMTPAPI. Used for email analytics. - See https://sendgrid.com/docs/User_Guide/Statistics/categories.html - :param: bool engagement: Whether this is an engagement email that can be disabled with - the disable_engagement_emails waffle flag - """ - - def __init__(self, tpl_prefix, subject, categories=None, engagement=False): - self.tpl_prefix = tpl_prefix - self._subject = subject - self.categories = categories - self.engagement = engagement - - def html(self, **context): - """Render the HTML email message.""" - tpl_name = self.tpl_prefix + HTML_EXT - return render_message(tpl_name, **context) - - def subject(self, **context): - return Template(self._subject).render(**context) - - -def render_message(tpl_name, **context): - """Render an email message.""" - tpl = _tpl_lookup.get_template(tpl_name) - return tpl.render(**context) - -def get_english_article(word): - """ - Decide whether to use 'a' or 'an' for a given English word. 
- - :param word: the word immediately after the article - :return: 'a' or 'an' - """ - return 'a' + ('n' if word[0].lower() in 'aeiou' else '') - - -# Predefined Emails -CONFIRM_EMAIL_ERPC = Mail( - 'confirm_erpc', - subject='OSF Account Verification, Election Research Preacceptance Competition' -) -CONFIRM_EMAIL_AGU_CONFERENCE = Mail( - 'confirm_agu_conference', - subject='OSF Account Verification, from the American Geophysical Union Conference' -) -CONFIRM_EMAIL_PREPRINTS = lambda name, provider: Mail( - f'confirm_preprints_{name}', - subject=f'OSF Account Verification, {provider}' -) -CONFIRM_EMAIL_REGISTRIES_OSF = Mail( - 'confirm_registries_osf', - subject='OSF Account Verification, OSF Registries' -) - -# Merge account, add or remove email confirmation emails. -CONFIRM_MERGE = Mail('confirm_merge', subject='Confirm account merge') -PRIMARY_EMAIL_CHANGED = Mail('primary_email_changed', subject='Primary email changed') - - -# Contributor added confirmation emails -INVITE_PREPRINT = lambda provider: Mail( - 'invite_preprints', - subject=f'You have been added as a contributor to {get_english_article(provider.name)} {provider.name} {provider.preprint_word}.' -) -CONTRIBUTOR_ADDED_PREPRINT = lambda provider: Mail( - 'contributor_added_preprints', - subject=f'You have been added as a contributor to {get_english_article(provider.name)} {provider.name} {provider.preprint_word}.' -) -MODERATOR_ADDED = lambda provider: Mail( - 'moderator_added', - subject=f'You have been added as a moderator for {provider.name}' -) -CONTRIBUTOR_ADDED_ACCESS_REQUEST = Mail( - 'contributor_added_access_request', - subject='Your access request to an OSF project has been approved' -) -REQUEST_EXPORT = Mail('support_request', subject='[via OSF] Export Request') - -SPAM_USER_BANNED = Mail('spam_user_banned', subject='[OSF] Account flagged as spam') -SPAM_FILES_DETECTED = Mail( - 'spam_files_detected', - subject='[auto] Spam files audit' -) - -# Retraction related Mail objects -PENDING_RETRACTION_ADMIN = Mail( - 'pending_retraction_admin', - subject='Withdrawal pending for one of your registrations.' -) -PENDING_RETRACTION_NON_ADMIN = Mail( - 'pending_retraction_non_admin', - subject='Withdrawal pending for one of your registrations.' -) -# Embargo related Mail objects -PENDING_EMBARGO_ADMIN = Mail( - 'pending_embargo_admin', - subject='Admin decision pending for one of your registrations.' -) -PENDING_EMBARGO_NON_ADMIN = Mail( - 'pending_embargo_non_admin', - subject='Admin decision pending for one of your registrations.' -) -# Registration related Mail Objects -PENDING_REGISTRATION_ADMIN = Mail( - 'pending_registration_admin', - subject='Admin decision pending for one of your registrations.' -) -PENDING_REGISTRATION_NON_ADMIN = Mail( - 'pending_registration_non_admin', - subject='Admin decision pending for one of your registrations.' -) -PENDING_EMBARGO_TERMINATION_ADMIN = Mail( - 'pending_embargo_termination_admin', - subject='Request to end an embargo early for one of your registrations.' -) -PENDING_EMBARGO_TERMINATION_NON_ADMIN = Mail( - 'pending_embargo_termination_non_admin', - subject='Request to end an embargo early for one of your projects.' 
-) - -FILE_OPERATION_SUCCESS = Mail( - 'file_operation_success', - subject='Your ${action} has finished', -) -FILE_OPERATION_FAILED = Mail( - 'file_operation_failed', - subject='Your ${action} has failed', -) - -UNESCAPE = '<% from osf.utils.sanitize import unescape_entities %> ${unescape_entities(src.title)}' -PROBLEM_REGISTERING = 'Problem registering ' + UNESCAPE -ARCHIVE_FILE_NOT_FOUND_DESK = Mail( - 'archive_file_not_found_desk', - subject=PROBLEM_REGISTERING -) -ARCHIVE_FILE_NOT_FOUND_USER = Mail( - 'archive_file_not_found_user', - subject='Registration failed because of altered files' -) - -ARCHIVE_UNCAUGHT_ERROR_DESK = Mail( - 'archive_uncaught_error_desk', - subject=PROBLEM_REGISTERING -) - -ARCHIVE_REGISTRATION_STUCK_DESK = Mail( - 'archive_registration_stuck_desk', - subject='[auto] Stuck registrations audit' -) - -ARCHIVE_SUCCESS = Mail( - 'archive_success', - subject='Registration of ' + UNESCAPE + ' complete' -) - -DUPLICATE_ACCOUNTS_OSF4I = Mail( - 'duplicate_accounts_sso_osf4i', - subject='Duplicate OSF Accounts' -) - -ADD_SSO_EMAIL_OSF4I = Mail( - 'add_sso_email_osf4i', - subject='Your OSF Account Email Address' -) - -EMPTY = Mail('empty', subject='${subject}') - -REVIEWS_SUBMISSION_CONFIRMATION = Mail( - 'reviews_submission_confirmation', - subject='Confirmation of your submission to ${provider_name}' -) - -REVIEWS_RESUBMISSION_CONFIRMATION = Mail( - 'reviews_resubmission_confirmation', - subject='Confirmation of your submission to ${provider_name}' -) - -CROSSREF_ERROR = Mail( - 'crossref_doi_error', - subject='There was an error creating a DOI for preprint(s). batch_id: ${batch_id}' -) - -CROSSREF_DOIS_PENDING = Mail( - 'crossref_doi_pending', - subject='There are ${pending_doi_count} preprints with crossref DOI pending.' -) - -WITHDRAWAL_REQUEST_GRANTED = Mail( - 'withdrawal_request_granted', - subject='Your ${document_type} has been withdrawn', -) - -WITHDRAWAL_REQUEST_DECLINED = Mail( - 'withdrawal_request_declined', - subject='Your withdrawal request has been declined', -) - -TOU_NOTIF = Mail( - 'tou_notif', - subject='Updated Terms of Use for COS Websites and Services', -) - -REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER = Mail( - 'registration_bulk_upload_product_owner', - subject='Registry Could Not Bulk Upload Registrations' -) - -REGISTRATION_BULK_UPLOAD_SUCCESS_ALL = Mail( - 'registration_bulk_upload_success_all', - subject='Registrations Successfully Bulk Uploaded to your Community\'s Registry' -) - -REGISTRATION_BULK_UPLOAD_SUCCESS_PARTIAL = Mail( - 'registration_bulk_upload_success_partial', - subject='Some Registrations Successfully Bulk Uploaded to your Community\'s Registry' -) - - -REGISTRATION_BULK_UPLOAD_FAILURE_DUPLICATES = Mail( - 'registration_bulk_upload_failure_duplicates', - subject='Registrations Were Not Bulk Uploaded to your Community\'s Registry' -) - -REGISTRATION_BULK_UPLOAD_UNEXPECTED_FAILURE = Mail( - 'registration_bulk_upload_unexpected_failure', - subject='Registrations Were Not Bulk Uploaded to your Community\'s Registry' -) - -SCHEMA_RESPONSE_INITIATED = Mail( - 'updates_initiated', - subject='Updates for ${resource_type} ${title} are in progress' -) - - -SCHEMA_RESPONSE_SUBMITTED = Mail( - 'updates_pending_approval', - subject='Updates for ${resource_type} ${title} are pending Admin approval' -) - - -SCHEMA_RESPONSE_APPROVED = Mail( - 'updates_approved', - subject='The updates for ${resource_type} ${title} have been approved' -) - - -SCHEMA_RESPONSE_REJECTED = Mail( - 'updates_rejected', - subject='The updates for ${resource_type} ${title} 
were not accepted' -) - -ADDONS_BOA_JOB_COMPLETE = Mail( - 'addons_boa_job_complete', - subject='Your Boa job has completed' -) - -ADDONS_BOA_JOB_FAILURE = Mail( - 'addons_boa_job_failure', - subject='Your Boa job has failed' -) - -NODE_REQUEST_INSTITUTIONAL_ACCESS_REQUEST = Mail( - 'node_request_institutional_access_request', - subject='Institutional Access Project Request' -) - -USER_MESSAGE_INSTITUTIONAL_ACCESS_REQUEST = Mail( - 'user_message_institutional_access_request', - subject='Message from Institutional Admin' -) - -PROJECT_AFFILIATION_CHANGED = Mail( - 'project_affiliation_changed', - subject='Project Affiliation Changed' -) diff --git a/website/settings/local-dist.py b/website/settings/local-dist.py index 4124d621450..c421be3759e 100644 --- a/website/settings/local-dist.py +++ b/website/settings/local-dist.py @@ -144,9 +144,6 @@ class CeleryConfig(defaults.CeleryConfig): CHRONOS_USE_FAKE_FILE = True CHRONOS_FAKE_FILE_URL = 'https://staging2.osf.io/r2t5v/download' -# Show sent emails in console -logging.getLogger('website.mails.mails').setLevel(logging.DEBUG) - SHARE_ENABLED = False DATACITE_ENABLED = False From dbf481556686d8e32f8dbbc8a23b028190944c6b Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 23:22:44 -0400 Subject: [PATCH 152/176] fixing mailhog tests to use Django mail BE --- osf/email/__init__.py | 77 ++++++++++++---------------------- osf/models/notification.py | 8 ++-- website/settings/defaults.py | 1 - website/settings/local-ci.py | 3 +- website/settings/local-dist.py | 3 +- 5 files changed, 36 insertions(+), 56 deletions(-) diff --git a/osf/email/__init__.py b/osf/email/__init__.py index 9ac0a16e0b4..753c6087a48 100644 --- a/osf/email/__init__.py +++ b/osf/email/__init__.py @@ -1,6 +1,4 @@ import logging -import smtplib -from email.mime.text import MIMEText import waffle from sendgrid import SendGridAPIClient, Personalization, To, Cc, Category, ReplyTo, Bcc @@ -11,7 +9,7 @@ from django.core.mail import EmailMessage, get_connection -def send_email_over_smtp(to_addr, notification_type, context, email_context): +def send_email_over_smtp(to_email, notification_type, context, email_context): """Send an email notification using SMTP. This is typically not used in productions as other 3rd party mail services are preferred. This is to be used for tests and on staging environments and special situations. 
@@ -23,36 +21,38 @@ def send_email_over_smtp(to_addr, notification_type, context, email_context): """ if not settings.MAIL_SERVER: raise NotImplementedError('MAIL_SERVER is not set') - if not settings.MAIL_USERNAME and settings.MAIL_PASSWORD: - raise NotImplementedError('MAIL_USERNAME and MAIL_PASSWORD are required for STMP') if waffle.switch_is_active(features.ENABLE_MAILHOG): - send_to_mailhog( - subject=notification_type.subject, - message=notification_type.template.format(**context), - to_email=to_addr, - from_email=settings.MAIL_USERNAME, - ) - return + host = settings.MAILHOG_HOST + port = settings.MAILHOG_PORT + else: + host = settings.MAIL_SERVER + port = settings.MAIL_PORT - msg = MIMEText( - notification_type.template.format(**context), - 'html', - _charset='utf-8' + email = EmailMessage( + subject=notification_type.subject.format(**context), + body=notification_type.template.format(**context), + from_email=settings.OSF_SUPPORT_EMAIL, + to=[to_email], + connection=get_connection( + backend='django.core.mail.backends.smtp.EmailBackend', + host=host, + port=port, + username=settings.MAIL_USERNAME, + password=settings.MAIL_PASSWORD, + use_tls=False, + use_ssl=False, + ) ) + email.content_subtype = 'html' - if notification_type.subject: - msg['Subject'] = notification_type.subject.format(**context) - - with smtplib.SMTP(settings.MAIL_SERVER) as server: - server.ehlo() - server.login(settings.MAIL_USERNAME, settings.MAIL_PASSWORD) - server.sendmail( - settings.FROM_EMAIL, - [to_addr], - msg.as_string() - ) + if email_context: + attachment_name = email_context.get('attachment_name', None) + attachment_content = email_context.get('attachment_content', None) + if attachment_name and attachment_content: + email.attach(attachment_name, attachment_content) + email.send() def send_email_with_send_grid(to_addr, notification_type, context, email_context): """Send an email notification using SendGrid. @@ -115,26 +115,3 @@ def send_email_with_send_grid(to_addr, notification_type, context, email_context except Exception as exc: logging.error(f'Failed to send email notification to {to_addr}: {exc}') raise exc - -def send_to_mailhog(subject, message, from_email, to_email, attachment_name=None, attachment_content=None): - email = EmailMessage( - subject=subject, - body=message, - from_email=from_email, - to=[to_email], - connection=get_connection( - backend='django.core.mail.backends.smtp.EmailBackend', - host=settings.MAILHOG_HOST, - port=settings.MAILHOG_PORT, - username='', - password='', - use_tls=False, - use_ssl=False, - ) - ) - email.content_subtype = 'html' - - if attachment_name and attachment_content: - email.attach(attachment_name, attachment_content) - - email.send() diff --git a/osf/models/notification.py b/osf/models/notification.py index 228ee3e9d5a..e0775b192d3 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -1,11 +1,13 @@ import logging +import waffle from django.db import models from django.utils import timezone from website import settings from api.base import settings as api_settings -from osf import email +from osf import email, features + class Notification(models.Model): subscription = models.ForeignKey( @@ -31,7 +33,7 @@ def send( raise NotImplementedError(f'Protocol type {protocol_type}. 
Email notifications are only implemented.')
         recipient_address = destination_address or self.subscription.user.username
 
-        if protocol_type == 'email' and settings.ENABLE_TEST_EMAIL:
+        if protocol_type == 'email' and waffle.switch_is_active(features.ENABLE_MAILHOG):
             email.send_email_over_smtp(
                 recipient_address,
                 self.subscription.notification_type,
@@ -41,7 +43,7 @@ def send(
         elif protocol_type == 'email' and settings.DEV_MODE:
             if not api_settings.CI_ENV:
                 logging.info(
-                    f"Attempting to send email in DEV_MODE with ENABLE_TEST_EMAIL false just logs:"
+                    f"Attempting to send email in DEV_MODE for just mocked logs:"
                     f"\nto={recipient_address}"
                     f"\ntype={self.subscription.notification_type.name}"
                     f"\ncontext={self.event_context}"
diff --git a/website/settings/defaults.py b/website/settings/defaults.py
index a68414b6763..7581393e1db 100644
--- a/website/settings/defaults.py
+++ b/website/settings/defaults.py
@@ -143,7 +143,6 @@ def parent_dir(path):
 USE_CDN_FOR_CLIENT_LIBS = True
 
 FROM_EMAIL = 'openscienceframework-noreply@osf.io'
-ENABLE_TEST_EMAIL = False
 # support email
 OSF_SUPPORT_EMAIL = 'support@osf.io'
 # contact email
diff --git a/website/settings/local-ci.py b/website/settings/local-ci.py
index 2cab1ca4252..83049429921 100644
--- a/website/settings/local-ci.py
+++ b/website/settings/local-ci.py
@@ -47,9 +47,10 @@
 USE_CELERY = False
 
 # Email
-MAIL_SERVER = 'localhost:1025'  # For local testing
 MAIL_USERNAME = 'osf-smtp'
 MAIL_PASSWORD = 'CHANGEME'
+MAIL_SERVER = 'localhost'  # For local testing
+MAIL_PORT = 1025  # For local testing
 MAILHOG_HOST = 'localhost'
 MAILHOG_PORT = 1025
 
diff --git a/website/settings/local-dist.py b/website/settings/local-dist.py
index c421be3759e..cb5e72f9b2e 100644
--- a/website/settings/local-dist.py
+++ b/website/settings/local-dist.py
@@ -57,9 +57,10 @@
 ELASTIC_TIMEOUT = 10
 
 # Email
-MAIL_SERVER = 'localhost:1025'  # For local testing
 MAIL_USERNAME = 'osf-smtp'
 MAIL_PASSWORD = 'CHANGEME'
+MAIL_SERVER = 'localhost'  # For local testing
+MAIL_PORT = 1025  # For local testing
 MAILHOG_HOST = 'mailhog'
 MAILHOG_PORT = 1025
 

From ebffbe411e3a7d660763dc4eae1630415bd75fe7 Mon Sep 17 00:00:00 2001
From: John Tordoff
Date: Wed, 30 Jul 2025 23:26:06 -0400
Subject: [PATCH 153/176] more mails removal

---
 addons/boa/tests/test_tasks.py                |  76 ++++-----
 .../commands/check_crossref_dois.py           | 161 ------------------
 osf/management/commands/email_all_users.py    | 116 -------------
 osf/management/commands/find_spammy_files.py  | 114 -------------
 osf_tests/test_archiver.py                    |   2 -
 scripts/osfstorage/usage_audit.py             |   1 -
 scripts/stuck_registration_audit.py           |   6 +-
 tests/test_auth.py                            |   1 -
 tests/test_auth_views.py                      |   2 +-
 9 files changed, 35 insertions(+), 444 deletions(-)

diff --git a/addons/boa/tests/test_tasks.py b/addons/boa/tests/test_tasks.py
index f31185fa789..1580205048e 100644
--- a/addons/boa/tests/test_tasks.py
+++ b/addons/boa/tests/test_tasks.py
@@ -8,7 +8,7 @@
 
 from addons.boa import settings as boa_settings
 from addons.boa.boa_error_code import BoaErrorCode
-from addons.boa.tasks import submit_to_boa, submit_to_boa_async, handle_boa_error
+from addons.boa.tasks import submit_to_boa, handle_boa_error
 from osf.models import NotificationType
 from osf_tests.factories import AuthUserFactory, ProjectFactory
 from tests.base import OsfTestCase
@@ -50,7 +50,6 @@ def test_boa_error_code(self):
         assert 
BoaErrorCode.FILE_TOO_LARGE_ERROR == 6 assert BoaErrorCode.JOB_TIME_OUT_ERROR == 7 - @mock.patch('website.mails.settings.USE_CELERY', False) def test_handle_boa_error(self): with mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \ mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error: @@ -59,16 +58,15 @@ def test_handle_boa_error(self): self.error_message, BoaErrorCode.UNKNOWN, self.user_username, - self.user_fullname, self.project_url, self.file_full_path, - query_file_name=self.query_file_name, - file_size=self.file_size, - output_file_name=self.output_file_name, - job_id=self.job_id + self.query_file_name, + self.file_size, + self.output_file_name, + self.job_id ) assert len(notifications) == 1 - assert notifications[0]['typr'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED + assert notifications[0]['type'] == NotificationType.Type.ADDONS_BOA_JOB_FAILURE mock_sentry_log_message.assert_called_with(self.error_message, skip_session=True) mock_logger_error.assert_called_with(self.error_message) assert return_value == BoaErrorCode.UNKNOWN @@ -90,30 +88,21 @@ def setUp(self): self.query_download_url = f'http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/1a2b3c4d' self.output_upload_url = f'http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/?kind=file' - def tearDown(self): - super().tearDown() - def test_submit_to_boa_async_called(self): - with mock.patch( - 'addons.boa.tasks.submit_to_boa_async', - new_callable=AsyncMock, - return_value=BoaErrorCode.NO_ERROR - ) as mock_submit_to_boa_async: - return_value = submit_to_boa( - self.host, - self.username, - self.password, - self.user_guid, - self.project_guid, - self.query_dataset, - self.query_file_name, - self.file_size, - self.file_full_path, - self.query_download_url, - self.output_upload_url - ) - assert return_value == BoaErrorCode.NO_ERROR - mock_submit_to_boa_async.assert_called() + return_value = submit_to_boa( + self.host, + self.username, + self.password, + self.user_guid, + self.project_guid, + self.query_dataset, + self.query_file_name, + self.file_size, + self.file_full_path, + self.query_download_url, + self.output_upload_url + ) + assert return_value == BoaErrorCode.NO_ERROR @pytest.mark.django_db @@ -150,7 +139,6 @@ def setUp(self): boa_settings.REFRESH_JOB_INTERVAL = DEFAULT_REFRESH_JOB_INTERVAL boa_settings.MAX_JOB_WAITING_TIME = DEFAULT_MAX_JOB_WAITING_TIME - @mock.patch('website.mails.settings.USE_CELERY', False) async def test_submit_success(self): with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \ mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \ @@ -162,7 +150,7 @@ async def test_submit_success(self): mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None) as mock_async_sleep, \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: with capture_notifications() as notifications: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -190,7 +178,7 @@ async def test_download_error(self): mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \ mock.patch('urllib.request.urlopen', side_effect=http_404), \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( 
self.host, self.username, self.password, @@ -221,7 +209,7 @@ async def test_login_error(self): mock.patch('boaapi.boa_client.BoaClient.login', side_effect=BoaException()) as mock_login, \ mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -255,7 +243,7 @@ async def test_data_set_error(self): mock.patch('boaapi.boa_client.BoaClient.get_dataset', side_effect=BoaException()) as mock_get_dataset, \ mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -290,7 +278,7 @@ async def test_submit_error(self): mock.patch('boaapi.boa_client.BoaClient.query', side_effect=BoaException()) as mock_query, \ mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -328,7 +316,7 @@ async def test_compile_error(self): mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -366,7 +354,7 @@ async def test_execute_error(self): mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -403,7 +391,7 @@ async def test_output_error_(self): mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -441,7 +429,7 @@ async def test_upload_error_conflict(self): mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -479,7 +467,7 @@ async def test_upload_error_other(self): mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None), \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -510,7 +498,7 @@ async def 
test_file_too_large_error(self): with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \ mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, @@ -546,7 +534,7 @@ async def test_job_timeout_error(self): mock.patch('boaapi.boa_client.BoaClient.query', return_value=self.mock_job), \ mock.patch('boaapi.boa_client.BoaClient.close', return_value=None) as mock_close, \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as mock_handle_boa_error: - return_value = await submit_to_boa_async( + return_value = submit_to_boa( self.host, self.username, self.password, diff --git a/osf/management/commands/check_crossref_dois.py b/osf/management/commands/check_crossref_dois.py deleted file mode 100644 index bff7ca7e07f..00000000000 --- a/osf/management/commands/check_crossref_dois.py +++ /dev/null @@ -1,161 +0,0 @@ -from datetime import timedelta -import logging -import requests - -import django -from django.core.mail import send_mail -from django.core.management.base import BaseCommand -from django.utils import timezone -django.setup() - -from framework import sentry -from framework.celery_tasks import app as celery_app -from osf.models import Guid, Preprint -from website import mails, settings - - -logger = logging.getLogger(__name__) -logging.basicConfig(level=logging.INFO) - -time_since_published = timedelta(days=settings.DAYS_CROSSREF_DOIS_MUST_BE_STUCK_BEFORE_EMAIL) - -CHECK_DOIS_BATCH_SIZE = 20 - - -def pop_slice(lis, n): - tem = lis[:n] - del lis[:n] - return tem - -def mint_doi_for_preprints_locally(confirm_local=False): - """This method creates identifiers for preprints which have pending DOI in local environment only. - """ - if not settings.DEV_MODE or not settings.DEBUG_MODE: - logger.error('This command should only run in the local development environment.') - return - if not confirm_local: - logger.error('You must explicitly set `confirm_local` to run this command.') - return - - preprints_with_pending_doi = Preprint.objects.filter(preprint_doi_created__isnull=True, is_published=True) - total_created = 0 - for preprint in preprints_with_pending_doi: - client = preprint.get_doi_client() - doi = client.build_doi(preprint=preprint) if client else None - if doi: - logger.info(f'Minting DOI [{doi}] for Preprint [{preprint._id}].') - preprint.set_identifier_values(doi, save=True) - total_created += 1 - logger.info(f'[{total_created}] DOIs minted.') - -def check_crossref_dois(dry_run=True): - """ - This script is to check for any DOI confirmation messages we may have missed during downtime and alert admins to any - DOIs that have been pending for X number of days. It creates url to check with crossref if all our pending crossref - DOIs are minted, then sets all identifiers which are confirmed minted. 
- - :param dry_run: - :return: - """ - - preprints_with_pending_dois = Preprint.objects.filter( - preprint_doi_created__isnull=True, - is_published=True - ).exclude(date_published__gt=timezone.now() - time_since_published) - - if not preprints_with_pending_dois.exists(): - return - - preprints = list(preprints_with_pending_dois) - - while preprints: - preprint_batch = pop_slice(preprints, CHECK_DOIS_BATCH_SIZE) - - pending_dois = [] - for preprint in preprint_batch: - doi_prefix = preprint.provider.doi_prefix - if not doi_prefix: - sentry.log_message(f'Preprint [_id={preprint._id}] has been skipped for CrossRef DOI Check ' - f'since the provider [_id={preprint.provider._id}] has invalid DOI Prefix ' - f'[doi_prefix={doi_prefix}]') - continue - pending_dois.append(f'doi:{settings.DOI_FORMAT.format(prefix=doi_prefix, guid=preprint._id)}') - - if not pending_dois: - continue - - url = '{}works?filter={}'.format(settings.CROSSREF_JSON_API_URL, ','.join(pending_dois)) - - try: - resp = requests.get(url) - resp.raise_for_status() - except requests.exceptions.HTTPError as exc: - sentry.log_message(f'Could not contact crossref to check for DOIs, response returned with exception {exc}') - continue - - preprints_response = resp.json()['message']['items'] - - for preprint in preprints_response: - preprint__id = preprint['DOI'].split('/')[-1] - base_guid, version = Guid.split_guid(preprint__id) - if not base_guid or not version: - sentry.log_message(f'[Skipped] Preprint [_id={preprint__id}] returned by CrossRef API has invalid _id') - continue - pending_preprint = preprints_with_pending_dois.filter( - versioned_guids__guid___id=base_guid, - versioned_guids__version=version, - ).first() - if not pending_preprint: - sentry.log_message(f'[Skipped] Preprint [_id={preprint__id}] returned by CrossRef API is not found.') - continue - if not dry_run: - logger.debug(f'Set identifier for {pending_preprint._id}') - pending_preprint.set_identifier_values(preprint['DOI'], save=True) - else: - logger.info(f'DRY RUN: Set identifier for {pending_preprint._id}') - - -def report_stuck_dois(dry_run=True): - - preprints_with_pending_dois = Preprint.objects.filter(preprint_doi_created__isnull=True, - is_published=True, - date_published__lt=timezone.now() - time_since_published) - - if preprints_with_pending_dois: - guids = ', '.join(preprints_with_pending_dois.values_list('guids___id', flat=True)) - if not dry_run: - send_mail( - to_addr=settings.OSF_SUPPORT_EMAIL, - mail=mails.CROSSREF_DOIS_PENDING, - pending_doi_count=preprints_with_pending_dois.count(), - time_since_published=time_since_published.days, - guids=guids, - ) - else: - logger.info('DRY RUN') - - logger.info(f'There were {preprints_with_pending_dois.count()} stuck registrations for CrossRef, email sent to help desk') - - -@celery_app.task(name='management.commands.check_crossref_dois') -def main(dry_run=False): - check_crossref_dois(dry_run=dry_run) - report_stuck_dois(dry_run=dry_run) - - -class Command(BaseCommand): - help = '''Checks if we've missed any Crossref DOI confirmation emails. 
''' - - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Dry run', - ) - - # Management command handler - def handle(self, *args, **options): - dry_run = options.get('dry_run', True) - main(dry_run=dry_run) diff --git a/osf/management/commands/email_all_users.py b/osf/management/commands/email_all_users.py deleted file mode 100644 index 774f8b5af2d..00000000000 --- a/osf/management/commands/email_all_users.py +++ /dev/null @@ -1,116 +0,0 @@ -# This is a management command, rather than a migration script, for two primary reasons: -# 1. It makes no changes to database structure (e.g. AlterField), only database content. -# 2. It takes a long time to run and the site doesn't need to be down that long. - -import logging - - -import django -from django.core.mail import send_mail - -django.setup() - -from django.core.management.base import BaseCommand -from framework import sentry - -from website import mails - -from osf.models import OSFUser - -logger = logging.getLogger(__name__) - -OFFSET = 500000 - -def email_all_users(email_template, dry_run=False, ids=None, start_id=0, offset=OFFSET): - - if ids: - active_users = OSFUser.objects.filter(id__in=ids) - else: - lower_bound = start_id - upper_bound = start_id + offset - base_query = OSFUser.objects.filter(date_confirmed__isnull=False, deleted=None).exclude(date_disabled__isnull=False).exclude(is_active=False) - active_users = base_query.filter(id__gt=lower_bound, id__lte=upper_bound).order_by('id') - - if dry_run: - active_users = active_users.exclude(is_superuser=False) - - total_active_users = active_users.count() - - logger.info(f'About to send an email to {total_active_users} users.') - - template = getattr(mails, email_template, None) - if not template: - raise RuntimeError('Invalid email template specified!') - - total_sent = 0 - for user in active_users.iterator(): - logger.info(f'Sending email to {user.id}') - try: - send_mail( - to_addr=user.email, - mail=template, - given_name=user.given_name or user.fullname, - ) - except Exception as e: - logger.error(f'Exception encountered sending email to {user.id}') - sentry.log_exception(e) - continue - else: - total_sent += 1 - - logger.info(f'Emails sent to {total_sent}/{total_active_users} users') - - -class Command(BaseCommand): - """ - Add subscription to all active users for given notification type. - """ - def add_arguments(self, parser): - super().add_arguments(parser) - parser.add_argument( - '--dry', - action='store_true', - dest='dry_run', - help='Test - Only send to superusers' - ) - - parser.add_argument( - '--t', - type=str, - dest='template', - required=True, - help='Specify which template to use' - ) - - parser.add_argument( - '--start-id', - type=int, - dest='start_id', - default=0, - help='Specify id to start from.' 
- ) - - parser.add_argument( - '--ids', - dest='ids', - nargs='+', - help='Specific IDs to email, otherwise will email all users' - ) - - parser.add_argument( - '--o', - type=int, - dest='offset', - default=OFFSET, - help=f'How many users to email in this run, default is {OFFSET}' - ) - - def handle(self, *args, **options): - dry_run = options.get('dry_run', False) - template = options.get('template') - start_id = options.get('start_id') - ids = options.get('ids') - offset = options.get('offset', OFFSET) - email_all_users(template, dry_run, start_id=start_id, ids=ids, offset=offset) - if dry_run: - raise RuntimeError('Dry run, only superusers emailed') diff --git a/osf/management/commands/find_spammy_files.py b/osf/management/commands/find_spammy_files.py deleted file mode 100644 index 7feeab508fa..00000000000 --- a/osf/management/commands/find_spammy_files.py +++ /dev/null @@ -1,114 +0,0 @@ -import io -import csv -from datetime import timedelta -import logging - -from django.core.mail import send_mail -from django.core.management.base import BaseCommand -from django.utils import timezone - -from addons.osfstorage.models import OsfStorageFile -from framework.celery_tasks import app -from website import mails - -logger = logging.getLogger(__name__) - - -@app.task(name='osf.management.commands.find_spammy_files') -def find_spammy_files(sniff_r=None, n=None, t=None, to_addrs=None): - if not sniff_r: - raise RuntimeError('Require arg sniff_r not found') - if isinstance(sniff_r, str): - sniff_r = [sniff_r] - if isinstance(to_addrs, str): - to_addrs = [to_addrs] - for sniff in sniff_r: - filename = f'spam_files_{sniff}.csv' - filepath = f'/tmp/{filename}' - fieldnames = ['f.name', 'f._id', 'f.created', 'n._id', 'u._id', 'u.username', 'u.fullname'] - output = io.StringIO() - writer = csv.DictWriter(output, fieldnames) - writer.writeheader() - qs = OsfStorageFile.objects.filter(name__iregex=sniff) - if t: - qs = qs.filter(created__gte=timezone.now() - timedelta(days=t)) - if n: - qs = qs[:n] - ct = 0 - for f in qs: - node = f.target - user = getattr(f.versions.first(), 'creator', node.creator) - if f.target.deleted or user.is_disabled: - continue - ct += 1 - writer.writerow({ - 'f.name': f.name, - 'f._id': f._id, - 'f.created': f.created, - 'n._id': node._id, - 'u._id': user._id, - 'u.username': user.username, - 'u.fullname': user.fullname - }) - if ct: - if to_addrs: - for addr in to_addrs: - send_mail( - mail=mails.SPAM_FILES_DETECTED, - to_addr=addr, - ct=ct, - sniff_r=sniff, - attachment_name=filename, - attachment_content=output.getvalue(), - can_change_preferences=False, - ) - else: - with open(filepath, 'w') as writeFile: - writeFile.write(output.getvalue()) - -class Command(BaseCommand): - help = '''Script to match filenames to common spammy names.''' - - def add_arguments(self, parser): - parser.add_argument( - '--sniff_r', - type=str, - nargs='+', - required=True, - help='Regex to match against file.name', - ) - parser.add_argument( - '--n', - type=int, - default=None, - help='Max number of files to return', - ) - parser.add_argument( - '--t', - type=int, - default=None, - help='Number of days to search through', - ) - parser.add_argument( - '--to_addrs', - type=str, - nargs='*', - default=None, - help='Email address(es) to send the resulting file to. 
If absent, write to csv in /tmp/', - ) - - def handle(self, *args, **options): - script_start_time = timezone.now() - logger.info(f'Script started time: {script_start_time}') - logger.debug(options) - - sniff_r = options.get('sniff_r') - n = options.get('n', None) - t = options.get('t', None) - to_addrs = options.get('to_addrs', None) - - find_spammy_files(sniff_r=sniff_r, n=n, t=t, to_addrs=to_addrs) - - script_finish_time = timezone.now() - logger.info(f'Script finished time: {script_finish_time}') - logger.info(f'Run time {script_finish_time - script_start_time}') diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index bc5efc2c3f9..34394e9a39c 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -12,8 +12,6 @@ from framework.auth import Auth from framework.celery_tasks import handlers -from website import mails - from website.archiver import ( ARCHIVER_INITIATED, ) diff --git a/scripts/osfstorage/usage_audit.py b/scripts/osfstorage/usage_audit.py index c50e3f57640..200f3fda0e7 100644 --- a/scripts/osfstorage/usage_audit.py +++ b/scripts/osfstorage/usage_audit.py @@ -21,7 +21,6 @@ from framework.celery_tasks import app as celery_app from osf.models import TrashedFile, Node -from website import mails from website.app import init_app from website.settings.defaults import GBs diff --git a/scripts/stuck_registration_audit.py b/scripts/stuck_registration_audit.py index c9bce059fb9..d165b256a61 100644 --- a/scripts/stuck_registration_audit.py +++ b/scripts/stuck_registration_audit.py @@ -9,15 +9,13 @@ from django.utils import timezone -from website import mails from website import settings from framework.auth import Auth from framework.celery_tasks import app as celery_app from osf.management.commands import force_archive as fa -from osf.models import ArchiveJob, Registration, NotificationType -from website.archiver import ARCHIVER_INITIATED -from website.settings import ARCHIVE_TIMEOUT_TIMEDELTA, ADDONS_REQUESTED +from osf.models import Registration, NotificationType +from website.settings import ADDONS_REQUESTED from scripts import utils as scripts_utils diff --git a/tests/test_auth.py b/tests/test_auth.py index 05f6d243e33..1b7ec29df1e 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -27,7 +27,6 @@ from osf.models import OSFUser, NotificationType from osf.utils import permissions from tests.utils import capture_notifications -from website import mails from website import settings from website.project.decorators import ( must_have_permission, diff --git a/tests/test_auth_views.py b/tests/test_auth_views.py index ca4476d17d3..7c6e282a07f 100644 --- a/tests/test_auth_views.py +++ b/tests/test_auth_views.py @@ -38,7 +38,7 @@ fake, OsfTestCase, ) -from website import mails, settings +from website import settings from website.util import api_url_for, web_url_for pytestmark = pytest.mark.django_db From 795a7e6500e9d11c57eeb6b2947daa7d02a549e9 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 23:36:21 -0400 Subject: [PATCH 154/176] fix send grid code --- osf/email/__init__.py | 6 +++--- tests/test_preprints.py | 2 +- tests/test_spam_mixin.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/osf/email/__init__.py b/osf/email/__init__.py index 753c6087a48..5b2dae93a04 100644 --- a/osf/email/__init__.py +++ b/osf/email/__init__.py @@ -85,19 +85,19 @@ def send_email_with_send_grid(to_addr, notification_type, context, email_context personalization.add_to(To(to_addr)) - if cc_addr := email_context.get('cc_addr'): + 
if cc_addr := email_context.get('cc_addr', None): if isinstance(cc_addr, str): cc_addr = [cc_addr] for email in cc_addr: personalization.add_cc(Cc(email)) - if bcc_addr := email_context.get('cc_addr'): + if bcc_addr := email_context.get('bcc_addr', None): if isinstance(bcc_addr, str): bcc_addr = [bcc_addr] for email in bcc_addr: personalization.add_bcc(Bcc(email)) - if reply_to := email_context.get('reply_to'): + if reply_to := email_context.get('reply_to', None): message.reply_to = ReplyTo(reply_to) message.add_personalization(personalization) diff --git a/tests/test_preprints.py b/tests/test_preprints.py index 724dda3b0ae..91ed769a3e7 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -44,7 +44,7 @@ from osf.utils.workflows import DefaultStates, RequestTypes, ReviewStates from tests.base import assert_datetime_equal, OsfTestCase from tests.utils import assert_preprint_logs, capture_notifications -from website import settings, mails +from website import settings from website.identifiers.clients import CrossRefClient, ECSArXivCrossRefClient, crossref from website.identifiers.utils import request_identifiers from website.preprints.tasks import ( diff --git a/tests/test_spam_mixin.py b/tests/test_spam_mixin.py index 59b04ec1fa9..2c4cba1c8d7 100644 --- a/tests/test_spam_mixin.py +++ b/tests/test_spam_mixin.py @@ -12,7 +12,7 @@ from osf_tests.factories import UserFactory, CommentFactory, ProjectFactory, PreprintFactory, RegistrationFactory, AuthUserFactory from osf.models import NotableDomain, SpamStatus, NotificationType from tests.utils import capture_notifications -from website import settings, mails +from website import settings @pytest.mark.django_db From a90a2340c1abbcedaa0adbfb144e776b90958215 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 30 Jul 2025 23:48:27 -0400 Subject: [PATCH 155/176] fix more sanctions code --- notifications.yaml | 4 ++++ osf/utils/notifications.py | 5 +++-- website/reviews/listeners.py | 30 +++++++++++++++++++++++++++++- 3 files changed, 36 insertions(+), 3 deletions(-) diff --git a/notifications.yaml b/notifications.yaml index 1cc60553ac9..03c74a3bb03 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -305,6 +305,10 @@ notification_types: __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/pending_registration_admin.html.mako' + - name: node_pending_registration_non_admin + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/pending_registration_non_admin.html.mako' - name: node_pending_retraction_admin __docs__: ... 
object_content_type_model_name: abstractnode diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index 8e432af12a5..76d6e255668 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -42,12 +42,13 @@ def notify_submit(resource, user, *args, **kwargs): context=context, recipients=recipients, resource=resource, + notification_type=NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION ) reviews_signals.reviews_email_submit_moderators_notifications.send( timestamp=timezone.now(), context=context, resource=resource, - user=user + user=user, ) @@ -59,7 +60,7 @@ def notify_resubmit(resource, user, *args, **kwargs): reviews_signals.reviews_email_submit.send( recipients=recipients, context=context, - template=NotificationType.Type.PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION, + notification_type=NotificationType.Type.PROVIDER_REVIEWS_RESUBMISSION_CONFIRMATION, resource=resource, ) reviews_signals.reviews_email_submit_moderators_notifications.send( diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index 6fa873e53a9..be4b3ff7c82 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -1,4 +1,5 @@ -from website.settings import DOMAIN +from osf.models import NotificationType +from website.settings import DOMAIN, OSF_PREPRINTS_LOGO, OSF_REGISTRIES_LOGO from website.reviews import signals as reviews_signals @@ -54,3 +55,30 @@ def reviews_withdrawal_requests_notification(self, timestamp, context): user=recipient, event_context=context, ) + + +@reviews_signals.reviews_email_submit.connect +def reviews_submit_notification(self, recipients, context, resource, notification_type=None): + """ + Handle email notifications for a new submission or a resubmission + """ + provider = resource.provider + if provider._id == 'osf': + if provider.type == 'osf.preprintprovider': + context['logo'] = OSF_PREPRINTS_LOGO + elif provider.type == 'osf.registrationprovider': + context['logo'] = OSF_REGISTRIES_LOGO + else: + raise NotImplementedError() + else: + context['logo'] = resource.provider._id + + for recipient in recipients: + context['is_creator'] = recipient == resource.creator + context['provider_name'] = resource.provider.name + NotificationType.objects.get( + name=notification_type + ).emit( + user=recipient, + event_context=context + ) From 8421403b7aa25a5c1979b5ea5b484c8512dcc503 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 31 Jul 2025 07:34:13 -0400 Subject: [PATCH 156/176] fix more schema responses --- .../test_check_crossref_dois.py | 72 ------------------- .../test_email_all_users.py | 71 ------------------ osf_tests/test_schema_responses.py | 1 + 3 files changed, 1 insertion(+), 143 deletions(-) delete mode 100644 osf_tests/management_commands/test_check_crossref_dois.py delete mode 100644 osf_tests/management_commands/test_email_all_users.py diff --git a/osf_tests/management_commands/test_check_crossref_dois.py b/osf_tests/management_commands/test_check_crossref_dois.py deleted file mode 100644 index 802ce4fde0b..00000000000 --- a/osf_tests/management_commands/test_check_crossref_dois.py +++ /dev/null @@ -1,72 +0,0 @@ -import os -from unittest import mock -import pytest -import json -from datetime import timedelta -import responses - -from osf.models import NotificationType -from tests.utils import capture_notifications - -HERE = os.path.dirname(os.path.abspath(__file__)) - - -from osf_tests.factories import PreprintFactory -from website import settings - -from osf.management.commands.check_crossref_dois 
import check_crossref_dois, report_stuck_dois - - -@pytest.mark.django_db -class TestCheckCrossrefDOIs: - - @pytest.fixture() - def preprint(self): - return PreprintFactory() - - @pytest.fixture() - def stuck_preprint(self): - preprint = PreprintFactory(set_doi=False, set_guid='guid0') - preprint.date_published = preprint.date_published - timedelta(days=settings.DAYS_CROSSREF_DOIS_MUST_BE_STUCK_BEFORE_EMAIL + 1) - # match guid to the fixture crossref_works_response.json - guid = preprint.guids.first() - provider = preprint.provider - provider.doi_prefix = '10.31236' - provider.save() - guid._id = 'guid0' - guid.save() - - preprint.save() - return preprint - - @pytest.fixture() - def crossref_response(self): - with open(os.path.join(HERE, 'fixtures/crossref_works_response.json'), 'rb') as fp: - return json.loads(fp.read()) - - @responses.activate - @mock.patch('osf.models.preprint.update_or_enqueue_on_preprint_updated', mock.Mock()) - def test_check_crossref_dois(self, crossref_response, stuck_preprint, preprint): - doi = settings.DOI_FORMAT.format(prefix=stuck_preprint.provider.doi_prefix, guid=stuck_preprint._id) - responses.add( - responses.Response( - responses.GET, - url=f'{settings.CROSSREF_JSON_API_URL}works?filter=doi:{doi}', - json=crossref_response, - status=200 - ) - ) - - check_crossref_dois(dry_run=False) - - assert preprint.identifiers.count() == 1 - - assert stuck_preprint.identifiers.count() == 1 - assert stuck_preprint.identifiers.first().value == doi - - def test_report_stuck_dois(self, stuck_preprint): - with capture_notifications() as notifications: - report_stuck_dois(dry_run=False) - - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_REQUEST_ACCESS_DENIED diff --git a/osf_tests/management_commands/test_email_all_users.py b/osf_tests/management_commands/test_email_all_users.py deleted file mode 100644 index 9141e6b50d4..00000000000 --- a/osf_tests/management_commands/test_email_all_users.py +++ /dev/null @@ -1,71 +0,0 @@ -import pytest - -from django.utils import timezone - -from osf.models import NotificationType -from osf_tests.factories import UserFactory - -from osf.management.commands.email_all_users import email_all_users -from tests.utils import capture_notifications - - -class TestEmailAllUsers: - - @pytest.fixture() - def user(self): - return UserFactory(id=1) - - @pytest.fixture() - def user2(self): - return UserFactory(id=2) - - @pytest.fixture() - def superuser(self): - user = UserFactory() - user.is_superuser = True - user.save() - return user - - @pytest.fixture() - def deleted_user(self): - return UserFactory(deleted=timezone.now()) - - @pytest.fixture() - def inactive_user(self): - return UserFactory(is_disabled=True) - - @pytest.fixture() - def unconfirmed_user(self): - return UserFactory(date_confirmed=None) - - @pytest.fixture() - def unregistered_user(self): - return UserFactory(is_registered=False) - - @pytest.mark.django_db - def test_email_all_users_dry(self, superuser): - with capture_notifications() as notifications: - email_all_users('TOU_NOTIF', dry_run=True) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - - @pytest.mark.django_db - def test_dont_email_inactive_users( - self, deleted_user, inactive_user, unconfirmed_user, unregistered_user): - - with capture_notifications() as notifications: - email_all_users('TOU_NOTIF') - assert not notifications - - @pytest.mark.django_db - def test_email_all_users_offset(self, user, user2): 
- with capture_notifications() as notifications: - email_all_users('TOU_NOTIF', offset=1, start_id=0) - - email_all_users('TOU_NOTIF', offset=1, start_id=1) - - email_all_users('TOU_NOTIF', offset=1, start_id=2) - - assert len(notifications) == 2 - assert notifications[0]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED - assert notifications[1]['type'] == NotificationType.Type.PROVIDER_MODERATOR_ADDED diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index c924aebcd17..51db350814f 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -863,6 +863,7 @@ def test_accept_notification_sent_on_admin_approval(self, revised_response, admi with capture_notifications() as notifications: revised_response.approve(user=admin_user) assert len(notifications) == 1 + assert notifications[0]['kwargs']['user'] == admin_user assert notifications[0]['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED def test_moderators_notified_on_admin_approval(self, revised_response, admin_user, moderator): From f9d4249d6586f0511327955b340cbb2f0639451b Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 31 Jul 2025 09:02:16 -0400 Subject: [PATCH 157/176] fix boa --- addons/boa/tasks.py | 88 +++++++++++----------------------- addons/boa/tests/test_tasks.py | 44 ++++++++++------- 2 files changed, 54 insertions(+), 78 deletions(-) diff --git a/addons/boa/tasks.py b/addons/boa/tasks.py index 4b8753e5b39..c1918aed640 100644 --- a/addons/boa/tasks.py +++ b/addons/boa/tasks.py @@ -1,7 +1,9 @@ +import asyncio from http.client import HTTPException import logging import time +from asgiref.sync import async_to_sync, sync_to_async from boaapi.boa_client import BoaClient, BoaException from boaapi.status import CompilerStatus, ExecutionStatus from urllib import request @@ -35,34 +37,14 @@ def submit_to_boa(host, username, password, user_guid, project_guid, * Running asyncio in celery is tricky. Refer to the discussion below for details: * https://stackoverflow.com/questions/39815771/how-to-combine-celery-with-asyncio """ - return _submit_to_boa( - host, - username, - password, - user_guid, - project_guid, - query_dataset, - query_file_name, - file_size, - file_full_path, - query_download_url, - output_upload_url - ) + return async_to_sync(submit_to_boa_async)(host, username, password, user_guid, project_guid, + query_dataset, query_file_name, file_size, file_full_path, + query_download_url, output_upload_url) -def _submit_to_boa( - host, - username, - password, - user_guid, - project_guid, - query_dataset, - query_file_name, - file_size, - file_full_path, - query_download_url, - output_upload_url -): +async def submit_to_boa_async(host, username, password, user_guid, project_guid, + query_dataset, query_file_name, file_size, file_full_path, + query_download_url, output_upload_url): """ Download Boa query file, submit it to Boa API, wait for Boa to finish the job and upload result output to OSF. Send success / failure emails notifications. @@ -72,24 +54,21 @@ def _submit_to_boa( * See notes in ``submit_to_boa()`` for details. 
""" - user = OSFUser.objects.get(guids___id=user_guid) - cookie_value = user.get_or_create_cookie().decode() + logger.debug('>>>>>>>> Task begins') + user = await sync_to_async(OSFUser.objects.get)(guids___id=user_guid) + cookie_value = (await sync_to_async(user.get_or_create_cookie)()).decode() project_url = f'{osf_settings.DOMAIN}{project_guid}/' - output_file_name = query_file_name.replace( - '.boa', - boa_settings.OUTPUT_FILE_SUFFIX - ) + output_file_name = query_file_name.replace('.boa', boa_settings.OUTPUT_FILE_SUFFIX) if file_size > boa_settings.MAX_SUBMISSION_SIZE: message = f'Boa query file too large to submit: user=[{user_guid}], project=[{project_guid}], ' \ f'file_name=[{query_file_name}], file_size=[{file_size}], ' \ f'full_path=[{file_full_path}], url=[{query_download_url}] ...' - handle_boa_error( + await sync_to_async(handle_boa_error)( message, BoaErrorCode.FILE_TOO_LARGE_ERROR, user, - project_url, - file_full_path, + project_url, file_full_path, query_file_name=query_file_name, file_size=file_size ) @@ -104,7 +83,7 @@ def _submit_to_boa( except (ValueError, HTTPError, URLError, HTTPException): message = f'Failed to download Boa query file: user=[{user_guid}], project=[{project_guid}], ' \ f'file_name=[{query_file_name}], full_path=[{file_full_path}], url=[{query_download_url}] ...' - handle_boa_error( + await sync_to_async(handle_boa_error)( message, BoaErrorCode.UNKNOWN, user, @@ -124,7 +103,7 @@ def _submit_to_boa( except BoaException: # Don't call `client.close()`, since it will fail with `BoaException` if `client.login()` fails message = f'Boa login failed: boa_username=[{username}], boa_host=[{host}]!' - handle_boa_error( + await sync_to_async(handle_boa_error)( message, BoaErrorCode.AUTHN_ERROR, user, @@ -141,7 +120,7 @@ def _submit_to_boa( except BoaException: client.close() message = f'Failed to retrieve or verify the target Boa dataset: dataset=[{query_dataset}]!' - handle_boa_error( + await sync_to_async(handle_boa_error)( message, BoaErrorCode.UNKNOWN, user, @@ -159,13 +138,12 @@ def _submit_to_boa( except BoaException: client.close() message = f'Failed to submit the query to Boa API: : boa_host=[{host}], dataset=[{query_dataset}]!' - handle_boa_error( + await sync_to_async(handle_boa_error)( message, BoaErrorCode.UNKNOWN, user, project_url, - file_full_path, - query_file_name=query_file_name + file_full_path, query_file_name=query_file_name ) return BoaErrorCode.UNKNOWN logger.info('Query successfully submitted.') @@ -174,9 +152,8 @@ def _submit_to_boa( if time.time() - start_time > boa_settings.MAX_JOB_WAITING_TIME: client.close() message = f'Boa job did not complete in time: job_id=[{str(boa_job.id)}]!' - handle_boa_error( - message, - BoaErrorCode.JOB_TIME_OUT_ERROR, + await sync_to_async(handle_boa_error)( + message, BoaErrorCode.JOB_TIME_OUT_ERROR, user, project_url, file_full_path, @@ -186,11 +163,11 @@ def _submit_to_boa( return BoaErrorCode.JOB_TIME_OUT_ERROR logger.debug(f'Boa job still running, waiting 10s: job_id=[{str(boa_job.id)}] ...') boa_job.refresh() - time.sleep(boa_settings.REFRESH_JOB_INTERVAL) + await asyncio.sleep(boa_settings.REFRESH_JOB_INTERVAL) if boa_job.compiler_status is CompilerStatus.ERROR: client.close() message = f'Boa job failed with compile error: job_id=[{str(boa_job.id)}]!' 
- handle_boa_error( + await sync_to_async(handle_boa_error)( message, BoaErrorCode.QUERY_ERROR, user, @@ -203,7 +180,7 @@ def _submit_to_boa( elif boa_job.exec_status is ExecutionStatus.ERROR: client.close() message = f'Boa job failed with execution error: job_id=[{str(boa_job.id)}]!' - handle_boa_error( + await sync_to_async(handle_boa_error)( message, BoaErrorCode.QUERY_ERROR, user, @@ -219,7 +196,7 @@ def _submit_to_boa( except BoaException: client.close() message = f'Boa job output is not available: job_id=[{str(boa_job.id)}]!' - handle_boa_error( + await sync_to_async(handle_boa_error)( message, BoaErrorCode.OUTPUT_ERROR, user, @@ -250,7 +227,7 @@ def _submit_to_boa( message += f', http_error=[{e.code}: {e.reason}]' if e.code == 409: error_code = BoaErrorCode.UPLOAD_ERROR_CONFLICT - handle_boa_error( + await sync_to_async(handle_boa_error)( message, error_code, user, @@ -283,17 +260,8 @@ def _submit_to_boa( return BoaErrorCode.NO_ERROR -def handle_boa_error( - message, - code, - user, - project_url, - query_file_full_path, - query_file_name=None, - file_size=None, - output_file_name=None, - job_id=None -): +def handle_boa_error(message, code, user, project_url, query_file_full_path, + query_file_name=None, file_size=None, output_file_name=None, job_id=None): """Handle Boa and WB API errors and send emails. """ logger.error(message) diff --git a/addons/boa/tests/test_tasks.py b/addons/boa/tests/test_tasks.py index 1580205048e..c1d2a410679 100644 --- a/addons/boa/tests/test_tasks.py +++ b/addons/boa/tests/test_tasks.py @@ -29,8 +29,7 @@ class TestBoaErrorHandling(OsfTestCase): def setUp(self): super().setUp() self.error_message = 'fake-error-message' - self.user_username = 'fake-user-username' - self.user_fullname = 'fake-user-fullname' + self.user = AuthUserFactory() self.project_url = 'http://localhost:5000/1a2b3' self.query_file_name = 'fake_boa_script.boa' self.file_size = 255 @@ -57,7 +56,7 @@ def test_handle_boa_error(self): return_value = handle_boa_error( self.error_message, BoaErrorCode.UNKNOWN, - self.user_username, + self.user, self.project_url, self.file_full_path, self.query_file_name, @@ -88,21 +87,30 @@ def setUp(self): self.query_download_url = f'http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/1a2b3c4d' self.output_upload_url = f'http://localhost:7777/v1/resources/{self.project_guid}/providers/osfstorage/?kind=file' + def tearDown(self): + super().tearDown() + def test_submit_to_boa_async_called(self): - return_value = submit_to_boa( - self.host, - self.username, - self.password, - self.user_guid, - self.project_guid, - self.query_dataset, - self.query_file_name, - self.file_size, - self.file_full_path, - self.query_download_url, - self.output_upload_url - ) - assert return_value == BoaErrorCode.NO_ERROR + with mock.patch( + 'addons.boa.tasks.submit_to_boa_async', + new_callable=AsyncMock, + return_value=BoaErrorCode.NO_ERROR + ) as mock_submit_to_boa_async: + return_value = submit_to_boa( + self.host, + self.username, + self.password, + self.user_guid, + self.project_guid, + self.query_dataset, + self.query_file_name, + self.file_size, + self.file_full_path, + self.query_download_url, + self.output_upload_url + ) + assert return_value == BoaErrorCode.NO_ERROR + mock_submit_to_boa_async.assert_called() @pytest.mark.django_db @@ -150,7 +158,7 @@ async def test_submit_success(self): mock.patch('asyncio.sleep', new_callable=AsyncMock, return_value=None) as mock_async_sleep, \ mock.patch('addons.boa.tasks.handle_boa_error', return_value=None) as 
mock_handle_boa_error: with capture_notifications() as notifications: - return_value = submit_to_boa( + return_value = await submit_to_boa( self.host, self.username, self.password, From 4c2d14562fc8f240e8e875596ff8d7e73ab21069 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 31 Jul 2025 09:15:59 -0400 Subject: [PATCH 158/176] fix mock user --- api/crossref/views.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/api/crossref/views.py b/api/crossref/views.py index d93d5b43ef2..17bba7a3281 100644 --- a/api/crossref/views.py +++ b/api/crossref/views.py @@ -78,8 +78,10 @@ def post(self, request): if unexpected_errors: email_error_text = request.POST['body-plain'] batch_id = crossref_email_content.find('batch_id').text - NotificationType.objects.get(name=NotificationType.Type.DESK_OSF_SUPPORT_EMAIL).emit( - user=type('staff', (), {'username': settings.OSF_SUPPORT_EMAIL}), + NotificationType.objects.get( + name=NotificationType.Type.DESK_OSF_SUPPORT_EMAIL, + ).emit( + destination_address=settings.OSF_SUPPORT_EMAIL, event_context={ 'batch_id': batch_id, 'email_content': request.POST['body-plain'], From 8d9a26fdcd3f3671bb26db1a86ce98deaa1d64d0 Mon Sep 17 00:00:00 2001 From: ihorsokhanexoft Date: Thu, 31 Jul 2025 16:31:27 +0300 Subject: [PATCH 159/176] [ENG-8401] Earlier preprint versions download the current file (#11245) * fixed earlier version download the newest version file * fixed tests --- addons/base/views.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/addons/base/views.py b/addons/base/views.py index 2c61fdda232..2eee0ae0dd6 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -1006,14 +1006,17 @@ def persistent_file_download(auth, **kwargs): file = BaseFileNode.active.filter(_id=id_or_guid).first() if not file: guid = Guid.load(id_or_guid) - if guid: - referent = guid.referent - file = referent.primary_file if type(referent) is Preprint else referent - else: + if not guid: raise HTTPError(http_status.HTTP_404_NOT_FOUND, data={ 'message_short': 'File Not Found', 'message_long': 'The requested file could not be found.' }) + + file = guid.referent + if type(file) is Preprint: + referent, _ = Guid.load_referent(id_or_guid) + file = referent.primary_file + if not file.is_file: raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={ 'message_long': 'Downloading folders is not permitted.' 
From 7ccc8fb035c96885e2c6ef53ed36485fcae624d2 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 31 Jul 2025 10:10:21 -0400 Subject: [PATCH 160/176] fix schema response tests --- api/providers/tasks.py | 2 +- osf/email/__init__.py | 1 + osf/models/schema_response.py | 16 ++++---- osf/utils/notifications.py | 4 +- osf_tests/test_archiver.py | 14 ++++--- osf_tests/test_schema_responses.py | 22 +++++++---- website/notifications/listeners.py | 62 ++++++++++++++++++++++++++++++ website/reviews/listeners.py | 56 +++++++++++++++++++++++---- 8 files changed, 144 insertions(+), 33 deletions(-) diff --git a/api/providers/tasks.py b/api/providers/tasks.py index 5891494cfb2..9896447bc69 100644 --- a/api/providers/tasks.py +++ b/api/providers/tasks.py @@ -705,7 +705,7 @@ def inform_product_of_errors(initiator=None, provider=None, message=None): NotificationType.objects.get( name=NotificationType.Type.DESK_REGISTRATION_BULK_UPLOAD_PRODUCT_OWNER, ).emit( - user=object('mockuser', (), {'username': email}), + destination_address=email, event_context={ 'user': user_info, 'provider_name': provider_name, diff --git a/osf/email/__init__.py b/osf/email/__init__.py index 5b2dae93a04..39819741cb2 100644 --- a/osf/email/__init__.py +++ b/osf/email/__init__.py @@ -61,6 +61,7 @@ def send_email_with_send_grid(to_addr, notification_type, context, email_context to_addr (str): The recipient's email address. notification_type (str): The subject of the notification. context (dict): The email content context. + email_context (dict): The email context for sending, such as header changes for BCC or reply-to """ if not settings.SENDGRID_API_KEY: raise NotImplementedError('SENDGRID_API_KEY is required for sendgrid notifications.') diff --git a/osf/models/schema_response.py b/osf/models/schema_response.py index 3c4f65155fb..b51256c3ee8 100644 --- a/osf/models/schema_response.py +++ b/osf/models/schema_response.py @@ -22,13 +22,6 @@ from website.settings import DOMAIN -EMAIL_TEMPLATES_PER_EVENT = { - 'create': NotificationType.Type.NODE_SCHEMA_RESPONSE_INITIATED, - 'submit': NotificationType.Type.NODE_SCHEMA_RESPONSE_SUBMITTED, - 'accept': NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED, - 'reject': NotificationType.Type.NODE_SCHEMA_RESPONSE_REJECTED, -} - class SchemaResponse(ObjectIDMixin, BaseModel): '''Collects responses for a schema associated with a parent object. 
@@ -483,10 +476,15 @@ def _notify_users(self, event, event_initiator): reviews_email_submit_moderators_notifications.send( timestamp=timezone.now(), context=email_context, - user=self.initiator + resource=self.parent ) - template = EMAIL_TEMPLATES_PER_EVENT.get(event) + template = { + 'create': NotificationType.Type.NODE_SCHEMA_RESPONSE_INITIATED, + 'submit': NotificationType.Type.NODE_SCHEMA_RESPONSE_SUBMITTED, + 'accept': NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED, + 'reject': NotificationType.Type.NODE_SCHEMA_RESPONSE_REJECTED, + }.get(event) if not template: return diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index 76d6e255668..ee95a3cb811 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -9,11 +9,13 @@ def get_email_template_context(resource): is_preprint = resource.provider.type == 'osf.preprintprovider' url_segment = 'preprints' if is_preprint else 'registries' document_type = resource.provider.preprint_word if is_preprint else 'registration' + from website.profile.utils import get_profile_image_url base_context = { 'domain': DOMAIN, 'reviewable_title': resource.title, 'reviewable_absolute_url': resource.absolute_url, + 'profile_image_url': get_profile_image_url(resource.creator), 'reviewable_provider_name': resource.provider.name, 'workflow': resource.provider.reviews_workflow, 'provider_url': resource.provider.domain or f'{DOMAIN}{url_segment}/{resource.provider._id}', @@ -48,7 +50,6 @@ def notify_submit(resource, user, *args, **kwargs): timestamp=timezone.now(), context=context, resource=resource, - user=user, ) @@ -67,7 +68,6 @@ def notify_resubmit(resource, user, *args, **kwargs): timestamp=timezone.now(), context=context, resource=resource, - user=user ) diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index 34394e9a39c..282c0c99ddd 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -1209,11 +1209,13 @@ def test_archiver_uncaught_error_mail_renders(): src = factories.ProjectFactory() user = src.creator job = factories.ArchiveJobFactory() - mail = mails.ARCHIVE_UNCAUGHT_ERROR_DESK - assert mail.html( + notification_type = NotificationType.Type.DESK_ARCHIVE_JOB_UNCAUGHT_ERROR.instance + assert notification_type.emit( user=user, - src=src, - results=job.target_addons.all(), - url=settings.INTERNAL_DOMAIN + src._id, - can_change_preferences=False, + event_context=dict( + src=str(src), + results=list(job.target_addons.all()), + url=settings.INTERNAL_DOMAIN + src._id, + can_change_preferences=False, + ) ) diff --git a/osf_tests/test_schema_responses.py b/osf_tests/test_schema_responses.py index 51db350814f..e138c8e01e4 100644 --- a/osf_tests/test_schema_responses.py +++ b/osf_tests/test_schema_responses.py @@ -855,16 +855,20 @@ def test_schema_response_action_to_state_following_moderated_approve_is_pending_ assert new_action.to_state == ApprovalStates.PENDING_MODERATION.db_name assert new_action.trigger == SchemaResponseTriggers.APPROVE.db_name - def test_accept_notification_sent_on_admin_approval(self, revised_response, admin_user): + def test_accept_notification_sent_on_admin_approval(self, revised_response, admin_user, moderator): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) revised_response.save() revised_response.pending_approvers.add(admin_user) with capture_notifications() as notifications: revised_response.approve(user=admin_user) - assert len(notifications) == 1 - assert notifications[0]['kwargs']['user'] == admin_user - assert 
notifications[0]['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED + assert len(notifications) == 3 + assert notifications[0]['kwargs']['user'] == moderator + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + assert notifications[1]['kwargs']['user'] == moderator + assert notifications[1]['type'] == NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + assert notifications[2]['kwargs']['user'] == admin_user + assert notifications[2]['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED def test_moderators_notified_on_admin_approval(self, revised_response, admin_user, moderator): revised_response.approvals_state_machine.set_state(ApprovalStates.UNAPPROVED) @@ -873,9 +877,13 @@ def test_moderators_notified_on_admin_approval(self, revised_response, admin_use with capture_notifications() as notifications: revised_response.approve(user=admin_user) - assert len(notifications) == 1 - assert notifications[0]['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED + assert len(notifications) == 3 assert notifications[0]['kwargs']['user'] == moderator + assert notifications[0]['type'] == NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + assert notifications[1]['kwargs']['user'] == moderator + assert notifications[1]['type'] == NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + assert notifications[2]['kwargs']['user'] == admin_user + assert notifications[2]['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED def test_no_moderator_notification_on_admin_approval_of_initial_response( self, initial_response, admin_user): @@ -915,7 +923,7 @@ def test_moderator_accept_notification( with capture_notifications() as notifications: revised_response.accept(user=moderator) assert len(notifications) == 3 - assert all(notification['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_INITIATED + assert all(notification['type'] == NotificationType.Type.NODE_SCHEMA_RESPONSE_APPROVED for notification in notifications) def test_no_moderator_accept_notification_on_initial_response( diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index 871d6d56792..ceae7ba6e10 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -8,6 +8,7 @@ from framework.auth.signals import user_confirmed from website.project.signals import privacy_set_public from website import settings +from website.reviews import signals as reviews_signals logger = logging.getLogger(__name__) @@ -76,3 +77,64 @@ def queue_first_public_project_email(user, node): 'osf_url': settings.DOMAIN, } ) + +@reviews_signals.reviews_email_submit_moderators_notifications.connect +def reviews_submit_notification_moderators(self, timestamp, context, resource): + """ + Handle email notifications to notify moderators of new submissions or resubmission. 
+ """ + # imports moved here to avoid AppRegistryNotReady error + from osf.models import NotificationType + from website.settings import DOMAIN + + provider = resource.provider + + # Set submission url + if provider.type == 'osf.preprintprovider': + context['reviews_submission_url'] = ( + f'{DOMAIN}reviews/preprints/{provider._id}/{resource._id}' + ) + elif provider.type == 'osf.registrationprovider': + context['reviews_submission_url'] = f'{DOMAIN}{resource._id}?mode=moderator' + else: + raise NotImplementedError(f'unsupported provider type {provider.type}') + + # Set message + revision_id = context.get('revision_id') + if revision_id: + context['message'] = f'submitted updates to "{resource.title}".' + context['reviews_submission_url'] += f'&revisionId={revision_id}' + else: + if context.get('resubmission'): + context['message'] = f'resubmitted "{resource.title}".' + else: + context['message'] = f'submitted "{resource.title}".' + provider_subscription, created = NotificationSubscription.objects.get_or_create( + notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, + object_id=provider.id, + content_type=ContentType.objects.get_for_model(provider.__class__), + ) + for recipient in provider_subscription.subscribed_object.get_group('moderator').user_set.all(): + NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + ).emit( + user=recipient, + event_context=context + ) + +# Handle email notifications to notify moderators of new submissions. +@reviews_signals.reviews_withdraw_requests_notification_moderators.connect +def reviews_withdraw_requests_notification_moderators(self, timestamp, context, user, resource): + from website.settings import DOMAIN + + provider = resource.provider + # Set message + context['message'] = f'has requested withdrawal of "{resource.title}".' + # Set submission url + context['reviews_submission_url'] = f'{DOMAIN}reviews/registries/{provider._id}/{resource._id}' + NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS + ).emit( + user=user, + event_context=context + ) diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index be4b3ff7c82..d208bdb099a 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -1,8 +1,9 @@ +from django.contrib.contenttypes.models import ContentType + from osf.models import NotificationType from website.settings import DOMAIN, OSF_PREPRINTS_LOGO, OSF_REGISTRIES_LOGO from website.reviews import signals as reviews_signals - @reviews_signals.reviews_withdraw_requests_notification_moderators.connect def reviews_withdraw_requests_notification_moderators(self, timestamp, context, user, resource): context['referrer_fullname'] = user.fullname @@ -15,10 +16,8 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context, object_id=provider.id, content_type=ContentType.objects.get_for_model(provider.__class__), ) - from website.profile.utils import get_profile_image_url context['message'] = f'has requested withdrawal of "{resource.title}".' 
- context['profile_image_url'] = get_profile_image_url(user) context['reviews_submission_url'] = f'{DOMAIN}reviews/registries/{provider._id}/{resource._id}' for recipient in provider_subscription.subscribed_object.get_group('moderator').user_set.all(): @@ -29,7 +28,6 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context, event_context=context, ) - @reviews_signals.reviews_email_withdrawal_requests.connect def reviews_withdrawal_requests_notification(self, timestamp, context): preprint = context['reviewable'] @@ -42,13 +40,55 @@ def reviews_withdrawal_requests_notification(self, timestamp, context): object_id=preprint.provider.id, content_type=ContentType.objects.get_for_model(preprint.provider.__class__), ) - from website.profile.utils import get_profile_image_url - context['message'] = f'has requested withdrawal of the {preprint_word} "{preprint.title}".' - context['profile_image_url'] = get_profile_image_url(context['requester']) context['reviews_submission_url'] = f'{DOMAIN}reviews/preprints/{preprint.provider._id}/{preprint._id}' - for recipient in provider_subscription.preorint.contributors.all(): + for recipient in provider_subscription.subscribed_object.get_group('moderator').user_set.all(): + NotificationType.objects.get( + name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + ).emit( + user=recipient, + event_context=context, + ) + +@reviews_signals.reviews_email_submit_moderators_notifications.connect +def reviews_submit_notification_moderators(self, timestamp, resource, context): + """ + Handle email notifications to notify moderators of new submissions or resubmission. + """ + # imports moved here to avoid AppRegistryNotReady error + from osf.models import NotificationSubscription + + provider = resource.provider + + # Set submission url + if provider.type == 'osf.preprintprovider': + context['reviews_submission_url'] = ( + f'{DOMAIN}reviews/preprints/{provider._id}/{resource._id}' + ) + elif provider.type == 'osf.registrationprovider': + context['reviews_submission_url'] = f'{DOMAIN}{resource._id}?mode=moderator' + else: + raise NotImplementedError(f'unsupported provider type {provider.type}') + + # Set message + revision_id = context.get('revision_id') + if revision_id: + context['message'] = f'submitted updates to "{resource.title}".' + context['reviews_submission_url'] += f'&revisionId={revision_id}' + else: + if context.get('resubmission'): + context['message'] = f'resubmitted "{resource.title}".' + else: + context['message'] = f'submitted "{resource.title}".' 
+ + # Get NotificationSubscription instance, which contains reference to all subscribers + provider_subscription, created = NotificationSubscription.objects.get_or_create( + notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, + object_id=provider.id, + content_type=ContentType.objects.get_for_model(provider.__class__), + ) + for recipient in provider_subscription.subscribed_object.get_group('moderator').user_set.all(): NotificationType.objects.get( name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS ).emit( From c41802164d235b1bcc0f0f32581d6d48f3aa765a Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 31 Jul 2025 10:50:28 -0400 Subject: [PATCH 161/176] fix insti reporter tests --- osf/models/preprint.py | 1 + .../reporters/test_institutional_summary_reporter.py | 2 ++ osf_tests/test_node.py | 6 ------ 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/osf/models/preprint.py b/osf/models/preprint.py index a2415643e2a..870099f0623 100644 --- a/osf/models/preprint.py +++ b/osf/models/preprint.py @@ -1052,6 +1052,7 @@ def _send_preprint_confirmation(self, auth): NotificationType.objects.get( name=NotificationType.Type.PROVIDER_REVIEWS_SUBMISSION_CONFIRMATION ).emit( + subscribed_object=self.provider, user=recipient, event_context=context, ) diff --git a/osf_tests/metrics/reporters/test_institutional_summary_reporter.py b/osf_tests/metrics/reporters/test_institutional_summary_reporter.py index 05baa4d38e7..b03e4de1161 100644 --- a/osf_tests/metrics/reporters/test_institutional_summary_reporter.py +++ b/osf_tests/metrics/reporters/test_institutional_summary_reporter.py @@ -12,12 +12,14 @@ AuthUserFactory, ) from ._testutils import list_monthly_reports +from osf.management.commands.populate_notification_types import populate_notification_types class TestInstiSummaryMonthlyReporter(TestCase): @classmethod def setUpTestData(cls): + populate_notification_types() cls._yearmonth = YearMonth(2018, 2) # February 2018 cls._institution = InstitutionFactory() cls._now = datetime.datetime(2018, 2, 4, tzinfo=datetime.UTC) diff --git a/osf_tests/test_node.py b/osf_tests/test_node.py index e6a34c31050..6348c87a144 100644 --- a/osf_tests/test_node.py +++ b/osf_tests/test_node.py @@ -2140,12 +2140,6 @@ def test_set_privacy_sends_mail(self, node, auth): assert len(notifications) == 1 assert notifications[0]['type'] == NotificationType.Type.USER_NEW_PUBLIC_PROJECT - def test_set_privacy_skips_mail_if_meeting(self, node, auth): - with capture_notifications() as notifications: - node.set_privacy('private', auth=auth) - node.set_privacy('public', auth=auth, meeting_creation=True) - assert not notifications - def test_set_privacy_can_not_cancel_pending_embargo_for_registration(self, node, user, auth): registration = RegistrationFactory(project=node) registration.embargo_registration( From 57ba198f4739d1efcf9d0d0dc96a52347d964188 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 31 Jul 2025 12:09:15 -0400 Subject: [PATCH 162/176] clean resource contributors create method --- api/nodes/serializers.py | 50 ++++++++++++++++------------------------ 1 file changed, 20 insertions(+), 30 deletions(-) diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index c52787f2a6c..2fbcc9f4160 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -1237,44 +1237,35 @@ def validate_data(self, resource, user_id=None, full_name=None, email=None, inde raise exceptions.ValidationError(detail=f'{index} is not a valid contributor index for node with id 
{resource._id}')

     def create(self, validated_data):
-        id = validated_data.get('_id')
-        email = validated_data.get('user', {}).get('email', None)
-        index = None
-        if '_order' in validated_data:
-            index = validated_data.pop('_order')
+        user_id = validated_data.get('_id')
+        email = validated_data.get('user', {}).get('email')
+        index = validated_data.pop('_order', None)

         resource = self.context['resource']
         auth = Auth(self.context['request'].user)
         full_name = validated_data.get('full_name')
         bibliographic = validated_data.get('bibliographic')
-        email_preference = self.context['request'].GET.get('send_email') or self.context['default_email']
+        email_pref = self.context['request'].GET.get('send_email') or self.context['default_email']
         permissions = self.get_proposed_permissions(validated_data)
-        self.validate_data(
-            resource,
-            user_id=id,
-            full_name=full_name,
-            email=email,
-            index=index,
-        )
-        if email_preference not in self.email_preferences:
-            raise exceptions.ValidationError(detail=f'{email_preference} is not a valid email preference.')
-
-        contributor = OSFUser.load(id)
-        if email or (contributor and contributor.is_registered):
-            is_published = getattr(resource, 'is_published', False)
-            notification_type = {
-                'false': False,
-                'default': NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT,
-                'draft_registration': NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT,
-                'preprint': NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT if is_published else False,
-            }[email_preference]
-        else:
-            notification_type = False
+        self.validate_data(resource, user_id=user_id, full_name=full_name, email=email, index=index)
+
+        if email_pref not in self.email_preferences:
+            raise exceptions.ValidationError(f'{email_pref} is not a valid email preference.')
+
+        is_published = getattr(resource, 'is_published', False)
+        notification_type = {
+            'false': False,
+            'default': NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT,
+            'draft_registration': NotificationType.Type.DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT,
+            'preprint': NotificationType.Type.PREPRINT_CONTRIBUTOR_ADDED_DEFAULT if is_published else False,
+        }.get(email_pref, False)
+        contributor = OSFUser.load(user_id)
+        notification_type = notification_type if email or (contributor and contributor.is_registered) else False
         try:
-            contributor_obj = resource.add_contributor_registered_or_not(
+            return resource.add_contributor_registered_or_not(
                 auth=auth,
-                user_id=id,
+                user_id=user_id,
                 email=email,
                 full_name=full_name,
                 notification_type=notification_type,
@@ -1286,7 +1277,6 @@
             raise exceptions.ValidationError(detail=e.messages[0])
         except ValueError as e:
             raise exceptions.NotFound(detail=e.args[0])
-        return contributor_obj

 class NodeContributorDetailSerializer(NodeContributorsSerializer):
     """

From 8745da462fb1dd777b8f63de2cebef9825306d2f Mon Sep 17 00:00:00 2001
From: antkryt
Date: Thu, 31 Jul 2025 20:30:11 +0300
Subject: [PATCH 163/176] [ENG-8462] Institution setup fixes (#11241)

* create monthly reports when institution is created
* better exception handling; async generate report
* fix test
* handle deactivated institutions; minor fixes
---
 admin/institutions/urls.py | 1 +
 admin/institutions/views.py | 48 +++++++++++++++++++
 admin/management/views.py | 1 +
 admin/templates/institutions/detail.html | 35 ++++++++++++++
 admin/templates/management/commands.html | 2 +-
 admin_tests/institutions/test_views.py | 28 +++++++++++
 api/institutions/views.py | 11 +++--
 .../commands/monthly_reporters_go.py | 7 
++- osf/metrics/reports.py | 10 ++-- 9 files changed, 133 insertions(+), 10 deletions(-) diff --git a/admin/institutions/urls.py b/admin/institutions/urls.py index 8d12a9fe36c..6aa5cf7e0df 100644 --- a/admin/institutions/urls.py +++ b/admin/institutions/urls.py @@ -9,6 +9,7 @@ re_path(r'^import/$', views.ImportInstitution.as_view(), name='import'), re_path(r'^(?P[0-9]+)/$', views.InstitutionDetail.as_view(), name='detail'), re_path(r'^(?P[0-9]+)/export/$', views.InstitutionExport.as_view(), name='export'), + re_path(r'^(?P[0-9]+)/monthly_report/$', views.InstitutionMonthlyReporterDo.as_view(), name='monthly_report'), re_path(r'^(?P[0-9]+)/delete/$', views.DeleteInstitution.as_view(), name='delete'), re_path(r'^(?P[0-9]+)/deactivate/$', views.DeactivateInstitution.as_view(), name='deactivate'), re_path(r'^(?P[0-9]+)/reactivate/$', views.ReactivateInstitution.as_view(), name='reactivate'), diff --git a/admin/institutions/views.py b/admin/institutions/views.py index ad9f0c7571f..a7b76bc9109 100644 --- a/admin/institutions/views.py +++ b/admin/institutions/views.py @@ -1,4 +1,5 @@ import json +from dateutil.parser import isoparse from django.contrib import messages from django.contrib.auth.mixins import PermissionRequiredMixin @@ -15,6 +16,9 @@ from admin.base.forms import ImportFileForm from admin.institutions.forms import InstitutionForm, InstitutionalMetricsAdminRegisterForm from osf.models import Institution, Node, OSFUser +from osf.metrics.utils import YearMonth +from osf.metrics.reporters import AllMonthlyReporters +from osf.management.commands.monthly_reporters_go import monthly_reporter_do class InstitutionList(PermissionRequiredMixin, ListView): @@ -129,6 +133,38 @@ def get(self, request, *args, **kwargs): return response +class InstitutionMonthlyReporterDo(PermissionRequiredMixin, View): + permission_required = 'osf.view_institution' + raise_exception = True + + def post(self, request, *args, **kwargs): + institution_id = self.kwargs.get('institution_id') + try: + institution = Institution.objects.get_all_institutions().get(id=institution_id) + except Institution.DoesNotExist: + raise Http404(f"Institution with id {institution_id} is not found or deactivated.") + + monthly_report_date = request.POST.get('monthly_report_date', None) + if monthly_report_date: + try: + monthly_report_date = isoparse(monthly_report_date).date() + except ValueError as exc: + messages.error(request, str(exc)) + return redirect('institutions:detail', institution_id=institution.id) + else: + messages.error(request, 'Report date cannot be none.') + return redirect('institutions:detail', institution_id=institution.id) + + monthly_reporter_do.apply_async(kwargs={ + 'yearmonth': str(YearMonth.from_date(monthly_report_date)), + 'reporter_key': request.POST.get('monthly_reporter', None), + 'report_kwargs': {'institution_pk': institution.id}, + }) + + messages.success(request, 'Monthly reporter successfully went.') + return redirect('institutions:detail', institution_id=institution.id) + + class CreateInstitution(PermissionRequiredMixin, CreateView): permission_required = 'osf.change_institution' raise_exception = True @@ -141,6 +177,18 @@ def get_context_data(self, *args, **kwargs): kwargs['import_form'] = ImportFileForm() return super().get_context_data(*args, **kwargs) + def form_valid(self, form): + response = super().form_valid(form) + + # Make a report after Institution is created + monthly_reporter_do.apply_async(kwargs={ + 'yearmonth': str(YearMonth.from_date(self.object.created)), + 'reporter_key': 
AllMonthlyReporters.INSTITUTIONAL_SUMMARY.name, + 'report_kwargs': {'institution_pk': self.object.id}, + }) + + return response + class InstitutionNodeList(PermissionRequiredMixin, ListView): template_name = 'institutions/node_list.html' diff --git a/admin/management/views.py b/admin/management/views.py index 525f0d8d64a..d97e4f4b894 100644 --- a/admin/management/views.py +++ b/admin/management/views.py @@ -130,6 +130,7 @@ def post(self, request, *args, **kwargs): if report_date is not None else '' ), + reporter_key=request.POST.get('monthly_reporter', '') ) if errors: diff --git a/admin/templates/institutions/detail.html b/admin/templates/institutions/detail.html index 2bede6e7d92..47315d8e8d7 100644 --- a/admin/templates/institutions/detail.html +++ b/admin/templates/institutions/detail.html @@ -9,9 +9,17 @@ {% endblock title %} {% block content %}
    +
      + {% for message in messages %} + {{ message }} + {% endfor %} +
    Export institution metadata + + Run monthly report + {% if perms.osf.delete_institution %} Delete institution {% endif %} @@ -100,6 +108,33 @@

    Import from JSON

    + + {% endblock content %} {% block bottom_js %} diff --git a/admin/templates/management/commands.html b/admin/templates/management/commands.html index 93eeaf24c18..96fedb49307 100644 --- a/admin/templates/management/commands.html +++ b/admin/templates/management/commands.html @@ -96,7 +96,7 @@

    Daily Reporters, Go!

    Monthly Reporters, Go!

    -

    Use this management command to run all daily metrics reports.

    +

    Use this management command to run all monthly metrics reports.

    {% csrf_token %} diff --git a/admin_tests/institutions/test_views.py b/admin_tests/institutions/test_views.py index 531adb9b8f6..13cb1456ab9 100644 --- a/admin_tests/institutions/test_views.py +++ b/admin_tests/institutions/test_views.py @@ -1,10 +1,12 @@ import json import pytest +from unittest import mock from django.test import RequestFactory from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType from django.core.exceptions import PermissionDenied +from osf_tests.factories import faker, FakeList from tests.base import AdminTestCase from osf_tests.factories import ( @@ -199,6 +201,32 @@ def test_get_view(self): res = self.view.get(self.request) assert res.status_code == 200 + @mock.patch('admin.institutions.views.monthly_reporter_do.apply_async') + def test_monthly_reporter_called_on_create(self, mock_monthly_reporter_do): + data = { + '_id': 'wqhx1', + 'name': 'company', + 'login_url': faker.url(), + 'logout_url': faker.url(), + 'identifier_domain': faker.url(), + 'ror_uri': faker.url(), + 'domains': FakeList('url', n=3), + 'email_domains': FakeList('domain_name', n=1), + 'orcid_record_verified_source': '', + 'delegation_protocol': '', + 'institutional_request_access_enabled': False + } + form = InstitutionForm(data=data) + assert form.is_valid() + + view = setup_form_view(self.base_view(), self.request, form=form) + view.object = form.save() + view.form_valid(form) + + mock_monthly_reporter_do.assert_called_once() + _, kwargs = mock_monthly_reporter_do.call_args + assert kwargs['kwargs']['report_kwargs']['institution_pk'] == view.object.id + class TestAffiliatedNodeList(AdminTestCase): def setUp(self): diff --git a/api/institutions/views.py b/api/institutions/views.py index 124e523c7e8..b7b43d0e718 100644 --- a/api/institutions/views.py +++ b/api/institutions/views.py @@ -624,7 +624,11 @@ def get_object(self): return object def get_default_search(self): - yearmonth = InstitutionMonthlySummaryReport.most_recent_yearmonth() + base_search = InstitutionMonthlySummaryReport.search().filter( + 'term', + institution_id=self.get_institution()._id, + ) + yearmonth = InstitutionMonthlySummaryReport.most_recent_yearmonth(base_search=base_search) if report_date_str := self.request.query_params.get('report_yearmonth'): try: yearmonth = YearMonth.from_str(report_date_str) @@ -634,12 +638,9 @@ def get_default_search(self): if yearmonth is None: return None - return InstitutionMonthlySummaryReport.search().filter( + return base_search.filter( 'term', report_yearmonth=str(yearmonth), - ).filter( - 'term', - institution_id=self.get_institution()._id, ) diff --git a/osf/management/commands/monthly_reporters_go.py b/osf/management/commands/monthly_reporters_go.py index 7ab7b843434..83ed5f6d985 100644 --- a/osf/management/commands/monthly_reporters_go.py +++ b/osf/management/commands/monthly_reporters_go.py @@ -79,7 +79,12 @@ def schedule_monthly_reporter( retry_backoff=True, ) def monthly_reporter_do(reporter_key: str, yearmonth: str, report_kwargs: dict): - _reporter = _get_reporter(reporter_key, yearmonth) + try: + _reporter = _get_reporter(reporter_key, yearmonth) + except KeyError as exc: + framework.sentry.log_exception(exc) + return + _report = _reporter.report(**report_kwargs) if _report is not None: _report.report_yearmonth = _reporter.yearmonth diff --git a/osf/metrics/reports.py b/osf/metrics/reports.py index 28ca6cdb964..ffbcfb4c9b8 100644 --- a/osf/metrics/reports.py +++ b/osf/metrics/reports.py @@ -90,7 +90,7 @@ class Meta: 
@classmethod def most_recent_yearmonth(cls, base_search=None) -> YearMonth | None: _search = base_search or cls.search() - _search = _search.update_from_dict({'size': 0}) # omit hits + _search = _search[0:0] # omit hits _search.aggs.bucket( 'agg_most_recent_yearmonth', 'terms', @@ -101,8 +101,12 @@ def most_recent_yearmonth(cls, base_search=None) -> YearMonth | None: _response = _search.execute() if not _response.aggregations: return None - (_bucket,) = _response.aggregations.agg_most_recent_yearmonth.buckets - return _bucket.key + + buckets = _response.aggregations.agg_most_recent_yearmonth.buckets + if not buckets: + return None + + return buckets[0].key def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) From c635bebb8177e50123e2557196527676160e0ace Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 31 Jul 2025 19:47:56 -0400 Subject: [PATCH 164/176] remove logs groups embedding --- tests/test_user_profile_view.py | 1 + website/static/js/components/logFeed.js | 2 +- website/static/js/myProjects.js | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_user_profile_view.py b/tests/test_user_profile_view.py index 735e54a0590..876acba9bf9 100644 --- a/tests/test_user_profile_view.py +++ b/tests/test_user_profile_view.py @@ -6,6 +6,7 @@ from rest_framework import status as http_status from addons.github.tests.factories import GitHubAccountFactory +from conftest import start_mock_send_grid from framework.celery_tasks import handlers from osf.external.spam import tasks as spam_tasks from osf.models import ( diff --git a/website/static/js/components/logFeed.js b/website/static/js/components/logFeed.js index 0587f9c6953..de5cd2b2154 100644 --- a/website/static/js/components/logFeed.js +++ b/website/static/js/components/logFeed.js @@ -18,7 +18,7 @@ var _buildLogUrl = function(node, page, limitLogs) { var logPage = page || 1; var urlPrefix = (node.isRegistration || node.is_registration) ? 'registrations' : 'nodes'; var size = limitLogs ? LOG_PAGE_SIZE_LIMITED : LOG_PAGE_SIZE; - var query = { 'page[size]': size, 'page': logPage, 'embed': ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node', 'group',], 'profile_image_size': PROFILE_IMAGE_SIZE}; + var query = { 'page[size]': size, 'page': logPage, 'embed': ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node'], 'profile_image_size': PROFILE_IMAGE_SIZE}; var viewOnly = $osf.urlParams().view_only; if (viewOnly) { query.view_only = viewOnly; diff --git a/website/static/js/myProjects.js b/website/static/js/myProjects.js index ddc1921def0..347e39f6aa4 100644 --- a/website/static/js/myProjects.js +++ b/website/static/js/myProjects.js @@ -532,7 +532,7 @@ var MyProjects = { if(!item.data.attributes.retracted){ var urlPrefix = item.data.attributes.registration ? 
'registrations' : 'nodes'; // TODO assess sparse field usage (some already implemented) - var url = $osf.apiV2Url(urlPrefix + '/' + id + '/logs/', { query : { 'page[size]' : 6, 'embed' : ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node', 'group',], 'profile_image_size': PROFILE_IMAGE_SIZE, 'fields[users]' : sparseUserFields}}); + var url = $osf.apiV2Url(urlPrefix + '/' + id + '/logs/', { query : { 'page[size]' : 6, 'embed' : ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node'], 'profile_image_size': PROFILE_IMAGE_SIZE, 'fields[users]' : sparseUserFields}}); var promise = self.getLogs(url); return promise; } From 681a960e0fc7661c04fec0329a501cbfd786f43a Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Thu, 31 Jul 2025 19:47:56 -0400 Subject: [PATCH 165/176] remove logs groups embedding --- website/static/js/components/logFeed.js | 2 +- website/static/js/myProjects.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/website/static/js/components/logFeed.js b/website/static/js/components/logFeed.js index 0587f9c6953..de5cd2b2154 100644 --- a/website/static/js/components/logFeed.js +++ b/website/static/js/components/logFeed.js @@ -18,7 +18,7 @@ var _buildLogUrl = function(node, page, limitLogs) { var logPage = page || 1; var urlPrefix = (node.isRegistration || node.is_registration) ? 'registrations' : 'nodes'; var size = limitLogs ? LOG_PAGE_SIZE_LIMITED : LOG_PAGE_SIZE; - var query = { 'page[size]': size, 'page': logPage, 'embed': ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node', 'group',], 'profile_image_size': PROFILE_IMAGE_SIZE}; + var query = { 'page[size]': size, 'page': logPage, 'embed': ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node'], 'profile_image_size': PROFILE_IMAGE_SIZE}; var viewOnly = $osf.urlParams().view_only; if (viewOnly) { query.view_only = viewOnly; diff --git a/website/static/js/myProjects.js b/website/static/js/myProjects.js index ddc1921def0..347e39f6aa4 100644 --- a/website/static/js/myProjects.js +++ b/website/static/js/myProjects.js @@ -532,7 +532,7 @@ var MyProjects = { if(!item.data.attributes.retracted){ var urlPrefix = item.data.attributes.registration ? 
'registrations' : 'nodes'; // TODO assess sparse field usage (some already implemented) - var url = $osf.apiV2Url(urlPrefix + '/' + id + '/logs/', { query : { 'page[size]' : 6, 'embed' : ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node', 'group',], 'profile_image_size': PROFILE_IMAGE_SIZE, 'fields[users]' : sparseUserFields}}); + var url = $osf.apiV2Url(urlPrefix + '/' + id + '/logs/', { query : { 'page[size]' : 6, 'embed' : ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node'], 'profile_image_size': PROFILE_IMAGE_SIZE, 'fields[users]' : sparseUserFields}}); var promise = self.getLogs(url); return promise; } From bb1d1f5decaee1d26b6225fac5b3d3abc2296149 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 1 Aug 2025 09:49:21 -0400 Subject: [PATCH 166/176] re-expose quickfiles migrated logs --- osf/models/nodelog.py | 1 + website/static/js/anonymousLogActionsList.json | 1 + website/static/js/logActionsList.json | 1 + 3 files changed, 3 insertions(+) diff --git a/osf/models/nodelog.py b/osf/models/nodelog.py index a9f0bf63103..0297c42bdec 100644 --- a/osf/models/nodelog.py +++ b/osf/models/nodelog.py @@ -140,6 +140,7 @@ class NodeLog(ObjectIDMixin, BaseModel): CONFIRM_HAM = 'confirm_ham' FLAG_SPAM = 'flag_spam' CONFIRM_SPAM = 'confirm_spam' + MIGRATED_QUICK_FILES = 'migrated_quickfiles' RESOURCE_ADDED = 'resource_identifier_added' RESOURCE_UPDATED = 'resource_identifier_udpated' diff --git a/website/static/js/anonymousLogActionsList.json b/website/static/js/anonymousLogActionsList.json index 17642a945f6..e047fbdfc29 100644 --- a/website/static/js/anonymousLogActionsList.json +++ b/website/static/js/anonymousLogActionsList.json @@ -92,6 +92,7 @@ "subjects_updated": "A user updated the subjects", "view_only_link_added": "A user created a view-only link to a project", "view_only_link_removed": "A user removed a view-only link to a project", + "migrated_quickfiles": "QuickFiles were migrated into a public project", "resource_identifier_added": "A Resource has been added to the Node", "resource_identifier_removed": "A Resource has been removed from the Node", "resource_identifier_updated": "A Resource on the Node has had its PID updated" diff --git a/website/static/js/logActionsList.json b/website/static/js/logActionsList.json index 4b17c8c855c..53c5ef02f04 100644 --- a/website/static/js/logActionsList.json +++ b/website/static/js/logActionsList.json @@ -104,6 +104,7 @@ "prereg_links_updated": "${user} has updated their preregistration data links", "why_no_prereg_updated": "${user} has updated their preregistration data availability statement", "prereg_links_info_updated": "${user} has updated their preregistration links to ${value}", + "migrated_quickfiles": "${user} had their QuickFiles migrated into ${node}", "resource_identifier_added": "${user} has added a Resource with DOI ${new_identifier} to Registration ${node}", "resource_identifier_removed": "${user} has removed a Resource with DOI ${obsolete_identifier} to Registration ${node}", "resource_identifier_updated": "${user} has updated a Resource DOI on Registration ${node} from ${obsolete_identifier} to ${new_identifier}" From 519b71036697a582f8125b0d49cec040629c8631 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 1 Aug 2025 10:41:41 -0400 Subject: [PATCH 167/176] mock mail for tests --- tests/identifiers/test_datacite.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/identifiers/test_datacite.py b/tests/identifiers/test_datacite.py index 5e5563f8917..768a400fc59 
100644 --- a/tests/identifiers/test_datacite.py +++ b/tests/identifiers/test_datacite.py @@ -29,6 +29,7 @@ def _assert_unordered_list_of_dicts_equal(actual_list_of_dicts, expected_list_of @pytest.mark.django_db @pytest.mark.usefixtures('mock_gravy_valet_get_verified_links') +@mock.patch('website.mails.settings.USE_EMAIL', False) class TestDataCiteClient: @pytest.fixture() From 4c50c6c9bdf0745772f09e522def0fa61ff0858e Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Fri, 1 Aug 2025 14:41:37 -0400 Subject: [PATCH 168/176] fix mail mocking issues for password reset --- .../test_user_sanction_response.py | 1 - api_tests/users/views/test_user_settings.py | 22 ++++++++++++++----- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/api_tests/users/views/sanction_response/test_user_sanction_response.py b/api_tests/users/views/sanction_response/test_user_sanction_response.py index ef68231a04d..92c2f07ea32 100644 --- a/api_tests/users/views/sanction_response/test_user_sanction_response.py +++ b/api_tests/users/views/sanction_response/test_user_sanction_response.py @@ -51,7 +51,6 @@ def test_post_missing_fields(self, app, sanction_url, user): auth=user.auth, expect_errors=True ) - print(res.json) assert res.json['errors'] == [ { 'source': { diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index 4854e2528ee..48c5ca75687 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -9,6 +9,7 @@ AuthUserFactory, UserFactory, ) +from website import settings from django.middleware import csrf from osf.models import Email, NotableDomain from framework.auth.views import auth_email_logout @@ -92,7 +93,7 @@ class TestUserChangePassword: @pytest.fixture() def user_one(self): user = UserFactory() - user.set_password('password1') + user.set_password('password1', notify=False) user.auth = (user.username, 'password1') user.save() return user @@ -128,7 +129,8 @@ def test_post(self, app, user_one, user_two, url, payload): assert res.status_code == 403 # Logged in - res = app.post_json_api(url, payload, auth=user_one.auth) + with mock.patch.object(settings, 'USE_EMAIL', False): + res = app.post_json_api(url, payload, auth=user_one.auth) assert res.status_code == 204 user_one.reload() assert user_one.check_password('password2') @@ -267,7 +269,7 @@ def test_post_invalid_password(self, app, url, user_one, csrf_token): res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) assert res.status_code == 400 - def test_throttle(self, app, url, user_one): + def test_throttle(self, app, url, user_one, csrf_token): encoded_email = urllib.parse.quote(user_one.email) url = f'{url}?email={encoded_email}' app.get(url) @@ -282,8 +284,18 @@ def test_throttle(self, app, url, user_one): } } - res = app.post_json_api(url, payload, expect_errors=True) - res = app.post_json_api(url, payload, expect_errors=True) + res = app.post_json_api( + url, + payload, + headers={'X-CSRFToken': csrf_token}, + expect_errors=True + ) + res = app.post_json_api( + url, + payload, + headers={'X-CSRFToken': csrf_token}, + expect_errors=True + ) assert res.status_code == 429 res = app.get(url, expect_errors=True) From 8bbe32f87649e1a2af326177e0aaeb528e6238ee Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 4 Aug 2025 10:03:25 -0400 Subject: [PATCH 169/176] Merge branch 'feature/digest_notifications_update' of github.com:johnetordoff/osf.io into 
add-new-notifications-data-model * 'feature/digest_notifications_update' of github.com:johnetordoff/osf.io: Add notification digest unit tests fix mail mocking issues for password reset mock mail for tests re-expose quickfiles migrated logs review comments remove logs groups embedding remove logs groups embedding digest notifications update Add is_digest flag to notification subscriptions and related notifications # Conflicts: # admin/nodes/views.py # api_tests/users/views/test_user_settings_detail.py # framework/email/tasks.py # osf/email/__init__.py # osf/models/notification_type.py # tests/framework_tests/test_email.py # tests/test_user_profile_view.py # website/notifications/emails.py # website/notifications/listeners.py --- .../notifications/test_notification_digest.py | 120 ++++++++++ .../test_user_sanction_response.py | 1 - api_tests/users/views/test_user_settings.py | 145 ++++++++++- notifications.yaml | 10 + osf/email/__init__.py | 12 + ...ectpermission_unique_together_and_more.py} | 2 +- osf/migrations/0033_delete_queuedmail.py | 16 -- ...otificationsubscriptionlegacy_and_more.py} | 182 ++++++++++---- ...e_abstractnode_child_node_subscriptions.py | 17 -- ...ontributor_added_email_records_and_more.py | 25 -- .../0036_delete_notificationdigest.py | 16 -- osf/models/collection_submission.py | 1 + osf/models/nodelog.py | 1 + osf/models/notification.py | 8 + osf/models/notification_subscription.py | 11 + osf/models/notification_type.py | 12 +- pytest.ini | 2 +- tests/identifiers/test_datacite.py | 1 + website/notifications/constants.py | 6 - website/notifications/events/files.py | 6 +- website/notifications/listeners.py | 34 ++- website/notifications/tasks.py | 225 ++++++++++++++++++ website/reviews/listeners.py | 9 +- .../static/js/anonymousLogActionsList.json | 1 + website/static/js/components/logFeed.js | 2 +- website/static/js/logActionsList.json | 1 + website/static/js/myProjects.js | 2 +- 27 files changed, 709 insertions(+), 159 deletions(-) create mode 100644 api_tests/notifications/test_notification_digest.py rename osf/migrations/{0031_alter_osfgroupgroupobjectpermission_unique_together_and_more.py => 0032_alter_osfgroupgroupobjectpermission_unique_together_and_more.py} (96%) delete mode 100644 osf/migrations/0033_delete_queuedmail.py rename osf/migrations/{0032_alter_notificationsubscription_options_and_more.py => 0033_notification_notificationsubscriptionlegacy_and_more.py} (59%) delete mode 100644 osf/migrations/0034_remove_abstractnode_child_node_subscriptions.py delete mode 100644 osf/migrations/0035_remove_osfuser_contributor_added_email_records_and_more.py delete mode 100644 osf/migrations/0036_delete_notificationdigest.py delete mode 100644 website/notifications/constants.py create mode 100644 website/notifications/tasks.py diff --git a/api_tests/notifications/test_notification_digest.py b/api_tests/notifications/test_notification_digest.py new file mode 100644 index 00000000000..8f3d329222f --- /dev/null +++ b/api_tests/notifications/test_notification_digest.py @@ -0,0 +1,120 @@ +import pytest +from unittest.mock import patch + +from datetime import datetime +from website.notifications.tasks import ( + send_users_email, + get_users_emails, + get_moderators_emails, +) +from osf_tests.factories import ( + AuthUserFactory, + NotificationSubscriptionFactory, + NotificationTypeFactory +) +from osf.models import Notification, NotificationType +from tests.utils import capture_notifications + + +@pytest.mark.django_db +class TestNotificationDigest: + + @pytest.fixture() + 
def user_one(self): + return AuthUserFactory() + + @pytest.fixture() + def user_two(self): + return AuthUserFactory() + + @pytest.fixture() + def test_notification_type(self): + return NotificationTypeFactory( + name='test_notification_type', + template='test template for {notifications}' + ) + + @pytest.fixture() + def notifications_user_one(self, user_one): + data = {'user': None, 'moderator': None} + notification_subscription = NotificationSubscriptionFactory( + user=user_one, + notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED), + message_frequency='monthly', + ) + notification_subscription.emit(event_context={'notifications': 'Test notification'}) + data['user'] = Notification.objects.get(subscription=notification_subscription).id + + notification_subscription = NotificationSubscriptionFactory( + user=user_one, + notification_type=NotificationType.objects.get(name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS), + message_frequency='monthly', + ) + notification_subscription.emit(event_context={'notifications': 'Test notification', 'provider_id': 1}) + data['moderator'] = Notification.objects.get(subscription=notification_subscription).id + return data + + @pytest.fixture() + def notifications_user_two(self, user_two, test_notification_type): + data = {'user': None, 'moderator': None} + notification_subscription = NotificationSubscriptionFactory( + user=user_two, + notification_type=NotificationType.objects.get(name='test_notification_type'), + message_frequency='daily', + ) + notification_subscription.emit(event_context={'notifications': 'Test notification'}) + data['user'] = Notification.objects.get(subscription=notification_subscription).id + return data + + @patch('website.notifications.tasks._send_reviews_moderator_emails') + @patch('website.notifications.tasks._send_global_and_node_emails') + @patch('website.notifications.tasks.datetime') + def test_send_users_email_daily(self, mock_datetime, mock__send_global_and_node_emails, mock__reviews_moderator_email): + mock_datetime.today.return_value = datetime(2025, 8, 2) # Saturday + send_users_email() + mock__send_global_and_node_emails.assert_called_once_with('daily') + mock__reviews_moderator_email.assert_called_once_with('daily') + + @patch('website.notifications.tasks._send_reviews_moderator_emails') + @patch('website.notifications.tasks._send_global_and_node_emails') + @patch('website.notifications.tasks.datetime') + def test_send_users_email_weekly(self, mock_datetime, mock__send_global_and_node_emails, mock__reviews_moderator_email): + mock_datetime.today.return_value = datetime(2025, 8, 4) # Monday + send_users_email() + assert mock__send_global_and_node_emails.call_count == 2 + assert mock__reviews_moderator_email.call_count == 2 + mock__send_global_and_node_emails.assert_any_call('daily') + mock__send_global_and_node_emails.assert_any_call('weekly') + + @patch('website.notifications.tasks._send_reviews_moderator_emails') + @patch('website.notifications.tasks._send_global_and_node_emails') + @patch('website.notifications.tasks.datetime') + def test_send_users_email_monthly(self, mock_datetime, mock__send_global_and_node_emails, mock__reviews_moderator_email): + mock_datetime.today.return_value = datetime(2025, 6, 30) # Last day of month and a Monday + send_users_email() + assert mock__send_global_and_node_emails.call_count == 3 + mock__send_global_and_node_emails.assert_any_call('daily') + mock__send_global_and_node_emails.assert_any_call('weekly') + 
mock__send_global_and_node_emails.assert_any_call('monthly') + + def test_get_emails(self, user_one, notifications_user_one): + users_emails = get_users_emails('monthly') + assert [el for el in users_emails] == [{'user_id': user_one._id, 'info': [{'notification_id': notifications_user_one['user']}]}] + moderators_emails = get_moderators_emails('monthly') + assert [el for el in moderators_emails] == [{'user_id': user_one._id, 'provider_id': '1', 'info': [{'notification_id': notifications_user_one['moderator']}]}] + + @patch('osf.models.Notification.send') + def test_send_users_email_sends_notifications(self, mock_send, user_two, notifications_user_two): + with capture_notifications() as notifications: + send_users_email() + + assert mock_send.called + assert Notification.objects.get(id=notifications_user_two['user']).sent + assert notifications[0]['type'] == 'user_digest' + assert notifications[0]['kwargs']['user'] == user_two + assert notifications[0]['kwargs']['is_digest'] + assert notifications[0]['kwargs']['event_context'] == { + 'notifications': 'test template for Test notification', + 'user_fullname': user_two.fullname, + 'can_change_preferences': False + } diff --git a/api_tests/users/views/sanction_response/test_user_sanction_response.py b/api_tests/users/views/sanction_response/test_user_sanction_response.py index ef68231a04d..92c2f07ea32 100644 --- a/api_tests/users/views/sanction_response/test_user_sanction_response.py +++ b/api_tests/users/views/sanction_response/test_user_sanction_response.py @@ -51,7 +51,6 @@ def test_post_missing_fields(self, app, sanction_url, user): auth=user.auth, expect_errors=True ) - print(res.json) assert res.json['errors'] == [ { 'source': { diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index 927b7892d71..0957270549b 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -1,12 +1,15 @@ +import urllib from unittest import mock import pytest -from api.base.settings.defaults import API_BASE +from api.base.settings.defaults import API_BASE, CSRF_COOKIE_NAME from api.base.utils import hashids from osf_tests.factories import ( AuthUserFactory, UserFactory, ) +from website import settings +from django.middleware import csrf from osf.models import Email, NotableDomain, NotificationType from framework.auth.views import auth_email_logout from tests.utils import capture_notifications @@ -93,7 +96,7 @@ class TestUserChangePassword: @pytest.fixture() def user_one(self): user = UserFactory() - user.set_password('password1') + user.set_password('password1', notify=False) user.auth = (user.username, 'password1') user.save() return user @@ -129,7 +132,8 @@ def test_post(self, app, user_one, user_two, url, payload): assert res.status_code == 403 # Logged in - res = app.post_json_api(url, payload, auth=user_one.auth) + with mock.patch.object(settings, 'USE_EMAIL', False): + res = app.post_json_api(url, payload, auth=user_one.auth) assert res.status_code == 204 user_one.reload() assert user_one.check_password('password2') @@ -168,6 +172,141 @@ def test_multiple_errors(self, app, user_one, url, payload): assert res.json['errors'][1]['detail'] == 'Password should be at least eight characters' +@pytest.mark.django_db +class TestResetPassword: + + @pytest.fixture() + def user_one(self): + user = UserFactory() + user.set_password('password1') + user.auth = (user.username, 'password1') + user.save() + return user + + @pytest.fixture() + def url(self): + return 
f'/{API_BASE}users/reset_password/' + + @pytest.fixture + def csrf_token(self): + return csrf._mask_cipher_secret(csrf._get_new_csrf_string()) + + def test_get(self, app, url, user_one): + encoded_email = urllib.parse.quote(user_one.email) + url = f'{url}?email={encoded_email}' + with capture_notifications() as notifications: + res = app.get(url) + assert res.status_code == 200 + + user_one.reload() + assert len(notifications) == 1 + assert notifications[0]['kwargs']['user'].username == user_one.username + + def test_get_invalid_email(self, app, url): + url = f'{url}?email={'invalid_email'}' + with capture_notifications() as notifications: + res = app.get(url) + assert res.status_code == 200 + assert not notifications + + def test_post(self, app, url, user_one, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) + encoded_email = urllib.parse.quote(user_one.email) + url = f'{url}?email={encoded_email}' + res = app.get(url) + user_one.reload() + payload = { + 'data': { + 'attributes': { + 'uid': user_one._id, + 'token': user_one.verification_key_v2['token'], + 'password': 'password2', + } + } + } + + res = app.post_json_api(url, payload, headers={'X-CSRFToken': csrf_token}) + user_one.reload() + assert res.status_code == 200 + assert user_one.check_password('password2') + + def test_post_empty_payload(self, app, url, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) + payload = { + 'data': { + 'attributes': { + } + } + } + res = app.post_json_api(url, payload, expect_errors=True, headers={'X-CSRFToken': csrf_token}) + assert res.status_code == 400 + + def test_post_invalid_token(self, app, url, user_one, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) + payload = { + 'data': { + 'attributes': { + 'uid': user_one._id, + 'token': 'invalid_token', + 'password': 'password2', + } + } + } + res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) + assert res.status_code == 400 + + def test_post_invalid_password(self, app, url, user_one, csrf_token): + app.set_cookie(CSRF_COOKIE_NAME, csrf_token) + encoded_email = urllib.parse.quote(user_one.email) + url = f'{url}?email={encoded_email}' + res = app.get(url) + user_one.reload() + payload = { + 'data': { + 'attributes': { + 'uid': user_one._id, + 'token': user_one.verification_key_v2['token'], + 'password': user_one.username, + } + } + } + + res = app.post_json_api(url, payload, expect_errors=True, headers={'X-THROTTLE-TOKEN': 'test-token', 'X-CSRFToken': csrf_token}) + assert res.status_code == 400 + + def test_throttle(self, app, url, user_one, csrf_token): + encoded_email = urllib.parse.quote(user_one.email) + url = f'{url}?email={encoded_email}' + app.get(url) + user_one.reload() + payload = { + 'data': { + 'attributes': { + 'uid': user_one._id, + 'token': user_one.verification_key_v2['token'], + 'password': '12345', + } + } + } + + res = app.post_json_api( + url, + payload, + headers={'X-CSRFToken': csrf_token}, + expect_errors=True + ) + res = app.post_json_api( + url, + payload, + headers={'X-CSRFToken': csrf_token}, + expect_errors=True + ) + assert res.status_code == 429 + + res = app.get(url, expect_errors=True) + assert res.json['message'] == 'You have recently requested to change your password. Please wait a few minutes before trying again.' 
+ + @pytest.mark.django_db class TestUserEmailsList: diff --git a/notifications.yaml b/notifications.yaml index 03c74a3bb03..97bdf4ce3c5 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -9,6 +9,16 @@ notification_types: `referrer` is sent an email to forward the confirmation link. object_content_type_model_name: osfuser template: 'website/templates/emails/pending_registered.html.mako' + - name: user_digest + subject: 'Digest email' + __docs__: Digest email + object_content_type_model_name: osfuser + template: 'website/templates/emails/digest.html.mako' + - name: digest_reviews_moderators + subject: 'Digest email' + __docs__: Digest email + object_content_type_model_name: osfuser + template: 'website/templates/emails/digest_reviews_moderators.html.mako' - name: user_pending_verification __docs__: ... object_content_type_model_name: osfuser diff --git a/osf/email/__init__.py b/osf/email/__init__.py index 39819741cb2..f84f1d16e74 100644 --- a/osf/email/__init__.py +++ b/osf/email/__init__.py @@ -116,3 +116,15 @@ def send_email_with_send_grid(to_addr, notification_type, context, email_context except Exception as exc: logging.error(f'Failed to send email notification to {to_addr}: {exc}') raise exc + +def render_notification(template, context): + """Render a notification template with the given context. + + Args: + template (str): The template string to render. + context (dict): The context to use for rendering the template. + + Returns: + str: The rendered template. + """ + return template.format(**context) if template else '' diff --git a/osf/migrations/0031_alter_osfgroupgroupobjectpermission_unique_together_and_more.py b/osf/migrations/0032_alter_osfgroupgroupobjectpermission_unique_together_and_more.py similarity index 96% rename from osf/migrations/0031_alter_osfgroupgroupobjectpermission_unique_together_and_more.py rename to osf/migrations/0032_alter_osfgroupgroupobjectpermission_unique_together_and_more.py index 255a735de5f..71af68020ca 100644 --- a/osf/migrations/0031_alter_osfgroupgroupobjectpermission_unique_together_and_more.py +++ b/osf/migrations/0032_alter_osfgroupgroupobjectpermission_unique_together_and_more.py @@ -6,7 +6,7 @@ class Migration(migrations.Migration): dependencies = [ - ('osf', '0030_abstractnode__manual_guid'), + ('osf', '0031_abstractprovider_registration_word'), ] operations = [ diff --git a/osf/migrations/0033_delete_queuedmail.py b/osf/migrations/0033_delete_queuedmail.py deleted file mode 100644 index febe0843df5..00000000000 --- a/osf/migrations/0033_delete_queuedmail.py +++ /dev/null @@ -1,16 +0,0 @@ -# Generated by Django 4.2.13 on 2025-07-27 21:30 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('osf', '0032_alter_notificationsubscription_options_and_more'), - ] - - operations = [ - migrations.DeleteModel( - name='QueuedMail', - ), - ] diff --git a/osf/migrations/0032_alter_notificationsubscription_options_and_more.py b/osf/migrations/0033_notification_notificationsubscriptionlegacy_and_more.py similarity index 59% rename from osf/migrations/0032_alter_notificationsubscription_options_and_more.py rename to osf/migrations/0033_notification_notificationsubscriptionlegacy_and_more.py index b4f273108d5..7058cb56618 100644 --- a/osf/migrations/0032_alter_notificationsubscription_options_and_more.py +++ b/osf/migrations/0033_notification_notificationsubscriptionlegacy_and_more.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.13 on 2025-07-08 17:07 +# Generated by Django 4.2.13 on 2025-08-04 13:57 
from django.conf import settings import django.contrib.postgres.fields @@ -13,10 +13,77 @@ class Migration(migrations.Migration): dependencies = [ ('contenttypes', '0002_remove_content_type_name'), - ('osf', '0031_alter_osfgroupgroupobjectpermission_unique_together_and_more'), + ('osf', '0032_alter_osfgroupgroupobjectpermission_unique_together_and_more'), ] operations = [ + migrations.RunSQL( + """ + DO $$ + DECLARE + idx record; + BEGIN + FOR idx IN + SELECT indexname + FROM pg_indexes + WHERE tablename = 'osf_notificationsubscription' + LOOP + EXECUTE format( + 'ALTER INDEX %I RENAME TO %I', + idx.indexname, + replace(idx.indexname, 'osf_notificationsubscription', 'osf_notificationsubscription_legacy') + ); + END LOOP; + END$$; + """ + ), + migrations.CreateModel( + name='Notification', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('event_context', models.JSONField()), + ('sent', models.DateTimeField(blank=True, null=True)), + ('seen', models.DateTimeField(blank=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True)), + ], + options={ + 'verbose_name': 'Notification', + 'verbose_name_plural': 'Notifications', + }, + ), + migrations.CreateModel( + name='NotificationSubscriptionLegacy', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), + ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), + ('_id', models.CharField(db_index=True, max_length=100)), + ('event_name', models.CharField(max_length=100)), + ], + options={ + 'db_table': 'osf_notificationsubscription_legacy', + }, + bases=(models.Model, osf.models.base.QuerySetExplainMixin), + ), + migrations.CreateModel( + name='NotificationType', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('notification_interval_choices', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=32), blank=True, default=osf.models.notification_type.get_default_frequency_choices, size=None)), + ('name', models.CharField(max_length=255, unique=True)), + ('template', models.TextField(help_text='Template used to render the event_info. Supports Django template syntax.')), + ('subject', models.TextField(blank=True, help_text='Template used to render the subject line of email. Supports Django template syntax.', null=True)), + ('object_content_type', models.ForeignKey(blank=True, help_text='Content type for subscribed objects. 
Null means global event.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='contenttypes.contenttype')), + ], + options={ + 'verbose_name': 'Notification Type', + 'verbose_name_plural': 'Notification Types', + }, + ), + migrations.RemoveField( + model_name='queuedmail', + name='user', + ), migrations.AlterModelOptions( name='notificationsubscription', options={'verbose_name': 'Notification Subscription', 'verbose_name_plural': 'Notification Subscriptions'}, @@ -25,6 +92,27 @@ class Migration(migrations.Migration): name='notificationsubscription', unique_together=set(), ), + migrations.RemoveField( + model_name='abstractnode', + name='child_node_subscriptions', + ), + migrations.RemoveField( + model_name='osfuser', + name='contributor_added_email_records', + ), + migrations.RemoveField( + model_name='osfuser', + name='group_connected_email_records', + ), + migrations.RemoveField( + model_name='osfuser', + name='member_added_email_records', + ), + migrations.AddField( + model_name='notificationsubscription', + name='_is_digest', + field=models.BooleanField(default=False), + ), migrations.AddField( model_name='notificationsubscription', name='content_type', @@ -45,35 +133,46 @@ class Migration(migrations.Migration): name='user', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', to=settings.AUTH_USER_MODEL), ), - migrations.CreateModel( - name='NotificationType', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('notification_interval_choices', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=32), blank=True, default=osf.models.notification_type.get_default_frequency_choices, size=None)), - ('name', models.CharField(max_length=255, unique=True)), - ('template', models.TextField(help_text='Template used to render the event_info. Supports Django template syntax.')), - ('subject', models.TextField(blank=True, help_text='Template used to render the subject line of email. Supports Django template syntax.', null=True)), - ('object_content_type', models.ForeignKey(blank=True, help_text='Content type for subscribed objects. 
Null means global event.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='contenttypes.contenttype')), - ], - options={ - 'verbose_name': 'Notification Type', - 'verbose_name_plural': 'Notification Types', - }, + migrations.DeleteModel( + name='NotificationDigest', ), - migrations.CreateModel( - name='Notification', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('event_context', models.JSONField()), - ('sent', models.DateTimeField(blank=True, null=True)), - ('seen', models.DateTimeField(blank=True, null=True)), - ('created', models.DateTimeField(auto_now_add=True)), - ('subscription', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to='osf.notificationsubscription')), - ], - options={ - 'verbose_name': 'Notification', - 'verbose_name_plural': 'Notifications', - }, + migrations.DeleteModel( + name='QueuedMail', + ), + migrations.AddField( + model_name='notificationsubscriptionlegacy', + name='email_digest', + field=models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='notificationsubscriptionlegacy', + name='email_transactional', + field=models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='notificationsubscriptionlegacy', + name='node', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to='osf.node'), + ), + migrations.AddField( + model_name='notificationsubscriptionlegacy', + name='none', + field=models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='notificationsubscriptionlegacy', + name='provider', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to='osf.abstractprovider'), + ), + migrations.AddField( + model_name='notificationsubscriptionlegacy', + name='user', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='notification', + name='subscription', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to='osf.notificationsubscription'), ), migrations.RemoveField( model_name='notificationsubscription', @@ -108,25 +207,8 @@ class Migration(migrations.Migration): name='notification_type', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='osf.notificationtype'), ), - migrations.CreateModel( - name='NotificationSubscriptionLegacy', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), - ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), - ('_id', models.CharField(db_index=True, max_length=100)), - ('event_name', models.CharField(max_length=100)), - ('email_digest', models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL)), - ('email_transactional', models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL)), - ('node', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, 
related_name='notification_subscriptions', to='osf.node')), - ('none', models.ManyToManyField(related_name='+', to=settings.AUTH_USER_MODEL)), - ('provider', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to='osf.abstractprovider')), - ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_subscriptions', to=settings.AUTH_USER_MODEL)), - ], - options={ - 'db_table': 'osf_notificationsubscription_legacy', - 'unique_together': {('_id', 'provider')}, - }, - bases=(models.Model, osf.models.base.QuerySetExplainMixin), + migrations.AlterUniqueTogether( + name='notificationsubscriptionlegacy', + unique_together={('_id', 'provider')}, ), ] diff --git a/osf/migrations/0034_remove_abstractnode_child_node_subscriptions.py b/osf/migrations/0034_remove_abstractnode_child_node_subscriptions.py deleted file mode 100644 index 79bd4ec9243..00000000000 --- a/osf/migrations/0034_remove_abstractnode_child_node_subscriptions.py +++ /dev/null @@ -1,17 +0,0 @@ -# Generated by Django 4.2.13 on 2025-07-27 23:06 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('osf', '0033_delete_queuedmail'), - ] - - operations = [ - migrations.RemoveField( - model_name='abstractnode', - name='child_node_subscriptions', - ), - ] diff --git a/osf/migrations/0035_remove_osfuser_contributor_added_email_records_and_more.py b/osf/migrations/0035_remove_osfuser_contributor_added_email_records_and_more.py deleted file mode 100644 index 48fd5f258da..00000000000 --- a/osf/migrations/0035_remove_osfuser_contributor_added_email_records_and_more.py +++ /dev/null @@ -1,25 +0,0 @@ -# Generated by Django 4.2.13 on 2025-07-29 17:41 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('osf', '0034_remove_abstractnode_child_node_subscriptions'), - ] - - operations = [ - migrations.RemoveField( - model_name='osfuser', - name='contributor_added_email_records', - ), - migrations.RemoveField( - model_name='osfuser', - name='group_connected_email_records', - ), - migrations.RemoveField( - model_name='osfuser', - name='member_added_email_records', - ), - ] diff --git a/osf/migrations/0036_delete_notificationdigest.py b/osf/migrations/0036_delete_notificationdigest.py deleted file mode 100644 index 8ab718d12d6..00000000000 --- a/osf/migrations/0036_delete_notificationdigest.py +++ /dev/null @@ -1,16 +0,0 @@ -# Generated by Django 4.2.13 on 2025-07-29 18:25 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('osf', '0035_remove_osfuser_contributor_added_email_records_and_more'), - ] - - operations = [ - migrations.DeleteModel( - name='NotificationDigest', - ), - ] diff --git a/osf/models/collection_submission.py b/osf/models/collection_submission.py index c7f5e93b3e9..af7d1b730ff 100644 --- a/osf/models/collection_submission.py +++ b/osf/models/collection_submission.py @@ -131,6 +131,7 @@ def _notify_moderators_pending(self, event_data): 'submitter': self.creator.id, 'requester_contributor_names': ''.join(self.guid.referent.contributors.values_list('fullname', flat=True)) }, + is_digest=True, ) def _validate_accept(self, event_data): diff --git a/osf/models/nodelog.py b/osf/models/nodelog.py index a9f0bf63103..0297c42bdec 100644 --- a/osf/models/nodelog.py +++ b/osf/models/nodelog.py @@ -140,6 +140,7 @@ class NodeLog(ObjectIDMixin, BaseModel): CONFIRM_HAM 
= 'confirm_ham' FLAG_SPAM = 'flag_spam' CONFIRM_SPAM = 'confirm_spam' + MIGRATED_QUICK_FILES = 'migrated_quickfiles' RESOURCE_ADDED = 'resource_identifier_added' RESOURCE_UPDATED = 'resource_identifier_udpated' diff --git a/osf/models/notification.py b/osf/models/notification.py index e0775b192d3..6699333b7ca 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -70,6 +70,14 @@ def mark_seen(self) -> None: # self.seen = timezone.now() # self.save(update_fields=['seen']) + def render(self) -> str: + """Render the notification message using the event context.""" + template = self.subscription.notification_type.template + if not template: + raise ValueError('Notification type must have a template to render the notification.') + notification = email.render_notification(template, self.event_context) + return notification + def __str__(self) -> str: return f'Notification for {self.subscription.user} [{self.subscription.notification_type.name}]' diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py index 12e427b9e30..840e561cbac 100644 --- a/osf/models/notification_subscription.py +++ b/osf/models/notification_subscription.py @@ -1,3 +1,5 @@ +import logging + from django.db import models from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType @@ -28,6 +30,8 @@ class NotificationSubscription(BaseModel): object_id = models.CharField(max_length=255, null=True, blank=True) subscribed_object = GenericForeignKey('content_type', 'object_id') + _is_digest = models.BooleanField(default=False) + def clean(self): ct = self.notification_type.object_content_type @@ -65,6 +69,13 @@ def emit( to a test address or OSF desk support' email_context (dict, optional): Context for sending the email bcc, reply_to header etc """ + logging.info( + f"Attempting to create Notification:" + f"\nto={self.user.username}" + f"\ntype={self.notification_type.name}" + f"\nmessage_frequency={self.message_frequency}" + f"\ncontext={event_context}" + ) if self.message_frequency == 'instantly': notification = Notification.objects.create( subscription=self, diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 48f82cbb685..050ce7bca2a 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -74,6 +74,8 @@ class Type(str, Enum): USER_CAMPAIGN_CONFIRM_EMAIL_AGU_CONFERENCE_2023 = 'user_campaign_confirm_email_agu_conference_2023' USER_CAMPAIGN_CONFIRM_EMAIL_REGISTRIES_OSF = 'user_campaign_confirm_email_registries_osf' USER_CAMPAIGN_CONFIRM_EMAIL_ERPC = 'user_campaign_confirm_email_erpc' + USER_DIGEST = 'user_digest' + DIGEST_REVIEWS_MODERATORS = 'digest_reviews_moderators' # Node notifications NODE_COMMENT = 'node_comments' @@ -206,6 +208,7 @@ def emit( message_frequency='instantly', event_context=None, email_context=None, + is_digest=False ): """Emit a notification to a user by creating Notification and NotificationSubscription objects. 
@@ -221,11 +224,10 @@ def emit( subscription, created = NotificationSubscription.objects.get_or_create( notification_type=self, user=user, - defaults={ - 'object_id': subscribed_object.pk if subscribed_object else None, - 'message_frequency': message_frequency, - 'content_type': ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, - }, + content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, + object_id=subscribed_object.pk if subscribed_object else None, + defaults={'message_frequency': message_frequency}, + _is_digest=is_digest, ) subscription.emit( destination_address=destination_address, diff --git a/pytest.ini b/pytest.ini index f0126e4dfd5..d3a32470017 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,5 @@ [pytest] -addopts = --ds=osf_tests.settings --tb=short --reuse-db --allow-hosts=127.0.0.1,192.168.168.167,localhost +addopts = --ds=osf_tests.settings --tb=short --allow-hosts=127.0.0.1,192.168.168.167,localhost filterwarnings = once::UserWarning ignore:.*U.*mode is deprecated:DeprecationWarning diff --git a/tests/identifiers/test_datacite.py b/tests/identifiers/test_datacite.py index 5e5563f8917..768a400fc59 100644 --- a/tests/identifiers/test_datacite.py +++ b/tests/identifiers/test_datacite.py @@ -29,6 +29,7 @@ def _assert_unordered_list_of_dicts_equal(actual_list_of_dicts, expected_list_of @pytest.mark.django_db @pytest.mark.usefixtures('mock_gravy_valet_get_verified_links') +@mock.patch('website.mails.settings.USE_EMAIL', False) class TestDataCiteClient: @pytest.fixture() diff --git a/website/notifications/constants.py b/website/notifications/constants.py deleted file mode 100644 index 6e05855582b..00000000000 --- a/website/notifications/constants.py +++ /dev/null @@ -1,6 +0,0 @@ -# Note: the python value None mean inherit from parent -NOTIFICATION_TYPES = { - 'email_transactional': 'Email when a change occurs', - 'email_digest': 'Daily email digest of all changes to this project', - 'none': 'None' -} diff --git a/website/notifications/events/files.py b/website/notifications/events/files.py index 869a3d9c53d..cc2b652e3a3 100644 --- a/website/notifications/events/files.py +++ b/website/notifications/events/files.py @@ -251,7 +251,8 @@ def perform(self): event_context={ 'profile_image_url': self.profile_image_url, 'url': self.url - } + }, + is_digest=True, ) @@ -277,5 +278,6 @@ def perform(self): event_context={ 'profile_image_url': self.profile_image_url, 'url': self.url - } + }, + is_digest=True, ) diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index ceae7ba6e10..aecd846d697 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -1,9 +1,7 @@ import logging from django.apps import apps -from django.contrib.contenttypes.models import ContentType -from osf.models import NotificationSubscription, NotificationType from website.project.signals import contributor_added, project_created from framework.auth.signals import user_confirmed from website.project.signals import privacy_set_public @@ -14,6 +12,10 @@ @project_created.connect def subscribe_creator(resource): + from osf.models import NotificationSubscription, NotificationType + + from django.contrib.contenttypes.models import ContentType + if resource.is_collection or resource.is_deleted: return None user = resource.creator @@ -31,6 +33,9 @@ def subscribe_creator(resource): @contributor_added.connect def subscribe_contributor(resource, contributor, auth=None, *args, **kwargs): + 
from django.contrib.contenttypes.models import ContentType + from osf.models import NotificationSubscription, NotificationType + from osf.models import Node if isinstance(resource, Node): if resource.is_collection or resource.is_deleted: @@ -51,21 +56,23 @@ def subscribe_contributor(resource, contributor, auth=None, *args, **kwargs): def subscribe_confirmed_user(user): NotificationSubscription = apps.get_model('osf.NotificationSubscription') NotificationType = apps.get_model('osf.NotificationType') - NotificationSubscription.objects.get_or_create( - user=user, - notification_type=NotificationType.objects.get(name=NotificationType.Type.USER_FILE_UPDATED) - ) - NotificationSubscription.objects.get_or_create( - user=user, - notification_type=NotificationType.objects.get(name=NotificationType.Type.USER_REVIEWS) - ) - + user_events = [ + NotificationType.Type.USER_FILE_UPDATED, + NotificationType.Type.USER_REVIEWS, + ] + for user_event in user_events: + NotificationSubscription.objects.get_or_create( + user=user, + notification_type__name=user_event + ) @privacy_set_public.connect def queue_first_public_project_email(user, node): """Queue and email after user has made their first non-OSF4M project public. """ + from osf.models import NotificationType + NotificationType.objects.get( name=NotificationType.Type.USER_NEW_PUBLIC_PROJECT, ).emit( @@ -83,8 +90,10 @@ def reviews_submit_notification_moderators(self, timestamp, context, resource): """ Handle email notifications to notify moderators of new submissions or resubmission. """ + # imports moved here to avoid AppRegistryNotReady error - from osf.models import NotificationType + from osf.models import NotificationSubscription, NotificationType + from django.contrib.contenttypes.models import ContentType from website.settings import DOMAIN provider = resource.provider @@ -126,6 +135,7 @@ def reviews_submit_notification_moderators(self, timestamp, context, resource): @reviews_signals.reviews_withdraw_requests_notification_moderators.connect def reviews_withdraw_requests_notification_moderators(self, timestamp, context, user, resource): from website.settings import DOMAIN + from osf.models import NotificationType provider = resource.provider # Set message diff --git a/website/notifications/tasks.py b/website/notifications/tasks.py new file mode 100644 index 00000000000..94c9e71075a --- /dev/null +++ b/website/notifications/tasks.py @@ -0,0 +1,225 @@ +""" +Tasks for making even transactional emails consolidated. +""" +import itertools +from datetime import datetime +from calendar import monthrange + +from django.db import connection + +from framework.celery_tasks import app as celery_app +from framework.sentry import log_message +from osf.models import ( + OSFUser, + AbstractProvider, + RegistrationProvider, + CollectionProvider, + Notification, + NotificationType, +) +from osf.registrations.utils import get_registration_provider_submissions_url +from osf.utils.permissions import ADMIN +from website import settings + + +@celery_app.task(name='website.notifications.tasks.send_users_email', max_retries=0) +def send_users_email(): + """Send pending emails. 
+ """ + today = datetime.today().date() + + # Run for yesterday + _send_global_and_node_emails('daily') + _send_reviews_moderator_emails('daily') + + # Run only on Mondays + if today.weekday() == 0: # Monday is 0 + _send_global_and_node_emails('weekly') + _send_reviews_moderator_emails('weekly') + + # Run only on the last day of the month + last_day = monthrange(today.year, today.month)[1] + if today.day == last_day: + _send_global_and_node_emails('monthly') + _send_reviews_moderator_emails('monthly') + + +def _send_global_and_node_emails(message_freq): + """ + Called by `send_users_email`. Send all global and node-related notification emails. + """ + grouped_emails = get_users_emails(message_freq) + for group in grouped_emails: + user = OSFUser.load(group['user_id']) + if not user: + log_message(f"User with id={group['user_id']} not found") + continue + if user.is_disabled: + continue + + info = group['info'] + notification_ids = [message['notification_id'] for message in info] + notifications_qs = Notification.objects.filter(id__in=notification_ids) + rendered_notifications = [notification.render() for notification in notifications_qs] + + if not rendered_notifications: + log_message(f"No notifications to send for user {user._id} with message frequency {message_freq}") + continue + event_context = { + 'notifications': '
    '.join(rendered_notifications), + 'user_fullname': user.fullname, + 'can_change_preferences': False + } + + notification_type = NotificationType.objects.get(name=NotificationType.Type.USER_DIGEST) + notification_type.emit(user=user, event_context=event_context, is_digest=True) + + for notification in notifications_qs: + notification.mark_sent() + +def _send_reviews_moderator_emails(message_freq): + """ + Called by `send_users_email`. Send all reviews triggered emails. + """ + grouped_emails = get_moderators_emails(message_freq) + for group in grouped_emails: + user = OSFUser.load(group['user_id']) + if not user: + log_message(f"User with id={group['user_id']} not found") + continue + if user.is_disabled: + continue + + info = group['info'] + notification_ids = [message['notification_id'] for message in info] + notifications_qs = Notification.objects.filter(id__in=notification_ids) + rendered_notifications = [notification.render() for notification in notifications_qs] + + provider = AbstractProvider.objects.get(id=group['provider_id']) + additional_context = dict() + if isinstance(provider, RegistrationProvider): + provider_type = 'registration' + submissions_url = get_registration_provider_submissions_url(provider) + withdrawals_url = f'{submissions_url}?state=pending_withdraw' + notification_settings_url = f'{settings.DOMAIN}registries/{provider._id}/moderation/notifications' + if provider.brand: + additional_context = { + 'logo_url': provider.brand.hero_logo_image, + 'top_bar_color': provider.brand.primary_color + } + elif isinstance(provider, CollectionProvider): + provider_type = 'collection' + submissions_url = f'{settings.DOMAIN}collections/{provider._id}/moderation/' + notification_settings_url = f'{settings.DOMAIN}registries/{provider._id}/moderation/notifications' + if provider.brand: + additional_context = { + 'logo_url': provider.brand.hero_logo_image, + 'top_bar_color': provider.brand.primary_color + } + withdrawals_url = '' + else: + provider_type = 'preprint' + submissions_url = f'{settings.DOMAIN}reviews/preprints/{provider._id}', + withdrawals_url = '' + notification_settings_url = f'{settings.DOMAIN}reviews/{provider_type}s/{provider._id}/notifications' + + if not rendered_notifications: + log_message(f"No notifications to send for user {user._id} with message frequency {message_freq}") + continue + event_context = { + 'notifications': '
    '.join(rendered_notifications), + 'user_fullname': user.fullname, + 'can_change_preferences': False, + 'notification_settings_url': notification_settings_url, + 'withdrawals_url': withdrawals_url, + 'submissions_url': submissions_url, + 'provider_type': provider_type, + 'additional_context': additional_context, + 'is_admin': provider.get_group(ADMIN).user_set.filter(id=user.id).exists() + } + + notification_type = NotificationType.objects.get(name=NotificationType.Type.DIGEST_REVIEWS_MODERATORS) + notification_type.emit(user=user, event_context=event_context, is_digest=True) + + for notification in notifications_qs: + notification.mark_sent() + + +def get_moderators_emails(message_freq: str): + """Get all emails for reviews moderators that need to be sent, grouped by users AND providers. + :param send_type: from NOTIFICATION_TYPES, could be "email_digest" or "email_transactional" + :return Iterable of dicts of the form: + """ + sql = """ + SELECT + json_build_object( + 'user_id', osf_guid._id, + 'provider_id', (n.event_context ->> 'provider_id'), + 'info', json_agg( + json_build_object( + 'notification_id', n.id + ) + ) + ) + FROM osf_notification AS n + INNER JOIN osf_notificationsubscription AS ns ON n.subscription_id = ns.id + INNER JOIN osf_notificationtype AS nt ON ns.notification_type_id = nt.id + LEFT JOIN osf_guid ON ns.user_id = osf_guid.object_id + WHERE n.sent IS NULL + AND ns.message_frequency = %s + AND nt.name IN (%s, %s) + AND osf_guid.content_type_id = ( + SELECT id FROM django_content_type WHERE model = 'osfuser' + ) + GROUP BY osf_guid._id, (n.event_context ->> 'provider_id') + ORDER BY osf_guid._id ASC + """ + + with connection.cursor() as cursor: + cursor.execute(sql, + [ + message_freq, + NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS.value, + NotificationType.Type.PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS.value + ] + ) + return itertools.chain.from_iterable(cursor.fetchall()) + +def get_users_emails(message_freq): + """Get all emails that need to be sent. + NOTE: These do not include reviews triggered emails for moderators. 
+ """ + + sql = """ + SELECT + json_build_object( + 'user_id', osf_guid._id, + 'info', json_agg( + json_build_object( + 'notification_id', n.id + ) + ) + ) + FROM osf_notification AS n + INNER JOIN osf_notificationsubscription AS ns ON n.subscription_id = ns.id + INNER JOIN osf_notificationtype AS nt ON ns.notification_type_id = nt.id + LEFT JOIN osf_guid ON ns.user_id = osf_guid.object_id + WHERE n.sent IS NULL + AND ns.message_frequency = %s + AND nt.name NOT IN (%s, %s) + AND osf_guid.content_type_id = ( + SELECT id FROM django_content_type WHERE model = 'osfuser' + ) + GROUP BY osf_guid._id + ORDER BY osf_guid._id ASC + """ + + with connection.cursor() as cursor: + cursor.execute(sql, + [ + message_freq, + NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS.value, + NotificationType.Type.PROVIDER_NEW_PENDING_WITHDRAW_REQUESTS.value + ] + ) + return itertools.chain.from_iterable(cursor.fetchall()) diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index d208bdb099a..e453ae792e0 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -1,6 +1,4 @@ -from django.contrib.contenttypes.models import ContentType -from osf.models import NotificationType from website.settings import DOMAIN, OSF_PREPRINTS_LOGO, OSF_REGISTRIES_LOGO from website.reviews import signals as reviews_signals @@ -26,6 +24,7 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context, ).emit( user=recipient, event_context=context, + is_digest=True, ) @reviews_signals.reviews_email_withdrawal_requests.connect @@ -49,6 +48,7 @@ def reviews_withdrawal_requests_notification(self, timestamp, context): ).emit( user=recipient, event_context=context, + is_digest=True, ) @reviews_signals.reviews_email_submit_moderators_notifications.connect @@ -82,6 +82,9 @@ def reviews_submit_notification_moderators(self, timestamp, resource, context): else: context['message'] = f'submitted "{resource.title}".' 
+ from django.contrib.contenttypes.models import ContentType + from osf.models import NotificationType + # Get NotificationSubscription instance, which contains reference to all subscribers provider_subscription, created = NotificationSubscription.objects.get_or_create( notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, @@ -102,6 +105,8 @@ def reviews_submit_notification(self, recipients, context, resource, notificatio """ Handle email notifications for a new submission or a resubmission """ + from osf.models import NotificationType + provider = resource.provider if provider._id == 'osf': if provider.type == 'osf.preprintprovider': diff --git a/website/static/js/anonymousLogActionsList.json b/website/static/js/anonymousLogActionsList.json index 17642a945f6..e047fbdfc29 100644 --- a/website/static/js/anonymousLogActionsList.json +++ b/website/static/js/anonymousLogActionsList.json @@ -92,6 +92,7 @@ "subjects_updated": "A user updated the subjects", "view_only_link_added": "A user created a view-only link to a project", "view_only_link_removed": "A user removed a view-only link to a project", + "migrated_quickfiles": "QuickFiles were migrated into a public project", "resource_identifier_added": "A Resource has been added to the Node", "resource_identifier_removed": "A Resource has been removed from the Node", "resource_identifier_updated": "A Resource on the Node has had its PID updated" diff --git a/website/static/js/components/logFeed.js b/website/static/js/components/logFeed.js index 0587f9c6953..de5cd2b2154 100644 --- a/website/static/js/components/logFeed.js +++ b/website/static/js/components/logFeed.js @@ -18,7 +18,7 @@ var _buildLogUrl = function(node, page, limitLogs) { var logPage = page || 1; var urlPrefix = (node.isRegistration || node.is_registration) ? 'registrations' : 'nodes'; var size = limitLogs ? 
LOG_PAGE_SIZE_LIMITED : LOG_PAGE_SIZE; - var query = { 'page[size]': size, 'page': logPage, 'embed': ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node', 'group',], 'profile_image_size': PROFILE_IMAGE_SIZE}; + var query = { 'page[size]': size, 'page': logPage, 'embed': ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node'], 'profile_image_size': PROFILE_IMAGE_SIZE}; var viewOnly = $osf.urlParams().view_only; if (viewOnly) { query.view_only = viewOnly; diff --git a/website/static/js/logActionsList.json b/website/static/js/logActionsList.json index 4b17c8c855c..53c5ef02f04 100644 --- a/website/static/js/logActionsList.json +++ b/website/static/js/logActionsList.json @@ -104,6 +104,7 @@ "prereg_links_updated": "${user} has updated their preregistration data links", "why_no_prereg_updated": "${user} has updated their preregistration data availability statement", "prereg_links_info_updated": "${user} has updated their preregistration links to ${value}", + "migrated_quickfiles": "${user} had their QuickFiles migrated into ${node}", "resource_identifier_added": "${user} has added a Resource with DOI ${new_identifier} to Registration ${node}", "resource_identifier_removed": "${user} has removed a Resource with DOI ${obsolete_identifier} to Registration ${node}", "resource_identifier_updated": "${user} has updated a Resource DOI on Registration ${node} from ${obsolete_identifier} to ${new_identifier}" diff --git a/website/static/js/myProjects.js b/website/static/js/myProjects.js index ddc1921def0..347e39f6aa4 100644 --- a/website/static/js/myProjects.js +++ b/website/static/js/myProjects.js @@ -532,7 +532,7 @@ var MyProjects = { if(!item.data.attributes.retracted){ var urlPrefix = item.data.attributes.registration ? 
'registrations' : 'nodes'; // TODO assess sparse field usage (some already implemented) - var url = $osf.apiV2Url(urlPrefix + '/' + id + '/logs/', { query : { 'page[size]' : 6, 'embed' : ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node', 'group',], 'profile_image_size': PROFILE_IMAGE_SIZE, 'fields[users]' : sparseUserFields}}); + var url = $osf.apiV2Url(urlPrefix + '/' + id + '/logs/', { query : { 'page[size]' : 6, 'embed' : ['original_node', 'user', 'linked_node', 'linked_registration', 'template_node'], 'profile_image_size': PROFILE_IMAGE_SIZE, 'fields[users]' : sparseUserFields}}); var promise = self.getLogs(url); return promise; } From 496fc2379f873cd0c71cf15170e51c3ae30b0ff4 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 4 Aug 2025 11:54:24 -0400 Subject: [PATCH 170/176] fix boa send_mail --- addons/boa/tasks.py | 68 ++++++++++++++++++---------------- addons/boa/tests/test_tasks.py | 58 +++++++++++------------------ 2 files changed, 57 insertions(+), 69 deletions(-) diff --git a/addons/boa/tasks.py b/addons/boa/tasks.py index a64110e69b5..4c19d70986a 100644 --- a/addons/boa/tasks.py +++ b/addons/boa/tasks.py @@ -14,10 +14,9 @@ from addons.boa.boa_error_code import BoaErrorCode from framework import sentry from framework.celery_tasks import app as celery_app -from osf.models import OSFUser +from osf.models import OSFUser, NotificationType from osf.utils.fields import ensure_str, ensure_bytes from website import settings as osf_settings -from website.mails import send_mail, ADDONS_BOA_JOB_COMPLETE, ADDONS_BOA_JOB_FAILURE logger = logging.getLogger(__name__) @@ -184,22 +183,24 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, logger.info('Successfully uploaded query output to OSF.') logger.debug('Task ends <<<<<<<<') - await sync_to_async(send_mail)( - to_addr=user.username, - mail=ADDONS_BOA_JOB_COMPLETE, - fullname=user.fullname, - query_file_name=query_file_name, - query_file_full_path=file_full_path, - output_file_name=output_file_name, - job_id=boa_job.id, - project_url=project_url, - boa_job_list_url=boa_settings.BOA_JOB_LIST_URL, - boa_support_email=boa_settings.BOA_SUPPORT_EMAIL, - osf_support_email=osf_settings.OSF_SUPPORT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.ADDONS_BOA_JOB_COMPLETE + ).emit( + user=user, + event_context={ + 'fullname': user.fullname, + 'query_file_name': query_file_name, + 'query_file_full_path': file_full_path, + 'output_file_name': output_file_name, + 'job_id': boa_job.id, + 'project_url': project_url, + 'boa_job_list_url': boa_settings.BOA_JOB_LIST_URL, + 'boa_support_email': boa_settings.BOA_SUPPORT_EMAIL, + 'osf_support_email': osf_settings.OSF_SUPPORT_EMAIL, + } ) return BoaErrorCode.NO_ERROR - def handle_boa_error(message, code, username, fullname, project_url, query_file_full_path, query_file_name=None, file_size=None, output_file_name=None, job_id=None): """Handle Boa and WB API errors and send emails. 
@@ -209,22 +210,25 @@ def handle_boa_error(message, code, username, fullname, project_url, query_file_ sentry.log_message(message, skip_session=True) except Exception: pass - send_mail( - to_addr=username, - mail=ADDONS_BOA_JOB_FAILURE, - fullname=fullname, - code=code, - message=message, - query_file_name=query_file_name, - file_size=file_size, - max_file_size=boa_settings.MAX_SUBMISSION_SIZE, - query_file_full_path=query_file_full_path, - output_file_name=output_file_name, - job_id=job_id, - max_job_wait_hours=boa_settings.MAX_JOB_WAITING_TIME / 3600, - project_url=project_url, - boa_job_list_url=boa_settings.BOA_JOB_LIST_URL, - boa_support_email=boa_settings.BOA_SUPPORT_EMAIL, - osf_support_email=osf_settings.OSF_SUPPORT_EMAIL, + NotificationType.objects.get( + name=NotificationType.Type.ADDONS_BOA_JOB_FAILURE + ).emit( + destination_address=username, + event_context={ + 'fullname': fullname, + 'code': code, + 'query_file_name': query_file_name, + 'file_size': file_size, + 'max_file_size': boa_settings.MAX_SUBMISSION_SIZE, + 'query_file_full_path': query_file_full_path, + 'output_file_name': output_file_name, + 'job_id': job_id, + 'max_job_wait_hours': boa_settings.MAX_JOB_WAITING_TIME / 3600, + 'project_url': project_url, + 'boa_job_list_url': boa_settings.BOA_JOB_LIST_URL, + 'boa_support_email': boa_settings.BOA_SUPPORT_EMAIL, + 'osf_support_email': osf_settings.OSF_SUPPORT_EMAIL, + + } ) return code diff --git a/addons/boa/tests/test_tasks.py b/addons/boa/tests/test_tasks.py index a4842d6c417..d58568029ca 100644 --- a/addons/boa/tests/test_tasks.py +++ b/addons/boa/tests/test_tasks.py @@ -9,10 +9,11 @@ from addons.boa import settings as boa_settings from addons.boa.boa_error_code import BoaErrorCode from addons.boa.tasks import submit_to_boa, submit_to_boa_async, handle_boa_error +from osf.models import NotificationType from osf_tests.factories import AuthUserFactory, ProjectFactory from tests.base import OsfTestCase +from tests.utils import capture_notifications from website import settings as osf_settings -from website.mails import ADDONS_BOA_JOB_COMPLETE, ADDONS_BOA_JOB_FAILURE DEFAULT_REFRESH_JOB_INTERVAL = boa_settings.REFRESH_JOB_INTERVAL DEFAULT_MAX_JOB_WAITING_TIME = boa_settings.MAX_JOB_WAITING_TIME @@ -53,42 +54,25 @@ def test_boa_error_code(self): assert BoaErrorCode.JOB_TIME_OUT_ERROR == 7 def test_handle_boa_error(self): - with mock.patch('addons.boa.tasks.send_mail', return_value=None) as mock_send_mail, \ - mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \ - mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error: - return_value = handle_boa_error( - self.error_message, - BoaErrorCode.UNKNOWN, - self.user_username, - self.user_fullname, - self.project_url, - self.file_full_path, - query_file_name=self.query_file_name, - file_size=self.file_size, - output_file_name=self.output_file_name, - job_id=self.job_id - ) - mock_send_mail.assert_called_with( - to_addr=self.user_username, - mail=ADDONS_BOA_JOB_FAILURE, - fullname=self.user_fullname, - code=BoaErrorCode.UNKNOWN, - message=self.error_message, - query_file_name=self.query_file_name, - file_size=self.file_size, - max_file_size=boa_settings.MAX_SUBMISSION_SIZE, - query_file_full_path=self.file_full_path, - output_file_name=self.output_file_name, - job_id=self.job_id, - max_job_wait_hours=self.max_job_wait_hours, - project_url=self.project_url, - boa_job_list_url=boa_settings.BOA_JOB_LIST_URL, - boa_support_email=boa_settings.BOA_SUPPORT_EMAIL, 
- osf_support_email=osf_settings.OSF_SUPPORT_EMAIL, - ) - mock_sentry_log_message.assert_called_with(self.error_message, skip_session=True) - mock_logger_error.assert_called_with(self.error_message) - assert return_value == BoaErrorCode.UNKNOWN + with mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message: + with mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error: + with capture_notifications() as notifications: + return_value = handle_boa_error( + self.error_message, + BoaErrorCode.UNKNOWN, + self.user_username, + self.user_fullname, + self.project_url, + self.file_full_path, + file_size=self.file_size, + output_file_name=self.output_file_name, + job_id=self.job_id + ) + assert len(notifications) == 1 + assert notifications[0]['type'] == NotificationType.Type.ADDONS_BOA_JOB_FAILURE + mock_sentry_log_message.assert_called_with(self.error_message, skip_session=True) + mock_logger_error.assert_called_with(self.error_message) + assert return_value == BoaErrorCode.UNKNOWN class TestSubmitToBoa(OsfTestCase): From a73fb3a64c88643d03db95d0e9d02fe8ef0d747b Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 4 Aug 2025 12:32:10 -0400 Subject: [PATCH 171/176] remove old archiver test --- osf_tests/test_archiver.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index 282c0c99ddd..bc809a7f011 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -1203,19 +1203,3 @@ def test_archive_tree_finished_with_nodes(self): node.archive_job.update_target(target.name, ARCHIVER_SUCCESS) for node in reg.node_and_primary_descendants(): assert node.archive_job.archive_tree_finished() - -# Regression test for https://openscience.atlassian.net/browse/OSF-9085 -def test_archiver_uncaught_error_mail_renders(): - src = factories.ProjectFactory() - user = src.creator - job = factories.ArchiveJobFactory() - notification_type = NotificationType.Type.DESK_ARCHIVE_JOB_UNCAUGHT_ERROR.instance - assert notification_type.emit( - user=user, - event_context=dict( - src=str(src), - results=list(job.target_addons.all()), - url=settings.INTERNAL_DOMAIN + src._id, - can_change_preferences=False, - ) - ) From 466685a6a3a0d2d6c7eeacf8985e0cd884fe17f2 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Mon, 4 Aug 2025 14:09:30 -0400 Subject: [PATCH 172/176] fix migrate management command --- notifications.yaml | 4 ++ .../commands/migrate_notifications.py | 31 +++++---- osf/models/notification_type.py | 2 +- .../test_migrate_notifications.py | 26 ++++--- website/notifications/listeners.py | 66 +++++++++++++++++- website/notifications/utils.py | 67 ------------------- 6 files changed, 100 insertions(+), 96 deletions(-) delete mode 100644 website/notifications/utils.py diff --git a/notifications.yaml b/notifications.yaml index 97bdf4ce3c5..406034678e9 100644 --- a/notifications.yaml +++ b/notifications.yaml @@ -298,6 +298,10 @@ notification_types: __docs__: ... object_content_type_model_name: abstractnode template: 'website/templates/emails/file_updated.html.mako' + - name: node_file_updated + __docs__: ... + object_content_type_model_name: abstractnode + template: 'website/templates/emails/file_updated.html.mako' - name: node_institutional_access_request __docs__: ... 
object_content_type_model_name: abstractnode diff --git a/osf/management/commands/migrate_notifications.py b/osf/management/commands/migrate_notifications.py index f4dfaf3c0c8..113b152e679 100644 --- a/osf/management/commands/migrate_notifications.py +++ b/osf/management/commands/migrate_notifications.py @@ -13,6 +13,11 @@ 'email_digest': 'weekly', 'email_transactional': 'instantly', } +EVENT_NAME_TO_NOTIFICATION_TYPE = { + 'new_pending_submissions': NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, + 'file_updated': NotificationType.Type.NODE_FILE_UPDATED, + 'comments': None, +} def migrate_legacy_notification_subscriptions(*args, **kwargs): """ @@ -35,22 +40,16 @@ def migrate_legacy_notification_subscriptions(*args, **kwargs): else: raise NotImplementedError(f'Invalid Notification id {event_name}') content_type = ContentType.objects.get_for_model(subscribed_object.__class__) - subscription, _ = NotificationSubscription.objects.update_or_create( - notification_type=NotificationType.objects.get(name=event_name), - user=legacy.user, - content_type=content_type, - object_id=subscribed_object.id, - defaults={ - 'user': legacy.user, - 'message_frequency': ( - ('weekly' if legacy.email_digest.exists() else 'none'), - 'instantly' if legacy.email_transactional.exists() else 'none' - ), - 'content_type': content_type, - 'object_id': subscribed_object.id, - } - ) - logger.info(f'Created NotificationType "{event_name}" with content_type {content_type}') + + if notification_name := EVENT_NAME_TO_NOTIFICATION_TYPE[event_name]: + subscription, _ = NotificationSubscription.objects.get_or_create( + notification_type=NotificationType.objects.get(name=notification_name), + user=legacy.user, + content_type=content_type, + object_id=subscribed_object.id, + message_frequency=('weekly' if legacy.email_digest.exists() else 'none'), + ) + logger.info(f'Created NotificationType "{event_name}" with content_type {content_type} with {subscription}') class Command(BaseCommand): help = 'Migrate legacy NotificationSubscriptionLegacy objects to new Notification app models.' 
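Note on the migration above: only legacy event names with a modern equivalent are migrated ('new_pending_submissions', 'file_updated'); 'comments' maps to None and is skipped, and any event name missing from EVENT_NAME_TO_NOTIFICATION_TYPE raises a KeyError. A minimal sketch of exercising the migration directly from a Django shell, using the module path and function name shown in the diff (running it assumes a configured Django environment and database):

    # Sketch only; mirrors what the management Command above does when invoked.
    from osf.management.commands.migrate_notifications import (
        migrate_legacy_notification_subscriptions,
    )
    from osf.models import NotificationSubscription

    migrate_legacy_notification_subscriptions()
    # Legacy rows whose event_name maps to a NotificationType now have a
    # NotificationSubscription keyed by user, content type and object id;
    # 'comments' rows are skipped because they map to None.
    print(NotificationSubscription.objects.count())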
diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index 050ce7bca2a..e6ea9e056fb 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -78,7 +78,7 @@ class Type(str, Enum): DIGEST_REVIEWS_MODERATORS = 'digest_reviews_moderators' # Node notifications - NODE_COMMENT = 'node_comments' + NODE_FILE_UPDATED = 'node_file_updated' NODE_FILES_UPDATED = 'node_files_updated' NODE_AFFILIATION_CHANGED = 'node_affiliation_changed' NODE_REQUEST_ACCESS_SUBMITTED = 'node_access_request_submitted' diff --git a/osf_tests/management_commands/test_migrate_notifications.py b/osf_tests/management_commands/test_migrate_notifications.py index 35837f7cc7c..62f6af75f40 100644 --- a/osf_tests/management_commands/test_migrate_notifications.py +++ b/osf_tests/management_commands/test_migrate_notifications.py @@ -61,7 +61,10 @@ def create_legacy_sub(self, event_name, users, user=None, provider=None, node=No legacy.email_transactional.add(users['transactional']) return legacy - def test_migrate_provider_subscription(self, user, provider, provider2): + def test_migrate_provider_subscription(self, user, users, provider, provider2): + self.create_legacy_sub('new_pending_submissions', users, provider=provider) + self.create_legacy_sub('new_pending_submissions', users, provider=provider2) + self.create_legacy_sub('new_pending_submissions', users, provider=RegistrationProvider.get_default()) NotificationSubscriptionLegacy.objects.get( event_name='new_pending_submissions', provider=provider @@ -76,25 +79,27 @@ def test_migrate_provider_subscription(self, user, provider, provider2): ) migrate_legacy_notification_subscriptions() - subs = NotificationSubscription.objects.filter(notification_type__name='new_pending_submissions') + subs = NotificationSubscription.objects.filter( + notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS + ) assert subs.count() == 3 assert subs.get( - notification_type__name='new_pending_submissions', + notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, object_id=provider.id, content_type=ContentType.objects.get_for_model(provider.__class__) ) assert subs.get( - notification_type__name='new_pending_submissions', + notification_type__name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS, object_id=provider2.id, content_type=ContentType.objects.get_for_model(provider2.__class__) ) def test_migrate_node_subscription(self, users, user, node): - self.create_legacy_sub('wiki_updated', users, user=user, node=node) + self.create_legacy_sub('file_updated', users, user=user, node=node) migrate_legacy_notification_subscriptions() - nt = NotificationType.objects.get(name='wiki_updated') + nt = NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED) assert nt.object_content_type == ContentType.objects.get_for_model(Node) subs = NotificationSubscription.objects.filter(notification_type=nt) @@ -103,12 +108,11 @@ def test_migrate_node_subscription(self, users, user, node): for sub in subs: assert sub.subscribed_object == node - def test_multiple_subscriptions_different_types(self, users, user, provider, node): + def test_multiple_subscriptions_no_old_types(self, users, user, provider, node): assert not NotificationSubscription.objects.filter(user=user) - self.create_legacy_sub('wiki_updated', users, user=user, node=node) + self.create_legacy_sub('comments', users, user=user, node=node) migrate_legacy_notification_subscriptions() - assert 
NotificationSubscription.objects.get(user=user).notification_type.name == 'wiki_updated' - assert NotificationSubscription.objects.get(notification_type__name='wiki_updated', user=user) + assert not NotificationSubscription.objects.filter(user=user) def test_idempotent_migration(self, users, user, node, provider): self.create_legacy_sub('file_updated', users, user=user, node=node) @@ -118,7 +122,7 @@ def test_idempotent_migration(self, users, user, node, provider): user=user, object_id=node.id, content_type=ContentType.objects.get_for_model(node.__class__), - notification_type__name='file_updated' + notification_type__name=NotificationType.Type.NODE_FILE_UPDATED ) def test_errors_invalid_subscription(self, users): diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index aecd846d697..d0499e5d4b1 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -1,8 +1,11 @@ import logging from django.apps import apps +from django.contrib.contenttypes.models import ContentType -from website.project.signals import contributor_added, project_created +from framework.celery_tasks import app +from framework.postcommit_tasks.handlers import run_postcommit +from website.project.signals import contributor_added, project_created, node_deleted, contributor_removed from framework.auth.signals import user_confirmed from website.project.signals import privacy_set_public from website import settings @@ -148,3 +151,64 @@ def reviews_withdraw_requests_notification_moderators(self, timestamp, context, user=user, event_context=context ) + + +@contributor_removed.connect +def remove_contributor_from_subscriptions(node, user): + """ Remove contributor from node subscriptions unless the user is an + admin on any of node's parent projects. 
+ """ + NotificationSubscription = apps.get_model('osf.NotificationSubscription') + + Preprint = apps.get_model('osf.Preprint') + DraftRegistration = apps.get_model('osf.DraftRegistration') + # Preprints don't have subscriptions at this time + if isinstance(node, Preprint): + return + if isinstance(node, DraftRegistration): + return + + # If user still has permissions through being a contributor or group member, or has + # admin perms on a parent, don't remove their subscription + if not (node.is_contributor_or_group_member(user)) and user._id not in node.admin_contributor_or_group_member_ids: + node_subscriptions = NotificationSubscription.objects.filter( + user=user, + user__isnull=True, + object_id=node.id, + content_type=ContentType.objects.get_for_model(node) + ) + + for subscription in node_subscriptions: + subscription.delete() + + +@node_deleted.connect +def remove_subscription(node): + remove_subscription_task(node._id) + +@node_deleted.connect +def remove_supplemental_node(node): + remove_supplemental_node_from_preprints(node._id) + +@run_postcommit(once_per_request=False, celery=True) +@app.task(max_retries=5, default_retry_delay=60) +def remove_subscription_task(node_id): + AbstractNode = apps.get_model('osf.AbstractNode') + NotificationSubscription = apps.get_model('osf.NotificationSubscription') + node = AbstractNode.load(node_id) + NotificationSubscription.objects.filter( + object_id=node.id, + content_type=ContentType.objects.get_for_model(node), + ).delete() + + +@run_postcommit(once_per_request=False, celery=True) +@app.task(max_retries=5, default_retry_delay=60) +def remove_supplemental_node_from_preprints(node_id): + AbstractNode = apps.get_model('osf.AbstractNode') + + node = AbstractNode.load(node_id) + for preprint in node.preprints.all(): + if preprint.node is not None: + preprint.node = None + preprint.save() diff --git a/website/notifications/utils.py b/website/notifications/utils.py deleted file mode 100644 index 1cb8f485866..00000000000 --- a/website/notifications/utils.py +++ /dev/null @@ -1,67 +0,0 @@ -from django.apps import apps -from django.contrib.contenttypes.models import ContentType - -from framework.postcommit_tasks.handlers import run_postcommit -from osf.models import NotificationSubscription -from website.project import signals - -from framework.celery_tasks import app - - -@signals.contributor_removed.connect -def remove_contributor_from_subscriptions(node, user): - """ Remove contributor from node subscriptions unless the user is an - admin on any of node's parent projects. 
- """ - Preprint = apps.get_model('osf.Preprint') - DraftRegistration = apps.get_model('osf.DraftRegistration') - # Preprints don't have subscriptions at this time - if isinstance(node, Preprint): - return - if isinstance(node, DraftRegistration): - return - - # If user still has permissions through being a contributor or group member, or has - # admin perms on a parent, don't remove their subscription - if not (node.is_contributor_or_group_member(user)) and user._id not in node.admin_contributor_or_group_member_ids: - node_subscriptions = NotificationSubscription.objects.filter( - user=user, - user__isnull=True, - object_id=node.id, - content_type=ContentType.objects.get_for_model(node) - ) - - for subscription in node_subscriptions: - subscription.delete() - - -@signals.node_deleted.connect -def remove_subscription(node): - remove_subscription_task(node._id) - -@signals.node_deleted.connect -def remove_supplemental_node(node): - remove_supplemental_node_from_preprints(node._id) - -@run_postcommit(once_per_request=False, celery=True) -@app.task(max_retries=5, default_retry_delay=60) -def remove_subscription_task(node_id): - AbstractNode = apps.get_model('osf.AbstractNode') - NotificationSubscription = apps.get_model('osf.NotificationSubscription') - node = AbstractNode.load(node_id) - NotificationSubscription.objects.filter( - object_id=node.id, - content_type=ContentType.objects.get_for_model(node), - ).delete() - - -@run_postcommit(once_per_request=False, celery=True) -@app.task(max_retries=5, default_retry_delay=60) -def remove_supplemental_node_from_preprints(node_id): - AbstractNode = apps.get_model('osf.AbstractNode') - - node = AbstractNode.load(node_id) - for preprint in node.preprints.all(): - if preprint.node is not None: - preprint.node = None - preprint.save() From 96db17a53df5e9b8e9d7effb46c94d75057c9be2 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Tue, 5 Aug 2025 09:56:28 -0400 Subject: [PATCH 173/176] refactor email subscriptions into separate tasks --- addons/base/views.py | 62 ++++++++++++++++------------------ addons/boa/tasks.py | 1 + scripts/triggered_mails.py | 50 +++++++++++++++++++++++++++ website/notifications/tasks.py | 36 ++++++++++++++------ website/settings/defaults.py | 12 +++++++ 5 files changed, 119 insertions(+), 42 deletions(-) create mode 100644 scripts/triggered_mails.py diff --git a/addons/base/views.py b/addons/base/views.py index d3bf7759b42..4b68744597d 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -607,39 +607,37 @@ def create_waterbutler_log(payload, **kwargs): if target_node and payload['action'] != 'download_file': update_storage_usage_with_size(payload) - file_signals.file_updated.send( - target=node, - user=user, - payload=payload - ) + with transaction.atomic(): + file_signals.file_updated.send(target=node, user=user, payload=payload) - match action: - case NotificationType.Type.FILE_ADDED: - notification = NotificationType.objects.get(name=NotificationType.Type.FILE_ADDED) - case NotificationType.Type.FILE_REMOVED: - notification = NotificationType.objects.get(name=NotificationType.Type.FILE_REMOVED) - case NotificationType.Type.FILE_UPDATED: - notification = NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED) - case NotificationType.Type.ADDON_FILE_RENAMED: - notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_RENAMED) - case NotificationType.Type.ADDON_FILE_COPIED: - notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_COPIED) - case 
NotificationType.Type.ADDON_FILE_REMOVED: - notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_REMOVED) - case NotificationType.Type.ADDON_FILE_MOVED: - notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_MOVED) - case _: - raise NotImplementedError(f'action {action} not implemented') - - notification.emit( - user=user, - event_context={ - 'profile_image_url': user.profile_image_url(), - 'localized_timestamp': str(timezone.now()), - 'user_fullname': user.fullname, - 'url': node.absolute_url, - } - ) + with transaction.atomic(): + match action: + case NotificationType.Type.FILE_ADDED: + notification = NotificationType.objects.get(name=NotificationType.Type.FILE_ADDED) + case NotificationType.Type.FILE_REMOVED: + notification = NotificationType.objects.get(name=NotificationType.Type.FILE_REMOVED) + case NotificationType.Type.FILE_UPDATED: + notification = NotificationType.objects.get(name=NotificationType.Type.FILE_UPDATED) + case NotificationType.Type.ADDON_FILE_RENAMED: + notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_RENAMED) + case NotificationType.Type.ADDON_FILE_COPIED: + notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_COPIED) + case NotificationType.Type.ADDON_FILE_REMOVED: + notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_REMOVED) + case NotificationType.Type.ADDON_FILE_MOVED: + notification = NotificationType.objects.get(name=NotificationType.Type.ADDON_FILE_MOVED) + case _: + raise NotImplementedError(f'action {action} not implemented') + + notification.emit( + user=user, + event_context={ + 'profile_image_url': user.profile_image_url(), + 'localized_timestamp': str(timezone.now()), + 'user_fullname': user.fullname, + 'url': node.absolute_url, + } + ) return {'status': 'success'} diff --git a/addons/boa/tasks.py b/addons/boa/tasks.py index 4c19d70986a..5e3122490d5 100644 --- a/addons/boa/tasks.py +++ b/addons/boa/tasks.py @@ -201,6 +201,7 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid, ) return BoaErrorCode.NO_ERROR + def handle_boa_error(message, code, username, fullname, project_url, query_file_full_path, query_file_name=None, file_size=None, output_file_name=None, job_id=None): """Handle Boa and WB API errors and send emails. 
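The match/case block added to create_waterbutler_log above resolves each WaterButler action to the NotificationType row of the same name before emitting. A compact, behavior-equivalent sketch (not a drop-in change from the patch; it assumes `action` is one of the string values handled by the cases above, which works because NotificationType.Type is a str Enum):

    # Sketch of an equivalent dispatch for the actions listed in the match/case block.
    from osf.models import NotificationType

    ALLOWED_FILE_ACTIONS = {
        NotificationType.Type.FILE_ADDED,
        NotificationType.Type.FILE_REMOVED,
        NotificationType.Type.FILE_UPDATED,
        NotificationType.Type.ADDON_FILE_RENAMED,
        NotificationType.Type.ADDON_FILE_COPIED,
        NotificationType.Type.ADDON_FILE_REMOVED,
        NotificationType.Type.ADDON_FILE_MOVED,
    }

    def resolve_notification_type(action):
        # Raise for unhandled actions, matching the NotImplementedError in the view.
        if action not in ALLOWED_FILE_ACTIONS:
            raise NotImplementedError(f'action {action} not implemented')
        return NotificationType.objects.get(name=action)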
diff --git a/scripts/triggered_mails.py b/scripts/triggered_mails.py new file mode 100644 index 00000000000..3e0c4fea73a --- /dev/null +++ b/scripts/triggered_mails.py @@ -0,0 +1,50 @@ +import logging + +from django.db import transaction +from django.db.models import Q +from django.utils import timezone + +from framework.celery_tasks import app as celery_app +from osf.models import OSFUser +from osf.models.queued_mail import NO_LOGIN_TYPE, NO_LOGIN, QueuedMail, queue_mail +from website.app import init_app +from website import settings + +from scripts.utils import add_file_logger + +logger = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO) + + +def main(dry_run=True): + for user in find_inactive_users_with_no_inactivity_email_sent_or_queued(): + if dry_run: + logger.warning('Dry run mode') + logger.warning(f'Email of type no_login queued to {user.username}') + if not dry_run: + with transaction.atomic(): + queue_mail( + to_addr=user.username, + mail=NO_LOGIN, + send_at=timezone.now(), + user=user, + fullname=user.fullname, + osf_support_email=settings.OSF_SUPPORT_EMAIL, + ) + + +def find_inactive_users_with_no_inactivity_email_sent_or_queued(): + users_sent_ids = QueuedMail.objects.filter(email_type=NO_LOGIN_TYPE).values_list('user__guids___id') + return (OSFUser.objects + .filter( + (Q(date_last_login__lt=timezone.now() - settings.NO_LOGIN_WAIT_TIME) & ~Q(tags__name='osf4m')) | + Q(date_last_login__lt=timezone.now() - settings.NO_LOGIN_OSF4M_WAIT_TIME, tags__name='osf4m'), + is_active=True) + .exclude(guids___id__in=users_sent_ids)) + +@celery_app.task(name='scripts.triggered_mails') +def run_main(dry_run=True): + init_app(routes=False) + if not dry_run: + add_file_logger(logger, __file__) + main(dry_run=dry_run) diff --git a/website/notifications/tasks.py b/website/notifications/tasks.py index 94c9e71075a..bdda42db43f 100644 --- a/website/notifications/tasks.py +++ b/website/notifications/tasks.py @@ -22,29 +22,45 @@ from website import settings -@celery_app.task(name='website.notifications.tasks.send_users_email', max_retries=0) -def send_users_email(): +@celery_app.task(name='website.notifications.tasks.send_users_digest_email', max_retries=0) +def send_users_digest_email(): """Send pending emails. """ today = datetime.today().date() # Run for yesterday - _send_global_and_node_emails('daily') - _send_reviews_moderator_emails('daily') + _send_user_digest('daily') # Run only on Mondays if today.weekday() == 0: # Monday is 0 - _send_global_and_node_emails('weekly') - _send_reviews_moderator_emails('weekly') + _send_user_digest('weekly') # Run only on the last day of the month last_day = monthrange(today.year, today.month)[1] if today.day == last_day: - _send_global_and_node_emails('monthly') - _send_reviews_moderator_emails('monthly') + _send_user_digest('monthly') -def _send_global_and_node_emails(message_freq): +@celery_app.task(name='website.notifications.tasks.send_moderators_digest_email', max_retries=0) +def send_moderators_digest_email(): + """Send pending emails. + """ + today = datetime.today().date() + + # Run for yesterday + _send_moderator_digest('daily') + + # Run only on Mondays + if today.weekday() == 0: # Monday is 0 + _send_moderator_digest('weekly') + + # Run only on the last day of the month + last_day = monthrange(today.year, today.month)[1] + if today.day == last_day: + _send_moderator_digest('monthly') + + +def _send_user_digest(message_freq): """ Called by `send_users_email`. Send all global and node-related notification emails. 
""" @@ -77,7 +93,7 @@ def _send_global_and_node_emails(message_freq): for notification in notifications_qs: notification.mark_sent() -def _send_reviews_moderator_emails(message_freq): +def _send_moderator_digest(message_freq): """ Called by `send_users_email`. Send all reviews triggered emails. """ diff --git a/website/settings/defaults.py b/website/settings/defaults.py index 7581393e1db..e5ff25041c4 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -565,6 +565,8 @@ class CeleryConfig: 'scripts.approve_registrations', 'scripts.approve_embargo_terminations', 'scripts.triggered_mails', + 'scripts.website.notifications.tasks.send_moderators_digest_email', + 'scripts.website.notifications.tasks.send_users_digest_email', 'scripts.generate_sitemap', 'scripts.premigrate_created_modified', 'scripts.add_missing_identifiers_to_preprints', @@ -628,6 +630,16 @@ class CeleryConfig: 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m 'kwargs': {'dry_run': False}, }, + 'send_moderators_digest_email': { + 'task': 'website.notifications.tasks.send_moderators_digest_email', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, + 'send_users_digest_email': { + 'task': 'website.notifications.tasks.send_users_digest_email', + 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m + 'kwargs': {'dry_run': False}, + }, 'clear_expired_sessions': { 'task': 'osf.management.commands.clear_expired_sessions', 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m From 5993dad3d7b58de789f5001b41b1735606fc9e30 Mon Sep 17 00:00:00 2001 From: John Tordoff Date: Wed, 6 Aug 2025 12:46:13 -0400 Subject: [PATCH 174/176] improve digests by queuing emails with retry mechanism condensing email temaplates --- .../notifications/test_notification_digest.py | 273 +++++++++++------- osf/admin.py | 9 +- osf/email/__init__.py | 15 +- ...nd_more.py => 0033_notification_system.py} | 18 +- osf/models/__init__.py | 1 + osf/models/email_task.py | 20 ++ website/notifications/listeners.py | 3 +- website/notifications/tasks.py | 221 ++++++++------ website/templates/emails/digest.html.mako | 53 ++-- .../emails/file_operation_failed.html.mako | 6 +- .../emails/file_operation_success.html.mako | 6 +- .../templates/emails/file_updated.html.mako | 2 +- website/templates/emails/notify_base.mako | 6 +- 13 files changed, 382 insertions(+), 251 deletions(-) rename osf/migrations/{0033_notification_notificationsubscriptionlegacy_and_more.py => 0033_notification_system.py} (90%) create mode 100644 osf/models/email_task.py diff --git a/api_tests/notifications/test_notification_digest.py b/api_tests/notifications/test_notification_digest.py index 8f3d329222f..75f4fa251bd 100644 --- a/api_tests/notifications/test_notification_digest.py +++ b/api_tests/notifications/test_notification_digest.py @@ -1,120 +1,189 @@ import pytest -from unittest.mock import patch - -from datetime import datetime +from osf.models import Notification, NotificationType, EmailTask from website.notifications.tasks import ( - send_users_email, + send_user_email_task, + send_moderator_email_task, + send_users_digest_email, + send_moderators_digest_email, get_users_emails, - get_moderators_emails, -) -from osf_tests.factories import ( - AuthUserFactory, - NotificationSubscriptionFactory, - NotificationTypeFactory + get_moderators_emails ) -from osf.models import Notification, NotificationType -from tests.utils import capture_notifications +from osf_tests.factories import AuthUserFactory, RegistrationProviderFactory +def 
add_notification_subscription(user, notification_type, frequency, provider=None, subscription=None): + """ + Create a NotificationSubscription for a user. + If the notification type corresponds to a provider, set provider as the subscribed_object. + """ + from osf.models import NotificationSubscription + kwargs = { + 'user': user, + 'notification_type': NotificationType.objects.get(name=notification_type), + 'message_frequency': frequency, + } + if provider is not None: + kwargs['subscribed_object'] = provider + if provider is not None: + kwargs['subscribed_object'] = subscription + return NotificationSubscription.objects.create(**kwargs) -@pytest.mark.django_db -class TestNotificationDigest: - @pytest.fixture() - def user_one(self): - return AuthUserFactory() +@pytest.mark.django_db +def test_send_user_email_task_success(fake): + user = AuthUserFactory() + notification_type = NotificationType.objects.get(name=NotificationType.Type.USER_FILE_UPDATED) + subscription_type = add_notification_subscription( + user, + notification_type, + 'daily', + subscription=add_notification_subscription( + user, + NotificationType.objects.get(name=NotificationType.Type.USER_DIGEST), + 'daily' + ) + ) - @pytest.fixture() - def user_two(self): - return AuthUserFactory() + notification = Notification.objects.create( + subscription=subscription_type, + event_context={ + }, + ) + user.save() + notification_ids = [notification.id] + send_user_email_task.apply(args=(user._id, notification_ids, 'daily')).get() + email_task = EmailTask.objects.get(user_id=user.id) + assert email_task.status == 'SUCCESS' + notification.refresh_from_db() + assert notification.sent - @pytest.fixture() - def test_notification_type(self): - return NotificationTypeFactory( - name='test_notification_type', - template='test template for {notifications}' - ) +@pytest.mark.django_db +def test_send_user_email_task_user_not_found(): + non_existent_user_id = 'fakeuserid' + notification_ids = [] + send_user_email_task.apply(args=(non_existent_user_id, notification_ids, 'daily')).get() + assert EmailTask.objects.all().exists() + email_task = EmailTask.objects.all().get() + assert email_task.status == 'NO_USER_FOUND' + assert email_task.error_message == 'User not found or disabled' - @pytest.fixture() - def notifications_user_one(self, user_one): - data = {'user': None, 'moderator': None} - notification_subscription = NotificationSubscriptionFactory( - user=user_one, - notification_type=NotificationType.objects.get(name=NotificationType.Type.NODE_FILE_UPDATED), - message_frequency='monthly', - ) - notification_subscription.emit(event_context={'notifications': 'Test notification'}) - data['user'] = Notification.objects.get(subscription=notification_subscription).id +@pytest.mark.django_db +def test_send_user_email_task_user_disabled(fake): + user = AuthUserFactory() + user.deactivate_account() + user.save() + notification_type = NotificationType.objects.get(name=NotificationType.Type.USER_DIGEST) + notification = Notification.objects.create( + subscription=add_notification_subscription(user, NotificationType.Type.USER_FILE_UPDATED, notification_type, 'daily'), + sent=None, + event_context={}, + ) + notification_ids = [notification.id] + send_user_email_task.apply(args=(user._id, notification_ids, 'daily')).get() + email_task = EmailTask.objects.filter(user_id=user.id).first() + assert email_task.status == 'USER_DISABLED' + assert email_task.error_message == 'User not found or disabled' - notification_subscription = NotificationSubscriptionFactory( - 
user=user_one, - notification_type=NotificationType.objects.get(name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS), - message_frequency='monthly', - ) - notification_subscription.emit(event_context={'notifications': 'Test notification', 'provider_id': 1}) - data['moderator'] = Notification.objects.get(subscription=notification_subscription).id - return data +@pytest.mark.django_db +def test_send_user_email_task_no_notifications(fake): + user = AuthUserFactory() + notification_ids = [] + send_user_email_task.apply(args=(user._id, notification_ids, 'daily')).get() + email_task = EmailTask.objects.filter(user_id=user.id).first() + assert email_task.status == 'SUCCESS' - @pytest.fixture() - def notifications_user_two(self, user_two, test_notification_type): - data = {'user': None, 'moderator': None} - notification_subscription = NotificationSubscriptionFactory( - user=user_two, - notification_type=NotificationType.objects.get(name='test_notification_type'), - message_frequency='daily', - ) - notification_subscription.emit(event_context={'notifications': 'Test notification'}) - data['user'] = Notification.objects.get(subscription=notification_subscription).id - return data +@pytest.mark.django_db +def test_send_moderator_email_task_registration_provider_admin(fake): + user = AuthUserFactory(fullname='Admin User') + reg_provider = RegistrationProviderFactory(_id='abc123') + admin_group = reg_provider.get_group('admin') + admin_group.user_set.add(user) + notification_type = NotificationType.objects.get(name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS) + notification = Notification.objects.create( + subscription=add_notification_subscription(user, notification_type, 'daily', provider=reg_provider), + event_context={'provider_id': reg_provider.id}, + sent=None, + ) + notification_ids = [notification.id] + send_moderator_email_task.apply(args=(user._id, reg_provider.id, notification_ids, 'daily')).get() + email_task = EmailTask.objects.filter(user_id=user.id).first() + assert email_task.status == 'SUCCESS' + notification.refresh_from_db() + assert notification.sent - @patch('website.notifications.tasks._send_reviews_moderator_emails') - @patch('website.notifications.tasks._send_global_and_node_emails') - @patch('website.notifications.tasks.datetime') - def test_send_users_email_daily(self, mock_datetime, mock__send_global_and_node_emails, mock__reviews_moderator_email): - mock_datetime.today.return_value = datetime(2025, 8, 2) # Saturday - send_users_email() - mock__send_global_and_node_emails.assert_called_once_with('daily') - mock__reviews_moderator_email.assert_called_once_with('daily') +@pytest.mark.django_db +def test_send_moderator_email_task_no_notifications(fake): + user = AuthUserFactory(fullname='Admin User') + provider = RegistrationProviderFactory() + notification_ids = [] + send_moderator_email_task.apply(args=(user._id, provider.id, notification_ids, 'daily')).get() + email_task = EmailTask.objects.filter(user_id=user.id).first() + assert email_task.status == 'SUCCESS' - @patch('website.notifications.tasks._send_reviews_moderator_emails') - @patch('website.notifications.tasks._send_global_and_node_emails') - @patch('website.notifications.tasks.datetime') - def test_send_users_email_weekly(self, mock_datetime, mock__send_global_and_node_emails, mock__reviews_moderator_email): - mock_datetime.today.return_value = datetime(2025, 8, 4) # Monday - send_users_email() - assert mock__send_global_and_node_emails.call_count == 2 - assert 
mock__reviews_moderator_email.call_count == 2 - mock__send_global_and_node_emails.assert_any_call('daily') - mock__send_global_and_node_emails.assert_any_call('weekly') +@pytest.mark.django_db +def test_send_moderator_email_task_user_not_found(): + provider = RegistrationProviderFactory() + send_moderator_email_task.apply(args=('nouser', provider.id, [], 'daily')).get() + email_task = EmailTask.objects.filter() + assert email_task.exists() + assert email_task.first().status == 'NO_USER_FOUND' - @patch('website.notifications.tasks._send_reviews_moderator_emails') - @patch('website.notifications.tasks._send_global_and_node_emails') - @patch('website.notifications.tasks.datetime') - def test_send_users_email_monthly(self, mock_datetime, mock__send_global_and_node_emails, mock__reviews_moderator_email): - mock_datetime.today.return_value = datetime(2025, 6, 30) # Last day of month and a Monday - send_users_email() - assert mock__send_global_and_node_emails.call_count == 3 - mock__send_global_and_node_emails.assert_any_call('daily') - mock__send_global_and_node_emails.assert_any_call('weekly') - mock__send_global_and_node_emails.assert_any_call('monthly') +@pytest.mark.django_db +def test_get_users_emails(fake): + user = AuthUserFactory() + notification_type = NotificationType.objects.get(name=NotificationType.Type.USER_DIGEST) + notification1 = Notification.objects.create( + subscription=add_notification_subscription(user, notification_type, 'daily'), + sent=None, + event_context={}, + ) + res = list(get_users_emails('daily')) + assert len(res) == 1 + user_info = res[0] + assert user_info['user_id'] == user._id + assert any(msg['notification_id'] == notification1.id for msg in user_info['info']) - def test_get_emails(self, user_one, notifications_user_one): - users_emails = get_users_emails('monthly') - assert [el for el in users_emails] == [{'user_id': user_one._id, 'info': [{'notification_id': notifications_user_one['user']}]}] - moderators_emails = get_moderators_emails('monthly') - assert [el for el in moderators_emails] == [{'user_id': user_one._id, 'provider_id': '1', 'info': [{'notification_id': notifications_user_one['moderator']}]}] +# Reasoning: +# Test get_moderators_emails returns grouped emails by user and provider. 
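+# Each yielded group has the shape asserted below:
+#     {'user_id': '<user guid>',
+#      'provider_id': '<provider pk as a string>',
+#      'info': [{'notification_id': <Notification pk>}, ...]}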
+@pytest.mark.django_db +def test_get_moderators_emails(fake): + user = AuthUserFactory() + provider = RegistrationProviderFactory() + notification_type = NotificationType.objects.get(name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS) + Notification.objects.create( + subscription=add_notification_subscription(user, notification_type, 'daily', provider=provider), + event_context={'provider_id': provider.id}, + sent=None + ) + res = list(get_moderators_emails('daily')) + assert len(res) >= 1 + entry = [ + x for x in res if x['user_id'] == user._id and x['provider_id'] == str(provider.id) + ] + assert entry, 'Expected moderator digest group' - @patch('osf.models.Notification.send') - def test_send_users_email_sends_notifications(self, mock_send, user_two, notifications_user_two): - with capture_notifications() as notifications: - send_users_email() +@pytest.mark.django_db +def test_send_users_digest_email_end_to_end(fake): + user = AuthUserFactory() + notification_type = NotificationType.objects.get(name=NotificationType.Type.USER_DIGEST) + Notification.objects.create( + subscription=add_notification_subscription(user, notification_type, 'daily'), + sent=None, + event_context={}, + ) + send_users_digest_email() + email_task = EmailTask.objects.get(user_id=user.id) + assert email_task.status == 'SUCCESS' - assert mock_send.called - assert Notification.objects.get(id=notifications_user_two['user']).sent - assert notifications[0]['type'] == 'user_digest' - assert notifications[0]['kwargs']['user'] == user_two - assert notifications[0]['kwargs']['is_digest'] - assert notifications[0]['kwargs']['event_context'] == { - 'notifications': 'test template for Test notification', - 'user_fullname': user_two.fullname, - 'can_change_preferences': False - } +@pytest.mark.django_db +def test_send_moderators_digest_email_end_to_end(fake): + user = AuthUserFactory() + provider = RegistrationProviderFactory() + notification_type = NotificationType.objects.get(name=NotificationType.Type.PROVIDER_NEW_PENDING_SUBMISSIONS) + Notification.objects.create( + subscription=add_notification_subscription(user, notification_type, 'daily', provider=provider), + sent=None, + event_context={'provider_id': provider.id}, + ) + send_moderators_digest_email() + email_task = EmailTask.objects.filter(user_id=user.id).first() + assert email_task.status == 'SUCCESS' diff --git a/osf/admin.py b/osf/admin.py index 006040d4bde..7311c31ee9f 100644 --- a/osf/admin.py +++ b/osf/admin.py @@ -13,7 +13,7 @@ import waffle from osf.external.spam.tasks import reclassify_domain_references -from osf.models import OSFUser, Node, NotableDomain, NodeLicense, NotificationType, NotificationSubscription +from osf.models import OSFUser, Node, NotableDomain, NodeLicense, NotificationType, NotificationSubscription, EmailTask from osf.models.notification_type import get_default_frequency_choices from osf.models.notable_domain import DomainReference @@ -352,6 +352,13 @@ def get_intervals(self, request, pk): except NotificationType.DoesNotExist: return JsonResponse({'intervals': []}) + +@admin.register(EmailTask) +class EmailTaskAdmin(admin.ModelAdmin): + list_display = ('task_id', 'user', 'status', 'created_at', 'updated_at') + list_filter = ('status',) + search_fields = ('task_id', 'user__email') + admin.site.register(OSFUser, OSFUserAdmin) admin.site.register(Node, NodeAdmin) admin.site.register(NotableDomain, NotableDomainAdmin) diff --git a/osf/email/__init__.py b/osf/email/__init__.py index f84f1d16e74..9061b497d0f 100644 --- 
a/osf/email/__init__.py +++ b/osf/email/__init__.py @@ -1,6 +1,7 @@ import logging import waffle +from django.template import Template, Context from sendgrid import SendGridAPIClient, Personalization, To, Cc, Category, ReplyTo, Bcc from sendgrid.helpers.mail import Mail @@ -118,13 +119,7 @@ def send_email_with_send_grid(to_addr, notification_type, context, email_context raise exc def render_notification(template, context): - """Render a notification template with the given context. - - Args: - template (str): The template string to render. - context (dict): The context to use for rendering the template. - - Returns: - str: The rendered template. - """ - return template.format(**context) if template else '' + if not template: + return '' + t = Template(template) + return t.render(Context(context)) diff --git a/osf/migrations/0033_notification_notificationsubscriptionlegacy_and_more.py b/osf/migrations/0033_notification_system.py similarity index 90% rename from osf/migrations/0033_notification_notificationsubscriptionlegacy_and_more.py rename to osf/migrations/0033_notification_system.py index 7058cb56618..494681018f5 100644 --- a/osf/migrations/0033_notification_notificationsubscriptionlegacy_and_more.py +++ b/osf/migrations/0033_notification_system.py @@ -1,4 +1,4 @@ -# Generated by Django 4.2.13 on 2025-08-04 13:57 +# Generated by Django 4.2.13 on 2025-08-06 16:41 from django.conf import settings import django.contrib.postgres.fields @@ -37,6 +37,17 @@ class Migration(migrations.Migration): END$$; """ ), + migrations.CreateModel( + name='EmailTask', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('task_id', models.CharField(max_length=255, unique=True)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ('status', models.CharField(choices=[('PENDING', 'Pending'), ('STARTED', 'Started'), ('SUCCESS', 'Success'), ('FAILURE', 'Failure'), ('RETRY', 'Retry')], default='PENDING', max_length=20)), + ('error_message', models.TextField(blank=True)), + ], + ), migrations.CreateModel( name='Notification', fields=[ @@ -174,6 +185,11 @@ class Migration(migrations.Migration): name='subscription', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to='osf.notificationsubscription'), ), + migrations.AddField( + model_name='emailtask', + name='user', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL), + ), migrations.RemoveField( model_name='notificationsubscription', name='_id', diff --git a/osf/models/__init__.py b/osf/models/__init__.py index 7e02185c4ff..ccf0544f777 100644 --- a/osf/models/__init__.py +++ b/osf/models/__init__.py @@ -8,6 +8,7 @@ ReviewAction, SchemaResponseAction, ) +from .email_task import EmailTask from .admin_log_entry import AdminLogEntry from .admin_profile import AdminProfile from .analytics import UserActivityCounter, PageCounter diff --git a/osf/models/email_task.py b/osf/models/email_task.py new file mode 100644 index 00000000000..9f1127de22d --- /dev/null +++ b/osf/models/email_task.py @@ -0,0 +1,20 @@ +from django.db import models + +class EmailTask(models.Model): + TASK_STATUS = ( + ('PENDING', 'Pending'), + ('STARTED', 'Started'), + ('SUCCESS', 'Success'), + ('FAILURE', 'Failure'), + ('RETRY', 'Retry'), + ) + + task_id = models.CharField(max_length=255, unique=True) + user = models.ForeignKey('osf.OSFUser', null=True, 
on_delete=models.SET_NULL) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + status = models.CharField(max_length=20, choices=TASK_STATUS, default='PENDING') + error_message = models.TextField(blank=True) + + def __str__(self): + return f'{self.task_id} ({self.status})' diff --git a/website/notifications/listeners.py b/website/notifications/listeners.py index d0499e5d4b1..fc563e8b477 100644 --- a/website/notifications/listeners.py +++ b/website/notifications/listeners.py @@ -1,7 +1,6 @@ import logging from django.apps import apps -from django.contrib.contenttypes.models import ContentType from framework.celery_tasks import app from framework.postcommit_tasks.handlers import run_postcommit @@ -159,6 +158,7 @@ def remove_contributor_from_subscriptions(node, user): admin on any of node's parent projects. """ NotificationSubscription = apps.get_model('osf.NotificationSubscription') + from django.contrib.contenttypes.models import ContentType Preprint = apps.get_model('osf.Preprint') DraftRegistration = apps.get_model('osf.DraftRegistration') @@ -193,6 +193,7 @@ def remove_supplemental_node(node): @run_postcommit(once_per_request=False, celery=True) @app.task(max_retries=5, default_retry_delay=60) def remove_subscription_task(node_id): + from django.contrib.contenttypes.models import ContentType AbstractNode = apps.get_model('osf.AbstractNode') NotificationSubscription = apps.get_model('osf.NotificationSubscription') node = AbstractNode.load(node_id) diff --git a/website/notifications/tasks.py b/website/notifications/tasks.py index bdda42db43f..e739262875a 100644 --- a/website/notifications/tasks.py +++ b/website/notifications/tasks.py @@ -1,88 +1,54 @@ -""" -Tasks for making even transactional emails consolidated. -""" import itertools -from datetime import datetime from calendar import monthrange +from datetime import date from django.db import connection +from django.utils import timezone from framework.celery_tasks import app as celery_app +from celery import shared_task +from celery.utils.log import get_task_logger +from osf.models import OSFUser, Notification, NotificationType, EmailTask, AbstractProvider, RegistrationProvider, \ + CollectionProvider from framework.sentry import log_message -from osf.models import ( - OSFUser, - AbstractProvider, - RegistrationProvider, - CollectionProvider, - Notification, - NotificationType, -) from osf.registrations.utils import get_registration_provider_submissions_url from osf.utils.permissions import ADMIN from website import settings +logger = get_task_logger(__name__) -@celery_app.task(name='website.notifications.tasks.send_users_digest_email', max_retries=0) -def send_users_digest_email(): - """Send pending emails. - """ - today = datetime.today().date() - - # Run for yesterday - _send_user_digest('daily') - - # Run only on Mondays - if today.weekday() == 0: # Monday is 0 - _send_user_digest('weekly') - - # Run only on the last day of the month - last_day = monthrange(today.year, today.month)[1] - if today.day == last_day: - _send_user_digest('monthly') - - -@celery_app.task(name='website.notifications.tasks.send_moderators_digest_email', max_retries=0) -def send_moderators_digest_email(): - """Send pending emails. 
- """ - today = datetime.today().date() - - # Run for yesterday - _send_moderator_digest('daily') - - # Run only on Mondays - if today.weekday() == 0: # Monday is 0 - _send_moderator_digest('weekly') - - # Run only on the last day of the month - last_day = monthrange(today.year, today.month)[1] - if today.day == last_day: - _send_moderator_digest('monthly') - +@shared_task(bind=True, max_retries=3, default_retry_delay=60) +def send_user_email_task(self, user_id, notification_ids, message_freq): + try: + user = OSFUser.objects.get( + guids___id=user_id, + deleted__isnull=True + ) + except OSFUser.DoesNotExist: + logger.error(f'OSFUser with id {user_id} does not exist') + email_task, _ = EmailTask.objects.get_or_create(task_id=self.request.id, status='NO_USER_FOUND') + email_task.error_message = 'User not found or disabled' + email_task.save() + return -def _send_user_digest(message_freq): - """ - Called by `send_users_email`. Send all global and node-related notification emails. - """ - grouped_emails = get_users_emails(message_freq) - for group in grouped_emails: - user = OSFUser.load(group['user_id']) - if not user: - log_message(f"User with id={group['user_id']} not found") - continue + try: + email_task, _ = EmailTask.objects.get_or_create(task_id=self.request.id, user=user, status='STARTED') if user.is_disabled: - continue + email_task.status = 'USER_DISABLED' + email_task.error_message = 'User not found or disabled' + email_task.save() + return - info = group['info'] - notification_ids = [message['notification_id'] for message in info] notifications_qs = Notification.objects.filter(id__in=notification_ids) - rendered_notifications = [notification.render() for notification in notifications_qs] + rendered_notifications = [n.render() for n in notifications_qs] if not rendered_notifications: - log_message(f"No notifications to send for user {user._id} with message frequency {message_freq}") - continue + email_task.status = 'SUCCESS' + email_task.save() + return + event_context = { - 'notifications': '
    '.join(rendered_notifications), + 'notifications': rendered_notifications, 'user_fullname': user.fullname, 'can_change_preferences': False } @@ -90,29 +56,62 @@ def _send_user_digest(message_freq): notification_type = NotificationType.objects.get(name=NotificationType.Type.USER_DIGEST) notification_type.emit(user=user, event_context=event_context, is_digest=True) - for notification in notifications_qs: - notification.mark_sent() + notifications_qs.update(sent=timezone.now()) -def _send_moderator_digest(message_freq): - """ - Called by `send_users_email`. Send all reviews triggered emails. - """ - grouped_emails = get_moderators_emails(message_freq) - for group in grouped_emails: - user = OSFUser.load(group['user_id']) - if not user: - log_message(f"User with id={group['user_id']} not found") - continue + email_task.status = 'SUCCESS' + email_task.save() + except Exception as e: + try: + user = OSFUser.objects.get( + guids___id=user_id, + deleted__isnull=True + ) + except OSFUser.DoesNotExist: + logger.error(f'OSFUser with id {user_id} does not exist') + email_task, _ = EmailTask.objects.get_or_create(task_id=self.request.id, status='NO_USER_FOUND') + email_task.error_message = 'User not found or disabled' + email_task.save() + return + email_task, _ = EmailTask.objects.get_or_create(task_id=self.request.id, user=user, status='RETRY') + email_task.status = 'RETRY' + email_task.error_message = str(e) + email_task.save() + logger.exception('Retrying send_user_email_task due to exception') + raise self.retry(exc=e) + +@shared_task(bind=True, max_retries=3, default_retry_delay=60) +def send_moderator_email_task(self, user_id, provider_id, notification_ids, message_freq): + try: + user = OSFUser.objects.get( + guids___id=user_id, + deleted__isnull=True + ) + except OSFUser.DoesNotExist: + logger.error(f'OSFUser with id {user_id} does not exist') + email_task, _ = EmailTask.objects.get_or_create(task_id=self.request.id, status='NO_USER_FOUND') + email_task.error_message = 'User not found or disabled' + email_task.save() + return + + try: + email_task, _ = EmailTask.objects.get_or_create(task_id=self.request.id, user=user, status='STARTED') if user.is_disabled: - continue + email_task.status = 'USER_DISABLED' + email_task.error_message = 'User not found or disabled' + email_task.save() + return - info = group['info'] - notification_ids = [message['notification_id'] for message in info] notifications_qs = Notification.objects.filter(id__in=notification_ids) rendered_notifications = [notification.render() for notification in notifications_qs] - provider = AbstractProvider.objects.get(id=group['provider_id']) - additional_context = dict() + if not rendered_notifications: + log_message(f"No notifications to send for moderator user {user._id}") + email_task.status = 'SUCCESS' + email_task.save() + return + + provider = AbstractProvider.objects.get(id=provider_id) + additional_context = {} if isinstance(provider, RegistrationProvider): provider_type = 'registration' submissions_url = get_registration_provider_submissions_url(provider) @@ -127,23 +126,20 @@ def _send_moderator_digest(message_freq): provider_type = 'collection' submissions_url = f'{settings.DOMAIN}collections/{provider._id}/moderation/' notification_settings_url = f'{settings.DOMAIN}registries/{provider._id}/moderation/notifications' + withdrawals_url = '' if provider.brand: additional_context = { 'logo_url': provider.brand.hero_logo_image, 'top_bar_color': provider.brand.primary_color } - withdrawals_url = '' else: provider_type = 
'preprint' - submissions_url = f'{settings.DOMAIN}reviews/preprints/{provider._id}', + submissions_url = f'{settings.DOMAIN}reviews/preprints/{provider._id}' withdrawals_url = '' notification_settings_url = f'{settings.DOMAIN}reviews/{provider_type}s/{provider._id}/notifications' - if not rendered_notifications: - log_message(f"No notifications to send for user {user._id} with message frequency {message_freq}") - continue event_context = { - 'notifications': '
    '.join(rendered_notifications), + 'notifications': rendered_notifications, 'user_fullname': user.fullname, 'can_change_preferences': False, 'notification_settings_url': notification_settings_url, @@ -157,9 +153,52 @@ def _send_moderator_digest(message_freq): notification_type = NotificationType.objects.get(name=NotificationType.Type.DIGEST_REVIEWS_MODERATORS) notification_type.emit(user=user, event_context=event_context, is_digest=True) - for notification in notifications_qs: - notification.mark_sent() + notifications_qs.update(sent=timezone.now()) + + email_task.status = 'SUCCESS' + email_task.save() + + except Exception as e: + email_task.status = 'RETRY' + email_task.error_message = str(e) + email_task.save() + logger.exception('Retrying send_moderator_email_task due to exception') + raise self.retry(exc=e) + +@celery_app.task +def send_users_digest_email(): + today = date.today() + + frequencies = ['daily'] + if today.weekday() == 0: + frequencies.append('weekly') + if today.day == monthrange(today.year, today.month)[1]: + frequencies.append('monthly') + + for freq in frequencies: + grouped_emails = get_users_emails(freq) + for group in grouped_emails: + user_id = group['user_id'] + notification_ids = [msg['notification_id'] for msg in group['info']] + send_user_email_task.delay(user_id, notification_ids, freq) + +@celery_app.task +def send_moderators_digest_email(): + today = date.today() + + frequencies = ['daily'] + if today.weekday() == 0: + frequencies.append('weekly') + if today.day == monthrange(today.year, today.month)[1]: + frequencies.append('monthly') + for freq in frequencies: + grouped_emails = get_moderators_emails(freq) + for group in grouped_emails: + user_id = group['user_id'] + provider_id = group['provider_id'] + notification_ids = [msg['notification_id'] for msg in group['info']] + send_moderator_email_task.delay(user_id, provider_id, notification_ids, freq) def get_moderators_emails(message_freq: str): """Get all emails for reviews moderators that need to be sent, grouped by users AND providers. diff --git a/website/templates/emails/digest.html.mako b/website/templates/emails/digest.html.mako index 719a86b0c3c..b4218ba9775 100644 --- a/website/templates/emails/digest.html.mako +++ b/website/templates/emails/digest.html.mako @@ -1,47 +1,30 @@ <%inherit file="notify_base.mako" /> -<% from website import util %> -<%def name="build_message(d, parent=None)"> -%for key in d['children']: - %if d['children'][key]['messages']: - - - - - - - - - -
-        <% from osf.models import Guid %>
-        ${Guid.load(key).referent.title}
-        %if parent :
-            in ${Guid.objects.get(_id=parent).referent.title}
-        %endif
-        %for m in d['children'][key]['messages']:
-            ${m}
-        %endfor
-    %endif
-    %if isinstance(d['children'][key]['children'], dict):
-        ${build_message(d['children'][key], key )}
-    %endif
-%endfor
 <%def name="content()">
-    Recent Activity
-    ${build_message(message)}
+    Recent Activity
+    % if notifications:
+        % for n in notifications:
+            ${n}
+        % endfor
+    % else:
+        No recent activity.
    + % endif diff --git a/website/templates/emails/file_operation_failed.html.mako b/website/templates/emails/file_operation_failed.html.mako index 36c6fb0700c..5fd45f97e56 100644 --- a/website/templates/emails/file_operation_failed.html.mako +++ b/website/templates/emails/file_operation_failed.html.mako @@ -23,10 +23,10 @@ diff --git a/website/templates/emails/file_operation_success.html.mako b/website/templates/emails/file_operation_success.html.mako index 10792e8c6f1..56e1c14e356 100644 --- a/website/templates/emails/file_operation_success.html.mako +++ b/website/templates/emails/file_operation_success.html.mako @@ -23,10 +23,10 @@ diff --git a/website/templates/emails/file_updated.html.mako b/website/templates/emails/file_updated.html.mako index 6eae7990125..4701640013d 100644 --- a/website/templates/emails/file_updated.html.mako +++ b/website/templates/emails/file_updated.html.mako @@ -4,7 +4,7 @@ diff --git a/website/templates/emails/notify_base.mako b/website/templates/emails/notify_base.mako index 10e81d98840..deaf9d9cbcf 100644 --- a/website/templates/emails/notify_base.mako +++ b/website/templates/emails/notify_base.mako @@ -24,10 +24,10 @@ From d2dd1716fb9b923efad51632948e3059925c0e88 Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Mon, 11 Aug 2025 12:08:22 +0300 Subject: [PATCH 175/176] notifications DB optimization --- .../test_notifications_db_transaction.py | 55 +++++++++++++++++++ osf/models/notification.py | 5 +- osf/models/notification_subscription.py | 6 +- osf/models/notification_type.py | 34 ++++++++---- tests/utils.py | 21 +++++++ 5 files changed, 108 insertions(+), 13 deletions(-) create mode 100644 admin_tests/notifications/test_notifications_db_transaction.py diff --git a/admin_tests/notifications/test_notifications_db_transaction.py b/admin_tests/notifications/test_notifications_db_transaction.py new file mode 100644 index 00000000000..fda33a0e79a --- /dev/null +++ b/admin_tests/notifications/test_notifications_db_transaction.py @@ -0,0 +1,55 @@ +import pytest +from osf_tests.factories import ( + AuthUserFactory, + NotificationTypeFactory +) +from osf.models import Notification, NotificationType, NotificationSubscription +from tests.utils import get_mailhog_messages, delete_mailhog_messages +from django.db import reset_queries, connection +from waffle.testutils import override_switch +from osf import features + + +@pytest.mark.django_db +class TestNotificationTypeDBTransaction: + + @pytest.fixture() + def user_one(self): + return AuthUserFactory() + + @pytest.fixture() + def test_notification_type(self): + return NotificationTypeFactory( + name='test_notification_type', + template='Test template for {notifications}', + subject='Test notification subject', + ) + + def test_notification_type_cache(self): + NotificationType.Type.NODE_FILE_UPDATED.instance + reset_queries() + NotificationType.Type.NODE_FILE_UPDATED.instance + assert len(connection.queries) == 0 + + def test_emit_without_saving(self, user_one, test_notification_type): + reset_queries() + with override_switch(features.ENABLE_MAILHOG, active=True): + delete_mailhog_messages() + test_notification_type.emit( + user=user_one, + event_context={'notifications': 'test template for Test notification'}, + save=False + ) + assert len(connection.queries) == 0 + messages = get_mailhog_messages() + assert messages['total'] == 1 + assert messages['items'][0]['Content']['Headers']['To'][0] == user_one.username + assert messages['items'][0]['Content']['Body'] == 'Test template for test template for Test 
notification' + delete_mailhog_messages() + assert not NotificationSubscription.objects.filter( + user=user_one, + notification_type=test_notification_type + ).exists() + assert not Notification.objects.filter( + subscription__notification_type=test_notification_type + ).exists() diff --git a/osf/models/notification.py b/osf/models/notification.py index 9fbf1997e51..5e79b51c5ea 100644 --- a/osf/models/notification.py +++ b/osf/models/notification.py @@ -25,6 +25,7 @@ def send( protocol_type='email', destination_address=None, email_context=None, + save=True, ): """ @@ -41,7 +42,6 @@ def send( f"\ncontext={self.event_context}" f"\nemail={email_context}" ) - if protocol_type == 'email' and waffle.switch_is_active(features.ENABLE_MAILHOG): email.send_email_over_smtp( recipient_address, @@ -68,7 +68,8 @@ def send( else: raise NotImplementedError(f'protocol `{protocol_type}` is not supported.') - self.mark_sent() + if save: + self.mark_sent() def mark_sent(self) -> None: self.sent = timezone.now() diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py index a0330d56dfd..cce441191ea 100644 --- a/osf/models/notification_subscription.py +++ b/osf/models/notification_subscription.py @@ -60,6 +60,7 @@ def emit( event_context=None, destination_address=None, email_context=None, + save=True, ): """Emit a notification to a user by creating Notification and NotificationSubscription objects. @@ -79,19 +80,22 @@ def emit( ) if self.message_frequency == 'instantly': - notification = Notification.objects.create( + notification = Notification( subscription=self, event_context=event_context ) notification.send( destination_address=destination_address, email_context=email_context, + save=save, ) else: Notification.objects.create( subscription=self, event_context=event_context ) + if save: + notification.save() @property def absolute_api_v2_url(self): diff --git a/osf/models/notification_type.py b/osf/models/notification_type.py index e6ea9e056fb..c76ef92fea0 100644 --- a/osf/models/notification_type.py +++ b/osf/models/notification_type.py @@ -3,6 +3,8 @@ from django.contrib.contenttypes.models import ContentType from enum import Enum +from osf.utils.caching import cached_property + def get_default_frequency_choices(): DEFAULT_FREQUENCY_CHOICES = ['none', 'instantly', 'daily', 'weekly', 'monthly'] @@ -146,7 +148,7 @@ class Type(str, Enum): DRAFT_REGISTRATION_CONTRIBUTOR_ADDED_DEFAULT = 'draft_registration_contributor_added_default' - @property + @cached_property def instance(self): obj, created = NotificationType.objects.get_or_create(name=self.value) return obj @@ -208,7 +210,8 @@ def emit( message_frequency='instantly', event_context=None, email_context=None, - is_digest=False + is_digest=False, + save=True, ): """Emit a notification to a user by creating Notification and NotificationSubscription objects. 
@@ -221,18 +224,29 @@ def emit( email_context (dict, optional): Context for additional email notification information, so as blind cc etc """ from osf.models.notification_subscription import NotificationSubscription - subscription, created = NotificationSubscription.objects.get_or_create( - notification_type=self, - user=user, - content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, - object_id=subscribed_object.pk if subscribed_object else None, - defaults={'message_frequency': message_frequency}, - _is_digest=is_digest, - ) + if not save: + subscription = NotificationSubscription( + notification_type=self, + user=user, + content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, + object_id=subscribed_object.pk if subscribed_object else None, + message_frequency=message_frequency, + _is_digest=is_digest, + ) + else: + subscription, created = NotificationSubscription.objects.get_or_create( + notification_type=self, + user=user, + content_type=ContentType.objects.get_for_model(subscribed_object) if subscribed_object else None, + object_id=subscribed_object.pk if subscribed_object else None, + defaults={'message_frequency': message_frequency}, + _is_digest=is_digest, + ) subscription.emit( destination_address=destination_address, event_context=event_context, email_context=email_context, + save=save, ) def add_user_to_subscription(self, user, *args, **kwargs): diff --git a/tests/utils.py b/tests/utils.py index 6e3ec4fdfcd..6870cbe7bc5 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -2,6 +2,8 @@ import datetime import functools from unittest import mock +import requests +import waffle from django.apps import apps from django.http import HttpRequest @@ -14,6 +16,8 @@ from tests.base import get_default_metaschema from website.archiver import ARCHIVER_SUCCESS from website.archiver import listeners as archiver_listeners +from website import settings as website_settings +from osf import features def requires_module(module): def decorator(fn): @@ -285,3 +289,20 @@ def wrapped_emit(*emit_args, **emit_kwargs): yield captured +def get_mailhog_messages(): + """Fetch messages from MailHog API.""" + if not waffle.switch_is_active(features.ENABLE_MAILHOG): + return [] + mailhog_url = f'{website_settings.MAILHOG_API_HOST}/api/v2/messages' + response = requests.get(mailhog_url) + if response.status_code == 200: + return response.json() + return [] + + +def delete_mailhog_messages(): + """Delete all messages from MailHog.""" + if not waffle.switch_is_active(features.ENABLE_MAILHOG): + return + mailhog_url = f'{website_settings.MAILHOG_API_HOST}/api/v1/messages' + requests.delete(mailhog_url) From 9658544af8e9d9d51d9df73388f8a8b9d52f648f Mon Sep 17 00:00:00 2001 From: Ostap Zherebetskyi Date: Wed, 13 Aug 2025 12:14:25 +0300 Subject: [PATCH 176/176] fix notification save --- osf/models/notification_subscription.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/osf/models/notification_subscription.py b/osf/models/notification_subscription.py index cce441191ea..4c35b89b072 100644 --- a/osf/models/notification_subscription.py +++ b/osf/models/notification_subscription.py @@ -84,6 +84,8 @@ def emit( subscription=self, event_context=event_context ) + if save: + notification.save() notification.send( destination_address=destination_address, email_context=email_context, @@ -94,8 +96,6 @@ def emit( subscription=self, event_context=event_context ) - if save: - notification.save() @property def 
absolute_api_v2_url(self):
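
Both digest beat tasks select which batches to flush from the calendar alone: daily digests go out on every run, weekly digests are added on Mondays, and monthly digests on the last day of the month. For each selected frequency the beat task groups pending notifications (per user, or per user and provider for moderators) and queues one Celery task per group, and each task records its outcome on an EmailTask row and retries on failure. A minimal standalone sketch of the frequency selection, checked against the dates exercised by the old digest tests (the helper name frequencies_for is illustrative only, not part of the patch):

from calendar import monthrange
from datetime import date

def frequencies_for(today: date) -> list[str]:
    # Mirrors send_users_digest_email / send_moderators_digest_email:
    # 'daily' always runs; 'weekly' is added on Mondays; 'monthly' is
    # added on the last day of the month.
    freqs = ['daily']
    if today.weekday() == 0:
        freqs.append('weekly')
    if today.day == monthrange(today.year, today.month)[1]:
        freqs.append('monthly')
    return freqs

assert frequencies_for(date(2025, 8, 2)) == ['daily']                         # Saturday
assert frequencies_for(date(2025, 8, 4)) == ['daily', 'weekly']               # Monday
assert frequencies_for(date(2025, 6, 30)) == ['daily', 'weekly', 'monthly']   # Monday, last day of June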