diff --git a/admin/users/views.py b/admin/users/views.py index 41ac9583fe8..4b4a1924584 100644 --- a/admin/users/views.py +++ b/admin/users/views.py @@ -512,7 +512,7 @@ def get_claim_links(self, user): for guid, value in user.unclaimed_records.items(): obj = Guid.load(guid) - url = f'{DOMAIN}user/{user._id}/{guid}/claim/?token={value["token"]}' + url = f'{DOMAIN}legacy/user/{user._id}/{guid}/claim/?token={value["token"]}' links.append(f'Claim URL for {obj.content_type.model} {obj._id}: {url}') return links or ['User currently has no active unclaimed records for any nodes.'] diff --git a/api/nodes/serializers.py b/api/nodes/serializers.py index e63801558eb..ef62b1e6e53 100644 --- a/api/nodes/serializers.py +++ b/api/nodes/serializers.py @@ -1308,6 +1308,12 @@ def update(self, instance, validated_data): validated_data, ) +class NodeContributorsUpdateSerializer(ser.Serializer): + def update(self, instance, validated_data): + if project := instance.root: + instance.copy_contributors_from(project) + return instance + class NodeLinksSerializer(JSONAPISerializer): diff --git a/api/nodes/views.py b/api/nodes/views.py index 8630c106162..7a6671913df 100644 --- a/api/nodes/views.py +++ b/api/nodes/views.py @@ -109,6 +109,7 @@ DraftRegistrationDetailLegacySerializer, NodeContributorsSerializer, NodeContributorDetailSerializer, + NodeContributorsUpdateSerializer, NodeInstitutionsRelationshipSerializer, NodeContributorsCreateSerializer, NodeViewOnlyLinkSerializer, @@ -436,11 +437,16 @@ class NodeContributorsList(BaseContributorList, bulk_views.BulkUpdateJSONAPIView def get_resource(self): return self.get_node() + def get_object(self): + return self.get_node() + # overrides ListBulkCreateJSONAPIView, BulkUpdateJSONAPIView, BulkDeleteJSONAPIView def get_serializer_class(self): """ Use NodeContributorDetailSerializer which requires 'id' """ + if self.request.method == 'PATCH' and self.request.query_params.get('copy_contributors_from_parent_project'): + return NodeContributorsUpdateSerializer if self.request.method == 'PUT' or self.request.method == 'PATCH' or self.request.method == 'DELETE': return NodeContributorDetailSerializer elif self.request.method == 'POST': @@ -501,6 +507,18 @@ def get_serializer_context(self): context['default_email'] = 'default' return context + def patch(self, request, *args, **kwargs): + """ + Override the default patch behavior to handle the special case + of updating contributors by copying contributors from the parent project. + """ + if request.query_params.get('copy_contributors_from_parent_project'): + instance = self.get_object() + serializer = self.get_serializer_class()(instance, data=request.data) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response(serializer.data, status=200) + return super().patch(request, *args, **kwargs) class NodeContributorDetail(BaseContributorDetail, generics.RetrieveUpdateDestroyAPIView, NodeMixin, UserMixin): """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/nodes_contributors_read). 
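Note on the contributor-copy change above: the new `copy_contributors_from_parent_project` query parameter is only consulted on PATCH requests to the contributors list, and `NodeContributorsUpdateSerializer` simply calls `copy_contributors_from()` with the node's root. A minimal sketch of how a client might exercise the endpoint; the host, node id, and token are hypothetical placeholders, and the payload mirrors the minimal one used by the new test:

```python
# Sketch only, not part of this change: exercising the PATCH override
# added to NodeContributorsList. OSF_API, NODE_ID and TOKEN are made up.
import requests

OSF_API = 'https://api.osf.io/v2'
NODE_ID = 'abc12'   # a component whose root project has extra contributors
TOKEN = 'personal-access-token'

resp = requests.patch(
    f'{OSF_API}/nodes/{NODE_ID}/contributors/',
    params={'copy_contributors_from_parent_project': 'true'},
    json={'data': {'type': 3}},   # minimal payload, as in the new test below
    headers={'Authorization': f'Bearer {TOKEN}'},
)
resp.raise_for_status()   # the override returns 200 after copying contributors
```

Because the override only checks that the query parameter is present and non-empty, any truthy value triggers the copy; PATCH requests without it fall through to the existing bulk-update behaviour.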
diff --git a/api_tests/actions/views/test_action_list.py b/api_tests/actions/views/test_action_list.py index df07ef74a2e..c6a0039fc53 100644 --- a/api_tests/actions/views/test_action_list.py +++ b/api_tests/actions/views/test_action_list.py @@ -1,6 +1,8 @@ import pytest +import pytest_socket from api.base.settings.defaults import API_BASE +from osf.models import NotificationType from osf_tests.factories import ( PreprintFactory, AuthUserFactory, @@ -189,7 +191,11 @@ def test_accept_permissions_accept(self, app, url, preprint, node_admin, moderat assert not preprint.is_published # Moderator can accept - res = app.post_json_api(url, accept_payload, auth=moderator.auth) + with capture_notifications() as notifications: + res = app.post_json_api(url, accept_payload, auth=moderator.auth) + assert len(notifications['emits']) == 2 + assert notifications['emits'][0]['type'] == NotificationType.Type.REVIEWS_SUBMISSION_STATUS + assert notifications['emits'][1]['type'] == NotificationType.Type.REVIEWS_SUBMISSION_STATUS assert res.status_code == 201 preprint.refresh_from_db() assert preprint.machine_state == 'accepted' @@ -319,8 +325,11 @@ def test_valid_transitions( preprint.date_last_transitioned = None preprint.save() payload = self.create_payload(preprint._id, trigger=trigger) - with capture_notifications(): + try: res = app.post_json_api(url, payload, auth=moderator.auth) + except pytest_socket.SocketConnectBlockedError: + with capture_notifications(): + res = app.post_json_api(url, payload, auth=moderator.auth) assert res.status_code == 201 action = preprint.actions.order_by('-created').first() diff --git a/api_tests/base/test_throttling.py b/api_tests/base/test_throttling.py index 52f6267c2a6..a1c2921ca2f 100644 --- a/api_tests/base/test_throttling.py +++ b/api_tests/base/test_throttling.py @@ -1,9 +1,12 @@ from unittest import mock from api.base.settings.defaults import API_BASE +from osf.models import NotificationType from tests.base import ApiTestCase from osf_tests.factories import AuthUserFactory, ProjectFactory +from tests.utils import capture_notifications + class TestDefaultThrottleClasses(ApiTestCase): @@ -121,10 +124,14 @@ def test_add_contrib_throttle_rate_allow_request_not_called( @mock.patch('api.base.throttling.AddContributorThrottle.allow_request') def test_add_contrib_throttle_rate_allow_request_called(self, mock_allow): - res = self.app.post_json_api( - self.public_url, - self.data_user_two, - auth=self.user.auth) + with capture_notifications() as notifications: + res = self.app.post_json_api( + self.public_url, + self.data_user_two, + auth=self.user.auth + ) + assert len(notifications['emits']) == 1 + assert notifications['emits'][0]['type'] == NotificationType.Type.NODE_CONTRIBUTOR_ADDED_DEFAULT assert res.status_code == 201 assert mock_allow.call_count == 1 diff --git a/api_tests/base/test_utils.py b/api_tests/base/test_utils.py index 32429fdf0f7..cd3d56652f9 100644 --- a/api_tests/base/test_utils.py +++ b/api_tests/base/test_utils.py @@ -5,7 +5,6 @@ from django.conf import settings as django_conf_settings from rest_framework import fields -from rest_framework.exceptions import ValidationError from api.base import utils as api_utils from osf.models.base import coerce_guid, Guid, GuidMixin, OptionalGuidMixin, VersionedGuidMixin, InvalidGuid @@ -63,47 +62,6 @@ def test_push_status_message_no_response(self): except BaseException: assert False, f'Exception from push_status_message via API v2 with type "{status}".' 
- def test_push_status_message_expected_error(self): - status_message = 'This is a message' - try: - push_status_message(status_message, kind='error') - assert False, 'push_status_message() should have generated a ValidationError exception.' - - except ValidationError as e: - assert ( - e.detail[0] == status_message - ), 'push_status_message() should have passed along the message with the Exception.' - - except RuntimeError: - assert False, 'push_status_message() should have caught the runtime error and replaced it.' - - except BaseException: - assert False, 'Exception from push_status_message when called from the v2 API with type "error"' - - @mock.patch('framework.status.get_session') - def test_push_status_message_unexpected_error(self, mock_get_session): - status_message = 'This is a message' - exception_message = 'this is some very unexpected problem' - mock_session = mock.Mock() - mock_session.attach_mock(mock.Mock(side_effect=RuntimeError(exception_message)), 'get') - mock_get_session.return_value = mock_session - try: - push_status_message(status_message, kind='error') - assert False, 'push_status_message() should have generated a RuntimeError exception.' - except ValidationError: - assert False, 'push_status_message() should have re-raised the RuntimeError not gotten ValidationError.' - except RuntimeError as e: - assert str(e) == exception_message, ( - 'push_status_message() should have re-raised the ' - 'original RuntimeError with the original message.' - ) - - except BaseException: - assert False, ( - 'Unexpected Exception from push_status_message when called ' - 'from the v2 API with type "error"' - ) - @pytest.mark.django_db class TestCoerceGuid: diff --git a/api_tests/collection_submission_actions/views/test_collection_submissions_actions_detail.py b/api_tests/collection_submission_actions/views/test_collection_submissions_actions_detail.py index 6601100bffd..2a7fe5cdb79 100644 --- a/api_tests/collection_submission_actions/views/test_collection_submissions_actions_detail.py +++ b/api_tests/collection_submission_actions/views/test_collection_submissions_actions_detail.py @@ -29,8 +29,7 @@ def node(collection_provider): def collection(collection_provider): collection = CollectionFactory(is_public=True) collection.provider = collection_provider - with capture_notifications(): - collection.save() + collection.save() return collection diff --git a/api_tests/collection_submission_actions/views/test_collection_submissions_actions_list.py b/api_tests/collection_submission_actions/views/test_collection_submissions_actions_list.py index bad7da10539..9c9fb239fb2 100644 --- a/api_tests/collection_submission_actions/views/test_collection_submissions_actions_list.py +++ b/api_tests/collection_submission_actions/views/test_collection_submissions_actions_list.py @@ -5,7 +5,7 @@ from django.utils import timezone from osf_tests.factories import NodeFactory, CollectionFactory, CollectionProviderFactory -from osf.models import CollectionSubmission +from osf.models import CollectionSubmission, NotificationType from osf.utils.workflows import CollectionSubmissionsTriggers, CollectionSubmissionStates from tests.utils import capture_notifications @@ -33,8 +33,7 @@ def node(collection_provider): def collection(collection_provider): collection = CollectionFactory() collection.provider = collection_provider - with capture_notifications(): - collection.save() + collection.save() return collection @@ -122,10 +121,21 @@ def test_status_code__collection_moderator_accept_reject_moderated(self, app, no 
collection_submission.state_machine.set_state(CollectionSubmissionStates.PENDING) collection_submission.save() test_auth = configure_test_auth(node, UserRoles.MODERATOR) - resp = app.post_json_api(POST_URL, make_payload( - collection_submission=collection_submission, - trigger=moderator_trigger.db_name - ), auth=test_auth, expect_errors=True) + with capture_notifications() as notifications: + resp = app.post_json_api( + POST_URL, + make_payload( + collection_submission=collection_submission, + trigger=moderator_trigger.db_name + ), + auth=test_auth, + expect_errors=True + ) + assert len(notifications['emits']) == 1 + if moderator_trigger is CollectionSubmissionsTriggers.ACCEPT: + assert notifications['emits'][0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_ACCEPTED + if moderator_trigger is CollectionSubmissionsTriggers.REJECT: + assert notifications['emits'][0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REJECTED assert resp.status_code == 201 @pytest.mark.parametrize('moderator_trigger', [CollectionSubmissionsTriggers.ACCEPT, CollectionSubmissionsTriggers.REJECT]) @@ -158,11 +168,27 @@ def test_status_code__remove(self, app, node, collection_submission, user_role): collection_submission.state_machine.set_state(CollectionSubmissionStates.ACCEPTED) collection_submission.save() test_auth = configure_test_auth(node, user_role) - resp = app.post_json_api(POST_URL, make_payload( - collection_submission=collection_submission, - trigger=CollectionSubmissionsTriggers.REMOVE.db_name - ), auth=test_auth, expect_errors=True) + with capture_notifications() as notifications: + resp = app.post_json_api( + POST_URL, + make_payload( + collection_submission=collection_submission, + trigger=CollectionSubmissionsTriggers.REMOVE.db_name + ), + auth=test_auth, + expect_errors=True + ) assert resp.status_code == 201 + if user_role == UserRoles.MODERATOR: + assert len(notifications['emits']) == 1 + assert notifications['emits'][0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_MODERATOR + assert notifications['emits'][0]['kwargs']['user'] == collection_submission.creator + else: + assert len(notifications['emits']) == 2 + assert notifications['emits'][0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN + assert notifications['emits'][0]['kwargs']['user'] == collection_submission.creator + assert notifications['emits'][1]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_ADMIN + assert notifications['emits'][1]['kwargs']['user'] == node.contributors.last() @pytest.mark.django_db @@ -182,11 +208,14 @@ def test_POST_accept__writes_action_and_advances_state(self, app, collection_sub collection_submission.save() test_auth = configure_test_auth(node, UserRoles.MODERATOR) payload = make_payload(collection_submission, trigger=CollectionSubmissionsTriggers.ACCEPT.db_name) - app.post_json_api(POST_URL, payload, auth=test_auth) + with capture_notifications() as notifications: + app.post_json_api(POST_URL, payload, auth=test_auth) + assert len(notifications['emits']) == 1 + assert notifications['emits'][0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_ACCEPTED + user = collection_submission.collection.provider.get_group('moderator').user_set.first() collection_submission.refresh_from_db() action = collection_submission.actions.last() - assert action.trigger == CollectionSubmissionsTriggers.ACCEPT assert action.creator == user assert action.from_state == CollectionSubmissionStates.PENDING @@ -198,11 +227,14 @@ def 
test_POST_reject__writes_action_and_advances_state(self, app, collection_sub collection_submission.save() test_auth = configure_test_auth(node, UserRoles.MODERATOR) payload = make_payload(collection_submission, trigger=CollectionSubmissionsTriggers.REJECT.db_name) - app.post_json_api(POST_URL, payload, auth=test_auth) + with capture_notifications() as notifications: + app.post_json_api(POST_URL, payload, auth=test_auth) + assert len(notifications['emits']) == 1 + assert notifications['emits'][0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REJECTED + user = collection_submission.collection.provider.get_group('moderator').user_set.first() collection_submission.refresh_from_db() action = collection_submission.actions.last() - assert action.trigger == CollectionSubmissionsTriggers.REJECT assert action.creator == user assert action.from_state == CollectionSubmissionStates.PENDING @@ -214,7 +246,14 @@ def test_POST_cancel__writes_action_and_advances_state(self, app, collection_sub collection_submission.save() test_auth = configure_test_auth(node, UserRoles.ADMIN_USER) payload = make_payload(collection_submission, trigger=CollectionSubmissionsTriggers.CANCEL.db_name) - app.post_json_api(POST_URL, payload, auth=test_auth) + with capture_notifications() as notifications: + app.post_json_api(POST_URL, payload, auth=test_auth) + assert len(notifications['emits']) == 2 + assert notifications['emits'][0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_CANCEL + assert notifications['emits'][0]['kwargs']['user'] == collection_submission.creator + assert notifications['emits'][1]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_CANCEL + assert notifications['emits'][0]['kwargs']['user'] == node.creator + collection_submission.refresh_from_db() action = collection_submission.actions.last() @@ -229,7 +268,10 @@ def test_POST_remove__writes_action_and_advances_state(self, app, collection_sub collection_submission.save() test_auth = configure_test_auth(node, UserRoles.MODERATOR) payload = make_payload(collection_submission, trigger=CollectionSubmissionsTriggers.REMOVE.db_name) - app.post_json_api(POST_URL, payload, auth=test_auth) + with capture_notifications() as notifications: + app.post_json_api(POST_URL, payload, auth=test_auth) + assert len(notifications['emits']) == 1 + assert notifications['emits'][0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_REMOVED_MODERATOR user = collection_submission.collection.provider.get_group('moderator').user_set.first() collection_submission.refresh_from_db() action = collection_submission.actions.last() @@ -269,15 +311,18 @@ def test_status_code__private_collection_moderator(self, app, node, collection, collection.is_public = False collection.save() test_auth = configure_test_auth(node, UserRoles.MODERATOR) - resp = app.post_json_api( - POST_URL, - make_payload( - collection_submission, - trigger=CollectionSubmissionsTriggers.ACCEPT.db_name - ), - auth=test_auth, - expect_errors=True - ) + with capture_notifications() as notifications: + resp = app.post_json_api( + POST_URL, + make_payload( + collection_submission, + trigger=CollectionSubmissionsTriggers.ACCEPT.db_name + ), + auth=test_auth, + expect_errors=True + ) + assert len(notifications['emits']) == 1 + assert notifications['emits'][0]['type'] == NotificationType.Type.COLLECTION_SUBMISSION_ACCEPTED assert resp.status_code == 201 diff --git a/api_tests/collection_submissions/views/test_collection_submission_list_actions.py 
b/api_tests/collection_submissions/views/test_collection_submission_list_actions.py index 2c1c546fe5d..8c7cbe7d318 100644 --- a/api_tests/collection_submissions/views/test_collection_submission_list_actions.py +++ b/api_tests/collection_submissions/views/test_collection_submission_list_actions.py @@ -6,6 +6,7 @@ from osf.migrations import update_provider_auth_groups from osf.models import CollectionSubmission from osf.utils.workflows import CollectionSubmissionsTriggers, CollectionSubmissionStates +from tests.utils import capture_notifications GET_URL = '/v2/collection_submissions/{}/actions/' @@ -39,7 +40,8 @@ def collection_submission(node, collection): collection=collection, creator=node.creator, ) - collection_submission.save() + with capture_notifications(): + collection_submission.save() return collection_submission diff --git a/api_tests/crossref/views/test_crossref_email_response.py b/api_tests/crossref/views/test_crossref_email_response.py index d7e0a86c278..33345f4f442 100644 --- a/api_tests/crossref/views/test_crossref_email_response.py +++ b/api_tests/crossref/views/test_crossref_email_response.py @@ -163,7 +163,7 @@ def test_error_response_sends_message_does_not_set_doi(self, app, url, preprint, with capture_notifications() as notifications: app.post(url, context_data) assert len(notifications['emits']) == 1 - assert notifications['emits'][0]['type'] == NotificationType.Type.DESK_OSF_SUPPORT_EMAIL + assert notifications['emits'][0]['type'] == NotificationType.Type.DESK_CROSSREF_ERROR assert not preprint.get_identifier_value('doi') def test_success_response_sets_doi(self, app, url, preprint, success_xml): diff --git a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py index 58d58d0cf95..45d3888dcc3 100644 --- a/api_tests/files/views/test_file_detail.py +++ b/api_tests/files/views/test_file_detail.py @@ -121,10 +121,8 @@ def test_file_with_wrong_guid(self, app, user): def test_file_guid_not_created_with_basic_auth( self, mock_allow, app, user, file_url): res = app.get(f'{file_url}?create_guid=1', auth=user.auth) - guid = res.json['data']['attributes'].get('guid', None) assert res.status_code == 200 assert mock_allow.call_count == 1 - assert guid is None @mock.patch('api.base.throttling.CreateGuidThrottle.allow_request') def test_file_guid_created_with_cookie( diff --git a/api_tests/metadata_records/test_custom_item_metadata.py b/api_tests/metadata_records/test_custom_item_metadata.py index 4de6de457ab..13d259ba7ca 100644 --- a/api_tests/metadata_records/test_custom_item_metadata.py +++ b/api_tests/metadata_records/test_custom_item_metadata.py @@ -351,6 +351,7 @@ def test_with_write_permission(self, app, public_osfguid, private_osfguid, anybo resource_type_general='book-chapter', ), auth=anybody_with_write_permission.auth, + expect_errors=True, ) assert res.status_code == 400 expected.assert_expectations(db_record=db_record, api_record=None) # db unchanged diff --git a/api_tests/nodes/views/test_node_contributors_detail_update.py b/api_tests/nodes/views/test_node_contributors_detail_update.py index 0e183c97345..3321cce167f 100644 --- a/api_tests/nodes/views/test_node_contributors_detail_update.py +++ b/api_tests/nodes/views/test_node_contributors_detail_update.py @@ -32,6 +32,21 @@ def project(self, user, contrib): ) return project + @pytest.fixture() + def component_project_and_parent_project(self, user, contrib): + # both need to be in same method to keep root same as parent + parent_project = ProjectFactory(creator=user) + 
parent_project.add_contributor( + contrib, + permissions=permissions.WRITE, + visible=True, + save=True + ) + component_project = ProjectFactory(creator=user) + component_project.root = parent_project + component_project.save() + return component_project, parent_project + @pytest.fixture() def url_creator(self, user, project): return f'/{API_BASE}nodes/{project._id}/contributors/{user._id}/' @@ -414,3 +429,22 @@ def test_change_admin_self_with_other_admin(self, app, user, contrib, project, u attributes = res.json['data']['attributes'] assert attributes['permission'] == permissions.WRITE assert project.get_permissions(user) == [permissions.READ, permissions.WRITE] + + def test_update_component_project_with_parent_contributors(self, app, user, component_project_and_parent_project): + def exists_unique_parent_contributors(parent_project, component_project): + return parent_project.contributors.exclude( + id__in=component_project.contributors.values_list('id', flat=True) + ).exists() + component_project, parent_project = component_project_and_parent_project + assert exists_unique_parent_contributors(parent_project, component_project) + res = app.patch_json_api( + f'/{API_BASE}nodes/{component_project._id}/contributors/?copy_contributors_from_parent_project=true', + { + 'data': { + 'type': 3 + } + }, + auth=user.auth + ) + assert res.status_code == 200 + assert not exists_unique_parent_contributors(parent_project, component_project) diff --git a/api_tests/search/views/test_views.py b/api_tests/search/views/test_views.py index 75e288263e8..c535828d955 100644 --- a/api_tests/search/views/test_views.py +++ b/api_tests/search/views/test_views.py @@ -18,6 +18,7 @@ RegistrationProviderFactory, ) from osf_tests.utils import mock_archive +from tests.utils import capture_notifications from website import settings from website.search import elastic_search from website.search import search @@ -849,13 +850,14 @@ def test_search_collections( node_with_abstract, node_private, registration_collection, registration_one, registration_two, registration_private, reg_with_abstract): - collection_public.collect_object(node_one, user) - collection_public.collect_object(node_two, user) - collection_public.collect_object(node_private, user) + with capture_notifications(): + collection_public.collect_object(node_one, user) + collection_public.collect_object(node_two, user) + collection_public.collect_object(node_private, user) - registration_collection.collect_object(registration_one, user) - registration_collection.collect_object(registration_two, user) - registration_collection.collect_object(registration_private, user) + registration_collection.collect_object(registration_one, user) + registration_collection.collect_object(registration_two, user) + registration_collection.collect_object(registration_private, user) # test_search_collections_no_auth res = app.get(url_collection_search) @@ -889,8 +891,9 @@ def test_search_collections( assert total == num_results == 2 # test_search_collections_by_submission_abstract - collection_public.collect_object(node_with_abstract, user) - registration_collection.collect_object(reg_with_abstract, user) + with capture_notifications(): + collection_public.collect_object(node_with_abstract, user) + registration_collection.collect_object(reg_with_abstract, user) url = '{}?q={}'.format(url_collection_search, 'KHADJA') res = app.get(url) assert res.status_code == 200 @@ -909,15 +912,16 @@ def test_POST_search_collections( self, app, url_collection_search, user, node_one, node_two, 
collection_public, node_with_abstract, node_private, registration_collection, registration_one, registration_two, registration_private, reg_with_abstract): - collection_public.collect_object(node_one, user, status='asdf', issue='0', volume='1', program_area='asdf') - collection_public.collect_object(node_two, user, collected_type='asdf', status='lkjh') - collection_public.collect_object(node_with_abstract, user, status='asdf', grade_levels='super') - collection_public.collect_object(node_private, user, status='asdf', collected_type='asdf') + with capture_notifications(): + collection_public.collect_object(node_one, user, status='asdf', issue='0', volume='1', program_area='asdf') + collection_public.collect_object(node_two, user, collected_type='asdf', status='lkjh') + collection_public.collect_object(node_with_abstract, user, status='asdf', grade_levels='super') + collection_public.collect_object(node_private, user, status='asdf', collected_type='asdf') - registration_collection.collect_object(registration_one, user, status='asdf') - registration_collection.collect_object(registration_two, user, collected_type='asdf', status='lkjh') - registration_collection.collect_object(reg_with_abstract, user, status='asdf') - registration_collection.collect_object(registration_private, user, status='asdf', collected_type='asdf') + registration_collection.collect_object(registration_one, user, status='asdf') + registration_collection.collect_object(registration_two, user, collected_type='asdf', status='lkjh') + registration_collection.collect_object(reg_with_abstract, user, status='asdf') + registration_collection.collect_object(registration_private, user, status='asdf', collected_type='asdf') # test_search_empty payload = self.post_payload() @@ -1015,8 +1019,9 @@ def test_POST_search_collections_disease_data_type( node_with_abstract, node_private, registration_collection, registration_one, registration_two, registration_private, reg_with_abstract): - collection_public.collect_object(node_one, user, disease='illness', data_type='realness') - collection_public.collect_object(node_two, user, data_type='realness') + with capture_notifications(): + collection_public.collect_object(node_one, user, disease='illness', data_type='realness') + collection_public.collect_object(node_two, user, data_type='realness') payload = self.post_payload(disease='illness') res = app.post_json_api(url_collection_search, payload) diff --git a/osf/models/user.py b/osf/models/user.py index 8519593a283..e4f9c03fe82 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -1693,7 +1693,7 @@ def get_claim_url(self, project_id, external=False): base_url = website_settings.DOMAIN if external else '/' unclaimed_record = self.get_unclaimed_record(project_id) token = unclaimed_record['token'] - return f'{base_url}user/{uid}/{project_id}/claim/?token={token}' + return f'{base_url}legacy/user/{uid}/{project_id}/claim/?token={token}' def is_affiliated_with_institution(self, institution): """Return if this user is affiliated with the given ``institution``.""" diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index 6cddff997c0..fe8861a9015 100644 --- a/osf_tests/test_user.py +++ b/osf_tests/test_user.py @@ -1165,7 +1165,7 @@ def test_get_claim_url(self, unreg_user, unreg_moderator, project, provider): domain = settings.DOMAIN assert ( unreg_user.get_claim_url(pid, external=True) == - f'{domain}user/{uid}/{pid}/claim/?token={token}' + f'{domain}legacy/user/{uid}/{pid}/claim/?token={token}' ) # test_unreg_moderator @@ -1175,7 
+1175,7 @@ def test_get_claim_url(self, unreg_user, unreg_moderator, project, provider): domain = settings.DOMAIN assert ( unreg_moderator.get_claim_url(pid, external=True) == - f'{domain}user/{uid}/{pid}/claim/?token={token}' + f'{domain}legacy/user/{uid}/{pid}/claim/?token={token}' ) def test_get_claim_url_raises_value_error_if_not_valid_pid(self, unreg_user, unreg_moderator): diff --git a/tasks/__init__.py b/tasks/__init__.py index 37d1169a2d4..ef69f14e01f 100755 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -378,7 +378,6 @@ def test_module(ctx, module=None, numprocesses=None, nocapture=False, params=Non 'api_tests/guids', 'api_tests/meetings', 'api_tests/metadata_records', - 'api_tests/osf_groups', 'api_tests/reviews', 'api_tests/regions', 'api_tests/search', diff --git a/tests/test_adding_contributor_views.py b/tests/test_adding_contributor_views.py index 72fe3ee10c4..64e398b77e8 100644 --- a/tests/test_adding_contributor_views.py +++ b/tests/test_adding_contributor_views.py @@ -559,7 +559,258 @@ def test_send_claim_email_before_throttle_expires(self): project.save() with capture_notifications(): send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) - # 2nd call raises error because throttle hasn't expired with pytest.raises(HTTPError): - send_claim_email(email=fake_email(), unclaimed_user=unreg_user, node=project) + send_claim_registered_email( + claimer=reg_user, + unclaimed_user=self.user, + node=self.project, + ) + assert not self.mock_send_grid.called + + @mock.patch('website.project.views.contributor.send_claim_registered_email') + def test_claim_user_post_with_email_already_registered_sends_correct_email( + self, send_claim_registered_email): + reg_user = UserFactory() + payload = { + 'value': reg_user.username, + 'pk': self.user._primary_key + } + url = self.project.api_url_for('claim_user_post', uid=self.user._id) + self.app.post(url, json=payload) + assert send_claim_registered_email.called + + def test_user_with_removed_unclaimed_url_claiming(self): + """ Tests that when an unclaimed user is removed from a project, the + unregistered user object does not retain the token. + """ + self.project.remove_contributor(self.user, Auth(user=self.referrer)) + + assert self.project._primary_key not in self.user.unclaimed_records.keys() + + def test_user_with_claim_url_cannot_claim_twice(self): + """ Tests that when an unclaimed user is replaced on a project with a + claimed user, the unregistered user object does not retain the token. 
+ """ + reg_user = AuthUserFactory() + + self.project.replace_contributor(self.user, reg_user) + + assert self.project._primary_key not in self.user.unclaimed_records.keys() + + def test_claim_user_form_redirects_to_password_confirm_page_if_user_is_logged_in(self): + reg_user = AuthUserFactory() + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.get(url, auth=reg_user.auth) + assert res.status_code == 302 + res = self.app.get(url, auth=reg_user.auth, follow_redirects=True) + token = self.user.get_unclaimed_record(self.project._primary_key)['token'] + expected = self.project.web_url_for( + 'claim_user_registered', + uid=self.user._id, + token=token, + ) + assert res.request.path == expected + + @mock.patch('framework.auth.cas.make_response_from_ticket') + def test_claim_user_when_user_is_registered_with_orcid(self, mock_response_from_ticket): + # TODO: check in qa url encoding + token = self.user.get_unclaimed_record(self.project._primary_key)['token'] + url = f'/user/{self.user._id}/{self.project._id}/claim/verify/{token}/' + # logged out user gets redirected to cas login + res1 = self.app.get(url) + assert res1.status_code == 302 + res = self.app.resolve_redirect(self.app.get(url)) + service_url = f'http://localhost{url}' + expected = cas.get_logout_url(service_url=cas.get_login_url(service_url=service_url)) + assert res1.location == expected + + # user logged in with orcid automatically becomes a contributor + orcid_user, validated_credentials, cas_resp = generate_external_user_with_resp(url) + mock_response_from_ticket.return_value = authenticate( + orcid_user, + redirect(url) + ) + orcid_user.set_unusable_password() + orcid_user.save() + + # The request to OSF with CAS service ticket must not have cookie and/or auth. + service_ticket = fake.md5() + url_with_service_ticket = f'{url}?ticket={service_ticket}' + res = self.app.get(url_with_service_ticket) + # The response of this request is expected to be a 302 with `Location`. + # And the redirect URL must equal to the originial service URL + assert res.status_code == 302 + redirect_url = res.headers['Location'] + assert redirect_url == url + # The response of this request is expected have the `Set-Cookie` header with OSF cookie. + # And the cookie must belong to the ORCiD user. 
+ raw_set_cookie = res.headers['Set-Cookie'] + assert raw_set_cookie + simple_cookie = SimpleCookie() + simple_cookie.load(raw_set_cookie) + cookie_dict = {key: value.value for key, value in simple_cookie.items()} + osf_cookie = cookie_dict.get(settings.COOKIE_NAME, None) + assert osf_cookie is not None + user = OSFUser.from_cookie(osf_cookie) + assert user._id == orcid_user._id + # The ORCiD user must be different from the unregistered user created when the contributor was added + assert user._id != self.user._id + + # Must clear the Flask g context manually and set the OSF cookie on the context + g.current_session = None + self.app.set_cookie(settings.COOKIE_NAME, osf_cookie) + res = self.app.resolve_redirect(res) + assert res.status_code == 302 + assert self.project.is_contributor(orcid_user) + assert self.project.url in res.headers.get('Location') + + def test_get_valid_form(self): + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.get(url, follow_redirects=True) + assert res.status_code == 200 + + def test_invalid_claim_form_raise_400(self): + uid = self.user._primary_key + pid = self.project._primary_key + url = f'/legacy/user/{uid}/{pid}/claim/?token=badtoken' + res = self.app.get(url, follow_redirects=True) + assert res.status_code == 400 + + @mock.patch('osf.models.OSFUser.update_search_nodes') + def test_posting_to_claim_form_with_valid_data(self, mock_update_search_nodes): + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.post(url, data={ + 'username': self.user.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + + assert res.status_code == 302 + location = res.headers.get('Location') + assert 'login?service=' in location + assert 'username' in location + assert 'verification_key' in location + assert self.project._primary_key in location + + self.user.reload() + assert self.user.is_registered + assert self.user.is_active + assert self.project._primary_key not in self.user.unclaimed_records + + @mock.patch('osf.models.OSFUser.update_search_nodes') + def test_posting_to_claim_form_removes_all_unclaimed_data(self, mock_update_search_nodes): + # user has multiple unclaimed records + p2 = ProjectFactory(creator=self.referrer) + self.user.add_unclaimed_record(p2, referrer=self.referrer, + given_name=fake.name()) + self.user.save() + assert len(self.user.unclaimed_records.keys()) > 1 # sanity check + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.post(url, data={ + 'username': self.given_email, + 'password': 'bohemianrhap', + 'password2': 'bohemianrhap' + }) + self.user.reload() + assert self.user.unclaimed_records == {} + + @mock.patch('osf.models.OSFUser.update_search_nodes') + def test_posting_to_claim_form_sets_fullname_to_given_name(self, mock_update_search_nodes): + # User is created with a full name + original_name = fake.name() + unreg = UnregUserFactory(fullname=original_name) + # User invited with a different name + different_name = fake.name() + new_user = self.project.add_unregistered_contributor( + email=unreg.username, + fullname=different_name, + auth=Auth(self.project.creator), + ) + self.project.save() + # Goes to claim url + claim_url = new_user.get_claim_url(self.project._id) + self.app.post(claim_url, data={ + 'username': unreg.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + unreg.reload() + # Full name was set correctly + assert unreg.fullname == different_name + # CSL names were set correctly + parsed_name = impute_names_model(different_name)
+ assert unreg.given_name == parsed_name['given_name'] + assert unreg.family_name == parsed_name['family_name'] + + def test_claim_user_post_returns_fullname(self): + url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' + res = self.app.post( + url, + auth=self.referrer.auth, + json={ + 'value': self.given_email, + 'pk': self.user._primary_key + }, + ) + assert res.json['fullname'] == self.given_name + assert self.mock_send_grid.called + + def test_claim_user_post_if_email_is_different_from_given_email(self): + email = fake_email() # email that is different from the one the referrer gave + url = f'/api/v1/user/{self.user._primary_key}/{self.project._primary_key}/claim/email/' + self.app.post(url, json={'value': email, 'pk': self.user._primary_key}) + assert self.mock_send_grid.called + assert self.mock_send_grid.call_count == 2 + call_to_invited = self.mock_send_grid.mock_calls[0] + assert call_to_invited.kwargs['to_addr'] == email + call_to_referrer = self.mock_send_grid.mock_calls[1] + assert call_to_referrer.kwargs['to_addr'] == self.given_email + + def test_claim_url_with_bad_token_returns_400(self): + url = self.project.web_url_for( + 'claim_user_registered', + uid=self.user._id, + token='badtoken', + ) + res = self.app.get(url, auth=self.referrer.auth) + assert res.status_code == 400 + + def test_cannot_claim_user_with_user_who_is_already_contributor(self): + # user who is already a contributor to the project + contrib = AuthUserFactory() + self.project.add_contributor(contrib, auth=Auth(self.project.creator)) + self.project.save() + # Claiming user goes to claim url, but contrib is already logged in + url = self.user.get_claim_url(self.project._primary_key) + res = self.app.get( + url, + auth=contrib.auth, follow_redirects=True) + # Response is a 400 + assert res.status_code == 400 + + def test_claim_user_with_project_id_adds_corresponding_claimed_tag_to_user(self): + assert OsfClaimedTags.Osf.value not in self.user.system_tags + url = self.user.get_claim_url(self.project_with_source_tag._primary_key) + res = self.app.post(url, data={ + 'username': self.user.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + + assert res.status_code == 302 + self.user.reload() + assert OsfClaimedTags.Osf.value in self.user.system_tags + + def test_claim_user_with_preprint_id_adds_corresponding_claimed_tag_to_user(self): + assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') not in self.user.system_tags + url = self.user.get_claim_url(self.preprint_with_source_tag._primary_key) + res = self.app.post(url, data={ + 'username': self.user.username, + 'password': 'killerqueen', + 'password2': 'killerqueen' + }) + + assert res.status_code == 302 + self.user.reload() + assert provider_claimed_tag(self.preprint_with_source_tag.provider._id, 'preprint') in self.user.system_tags diff --git a/website/routes.py b/website/routes.py index 2a3415e1051..195476ec50f 100644 --- a/website/routes.py +++ b/website/routes.py @@ -857,7 +857,7 @@ def make_url_map(app): # user will be required to set email and password # claim token must be present in query parameter Rule( - ['/user/<uid>/<pid>/claim/'], + ['/legacy/user/<uid>/<pid>/claim/'], ['get', 'post'], project_views.contributor.claim_user_form, OsfWebRenderer('claim_account.mako', trust=False)
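For reference, the three claim-URL changes above (the admin view, `OSFUser.get_claim_url`, and the Flask rule in `website/routes.py`) all converge on the same relocated URL shape. A small illustrative check; the guids and token are made-up values, and `DOMAIN` only mirrors the form of `website.settings.DOMAIN`:

```python
# Illustrative only: the claim-URL shape the updated code and tests expect.
# 'abcd1', 'wxyz9' and 'sometoken' are hypothetical values, not from this diff.
DOMAIN = 'https://osf.io/'
uid, pid, token = 'abcd1', 'wxyz9', 'sometoken'

claim_url = f'{DOMAIN}legacy/user/{uid}/{pid}/claim/?token={token}'
assert claim_url == 'https://osf.io/legacy/user/abcd1/wxyz9/claim/?token=sometoken'
```

The relocated rule (`/legacy/user/<uid>/<pid>/claim/`, GET and POST) serves exactly this path, with the claim token still carried as a query parameter.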