46 changes: 26 additions & 20 deletions addons/base/views.py
@@ -34,7 +34,6 @@
from framework.flask import redirect
from framework.sentry import log_exception
from framework.transactions.handlers import no_auto_transaction
from website import mails
from website import settings
from addons.base import signals as file_signals
from addons.base.utils import format_last_known_metadata, get_mfr_url
@@ -52,7 +51,7 @@
DraftRegistration,
Guid,
FileVersionUserMetadata,
FileVersion
FileVersion, NotificationType
)
from osf.metrics import PreprintView, PreprintDownload
from osf.utils import permissions
@@ -64,7 +63,7 @@
from website.util import rubeus

# import so that associated listener is instantiated and gets emails
from website.notifications.events.files import FileEvent # noqa
from notifications.file_event_notifications import FileEvent # noqa

ERROR_MESSAGES = {'FILE_GONE': """
<style>
@@ -226,8 +225,6 @@ def get_auth(auth, **kwargs):
_check_resource_permissions(resource, auth, action)

provider_name = waterbutler_data['provider']
waterbutler_settings = None
waterbutler_credentials = None
file_version = file_node = None
if provider_name == 'osfstorage' or (not flag_is_active(request, features.ENABLE_GV)):
file_version, file_node = _get_osfstorage_file_version_and_node(
@@ -576,20 +573,31 @@ def create_waterbutler_log(payload, **kwargs):
params=payload
)

if payload.get('email') is True or payload.get('errors'):
mails.send_mail(
user.username,
mails.FILE_OPERATION_FAILED if payload.get('errors')
else mails.FILE_OPERATION_SUCCESS,
action=payload['action'],
source_node=source_node,
destination_node=destination_node,
source_path=payload['source']['materialized'],
source_addon=payload['source']['addon'],
destination_addon=payload['destination']['addon'],
osf_support_email=settings.OSF_SUPPORT_EMAIL
if payload.get('email') or payload.get('errors'):
if payload.get('email'):
notification_type = NotificationType.Type.USER_FILE_OPERATION_SUCCESS.instance
if payload.get('errors'):
notification_type = NotificationType.Type.USER_FILE_OPERATION_FAILED.instance
notification_type.emit(
user=user,
subscribed_object=node,
event_context={
'user_fullname': user.fullname,
'action': payload['action'],
'source_node': source_node._id,
'source_node_title': source_node.title,
'destination_node': destination_node._id,
'destination_node_title': destination_node.title,
'destination_node_parent_node_title': destination_node.parent_node.title if destination_node.parent_node else None,
'source_path': payload['source']['materialized'],
'source_addon': payload['source']['addon'],
'destination_addon': payload['destination']['addon'],
'osf_support_email': settings.OSF_SUPPORT_EMAIL,
'logo': settings.OSF_LOGO,
'OSF_LOGO_LIST': settings.OSF_LOGO_LIST,
'OSF_LOGO': settings.OSF_LOGO,
}
)

if payload.get('errors'):
# Action failed but our function succeeded
# Bail out to avoid file_signals
@@ -603,10 +611,8 @@ def create_waterbutler_log(payload, **kwargs):
target_node = AbstractNode.load(metadata.get('nid'))
if target_node and payload['action'] != 'download_file':
update_storage_usage_with_size(payload)

with transaction.atomic():
file_signals.file_updated.send(target=node, user=user, event_type=action, payload=payload)

return {'status': 'success'}
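
Note on the pattern introduced above: the old branch called mails.send_mail with a mail constant, while the new branch selects a NotificationType and calls .emit() with the recipient and a flat event_context dict. The sketch below is only a minimal, self-contained illustration of that call shape; the enum and its emit body are stand-ins, not the actual osf.models.NotificationType (which exposes entries as NotificationType.Type.<NAME>.instance).

from enum import Enum

class NotificationType(Enum):
    # Stand-in for osf.models.NotificationType, for illustration only.
    USER_FILE_OPERATION_SUCCESS = 'user_file_operation_success'
    USER_FILE_OPERATION_FAILED = 'user_file_operation_failed'

    def emit(self, user, subscribed_object=None, event_context=None):
        # The real emit presumably renders a template and queues delivery;
        # here we only show the call shape.
        print(f'notify {user}: {self.value} {event_context or {}}')

def notify_file_operation(user, payload, context):
    # Mirrors the branch in create_waterbutler_log: a failure notification
    # takes precedence over the success one.
    notification_type = (
        NotificationType.USER_FILE_OPERATION_FAILED
        if payload.get('errors')
        else NotificationType.USER_FILE_OPERATION_SUCCESS
    )
    notification_type.emit(user=user, event_context=context)

notify_file_operation('user0001', {'errors': ['checksum mismatch']}, {'action': 'upload'})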


64 changes: 33 additions & 31 deletions addons/boa/tasks.py
@@ -14,10 +14,9 @@
from addons.boa.boa_error_code import BoaErrorCode
from framework import sentry
from framework.celery_tasks import app as celery_app
from osf.models import OSFUser
from osf.models import OSFUser, NotificationType
from osf.utils.fields import ensure_str, ensure_bytes
from website import settings as osf_settings
from website.mails import send_mail, ADDONS_BOA_JOB_COMPLETE, ADDONS_BOA_JOB_FAILURE

logger = logging.getLogger(__name__)

@@ -184,18 +183,19 @@ async def submit_to_boa_async(host, username, password, user_guid, project_guid,

logger.info('Successfully uploaded query output to OSF.')
logger.debug('Task ends <<<<<<<<')
await sync_to_async(send_mail)(
to_addr=user.username,
mail=ADDONS_BOA_JOB_COMPLETE,
fullname=user.fullname,
query_file_name=query_file_name,
query_file_full_path=file_full_path,
output_file_name=output_file_name,
job_id=boa_job.id,
project_url=project_url,
boa_job_list_url=boa_settings.BOA_JOB_LIST_URL,
boa_support_email=boa_settings.BOA_SUPPORT_EMAIL,
osf_support_email=osf_settings.OSF_SUPPORT_EMAIL,
NotificationType.Type.ADDONS_BOA_JOB_COMPLETE.instance.emit(
user=user,
event_context={
'fullname': user.fullname,
'query_file_name': query_file_name,
'query_file_full_path': file_full_path,
'output_file_name': output_file_name,
'job_id': boa_job.id,
'project_url': project_url,
'boa_job_list_url': boa_settings.BOA_JOB_LIST_URL,
'boa_support_email': boa_settings.BOA_SUPPORT_EMAIL,
'osf_support_email': osf_settings.OSF_SUPPORT_EMAIL,
}
)
return BoaErrorCode.NO_ERROR

@@ -209,22 +209,24 @@ def handle_boa_error(message, code, username, fullname, project_url, query_file_
sentry.log_message(message, skip_session=True)
except Exception:
pass
send_mail(
to_addr=username,
mail=ADDONS_BOA_JOB_FAILURE,
fullname=fullname,
code=code,
message=message,
query_file_name=query_file_name,
file_size=file_size,
max_file_size=boa_settings.MAX_SUBMISSION_SIZE,
query_file_full_path=query_file_full_path,
output_file_name=output_file_name,
job_id=job_id,
max_job_wait_hours=boa_settings.MAX_JOB_WAITING_TIME / 3600,
project_url=project_url,
boa_job_list_url=boa_settings.BOA_JOB_LIST_URL,
boa_support_email=boa_settings.BOA_SUPPORT_EMAIL,
osf_support_email=osf_settings.OSF_SUPPORT_EMAIL,
NotificationType.Type.ADDONS_BOA_JOB_FAILURE.instance.emit(
destination_address=username,
event_context={
'user_fullname': fullname,
'code': code,
'query_file_name': query_file_name,
'file_size': file_size,
'message': message,
'max_file_size': boa_settings.MAX_SUBMISSION_SIZE,
'query_file_full_path': query_file_full_path,
'output_file_name': output_file_name,
'job_id': job_id,
'max_job_wait_hours': boa_settings.MAX_JOB_WAITING_TIME / 3600,
'project_url': project_url,
'boa_job_list_url': boa_settings.BOA_JOB_LIST_URL,
'boa_support_email': boa_settings.BOA_SUPPORT_EMAIL,
'osf_support_email': osf_settings.OSF_SUPPORT_EMAIL,

}
)
return code
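
The boa changes use the same emit API in two shapes: ADDONS_BOA_JOB_COMPLETE is addressed with a loaded user object, while handle_boa_error only has a username string and passes destination_address instead. The helper below is a hedged sketch of accepting either form; every name in it is a hypothetical stand-in rather than the OSF model.

from dataclasses import dataclass, field

@dataclass
class QueuedNotification:
    # Stand-in record; the real model would persist and deliver these.
    type_name: str
    recipient: str
    event_context: dict = field(default_factory=dict)

def emit(type_name, user=None, destination_address=None, event_context=None):
    # Mirror the two call shapes in addons/boa/tasks.py: address by user object
    # when one is loaded, or by raw address when only a username is available.
    if user is None and destination_address is None:
        raise ValueError('need either a user or a destination_address')
    recipient = destination_address or getattr(user, 'username', str(user))
    return QueuedNotification(type_name, recipient, event_context or {})

# e.g. emit('ADDONS_BOA_JOB_FAILURE', destination_address='researcher@example.edu',
#          event_context={'code': 3, 'message': 'Boa job timed out'})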
54 changes: 21 additions & 33 deletions addons/boa/tests/test_tasks.py
@@ -9,10 +9,11 @@
from addons.boa import settings as boa_settings
from addons.boa.boa_error_code import BoaErrorCode
from addons.boa.tasks import submit_to_boa, submit_to_boa_async, handle_boa_error
from osf.models import NotificationType
from osf_tests.factories import AuthUserFactory, ProjectFactory
from tests.base import OsfTestCase
from tests.utils import capture_notifications
from website import settings as osf_settings
from website.mails import ADDONS_BOA_JOB_COMPLETE, ADDONS_BOA_JOB_FAILURE

DEFAULT_REFRESH_JOB_INTERVAL = boa_settings.REFRESH_JOB_INTERVAL
DEFAULT_MAX_JOB_WAITING_TIME = boa_settings.MAX_JOB_WAITING_TIME
@@ -38,9 +39,6 @@ def setUp(self):
self.output_file_name = 'fake_boa_script_results.txt'
self.job_id = '1a2b3c4d5e6f7g8'

from conftest import start_mock_send_grid
self.mock_send_grid = start_mock_send_grid(self)

def tearDown(self):
super().tearDown()

@@ -55,27 +53,26 @@ def test_boa_error_code(self):
assert BoaErrorCode.FILE_TOO_LARGE_ERROR == 6
assert BoaErrorCode.JOB_TIME_OUT_ERROR == 7

@mock.patch('website.mails.settings.USE_EMAIL', True)
@mock.patch('website.mails.settings.USE_CELERY', False)
def test_handle_boa_error(self):
with mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message, \
mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error:
return_value = handle_boa_error(
self.error_message,
BoaErrorCode.UNKNOWN,
self.user_username,
self.user_fullname,
self.project_url,
self.file_full_path,
query_file_name=self.query_file_name,
file_size=self.file_size,
output_file_name=self.output_file_name,
job_id=self.job_id
)
self.mock_send_grid.assert_called()
mock_sentry_log_message.assert_called_with(self.error_message, skip_session=True)
mock_logger_error.assert_called_with(self.error_message)
assert return_value == BoaErrorCode.UNKNOWN
with mock.patch('addons.boa.tasks.sentry.log_message', return_value=None) as mock_sentry_log_message:
with mock.patch('addons.boa.tasks.logger.error', return_value=None) as mock_logger_error:
with capture_notifications() as notifications:
return_value = handle_boa_error(
self.error_message,
BoaErrorCode.UNKNOWN,
self.user_username,
self.user_fullname,
self.project_url,
self.file_full_path,
file_size=self.file_size,
output_file_name=self.output_file_name,
job_id=self.job_id
)
assert len(notifications['emits']) == 1
assert notifications['emits'][0]['type'] == NotificationType.Type.ADDONS_BOA_JOB_FAILURE
mock_sentry_log_message.assert_called_with(self.error_message, skip_session=True)
mock_logger_error.assert_called_with(self.error_message)
assert return_value == BoaErrorCode.UNKNOWN


class TestSubmitToBoa(OsfTestCase):
@@ -154,14 +151,6 @@ def setUp(self):
boa_settings.REFRESH_JOB_INTERVAL = DEFAULT_REFRESH_JOB_INTERVAL
boa_settings.MAX_JOB_WAITING_TIME = DEFAULT_MAX_JOB_WAITING_TIME

from conftest import start_mock_send_grid
self.mock_send_grid = start_mock_send_grid(self)

def tearDown(self):
super().tearDown()

@mock.patch('website.mails.settings.USE_EMAIL', True)
@mock.patch('website.mails.settings.USE_CELERY', False)
async def test_submit_success(self):
with mock.patch('osf.models.user.OSFUser.objects.get', return_value=self.user), \
mock.patch('osf.models.user.OSFUser.get_or_create_cookie', return_value=self.user_cookie), \
Expand Down Expand Up @@ -190,7 +179,6 @@ async def test_submit_success(self):
assert self.mock_job.refresh.call_count == 4
assert mock_async_sleep.call_count == 4
mock_close.assert_called()
self.mock_send_grid.assert_called()
mock_handle_boa_error.assert_not_called()

async def test_download_error(self):
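
The rewritten tests above swap the SendGrid mock for the capture_notifications() helper from tests.utils and assert on notifications['emits']. The helper's internals are not shown in this diff; the following is only a guess at a minimal equivalent, to make the assertion shape concrete.

from contextlib import contextmanager
from unittest import mock

class NotificationType:
    # Stand-in for the model whose emit() the real helper intercepts.
    def __init__(self, name):
        self.name = name

    def emit(self, user=None, destination_address=None, event_context=None):
        raise RuntimeError('would deliver a real notification outside of tests')

@contextmanager
def capture_notifications():
    # Record every emit call instead of delivering it, then hand the record
    # back to the test for assertions.
    captured = {'emits': []}

    def _record(self, **kwargs):
        captured['emits'].append({'type': self.name, 'kwargs': kwargs})

    with mock.patch.object(NotificationType, 'emit', autospec=True, side_effect=_record):
        yield captured

# Usage, mirroring the assertions in the boa tests:
with capture_notifications() as notifications:
    NotificationType('ADDONS_BOA_JOB_FAILURE').emit(event_context={'code': 3})
assert len(notifications['emits']) == 1
assert notifications['emits'][0]['type'] == 'ADDONS_BOA_JOB_FAILURE'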
7 changes: 5 additions & 2 deletions addons/osfstorage/tests/test_models.py
@@ -24,6 +24,7 @@
from osf import models
from addons.osfstorage import utils
from addons.osfstorage import settings
from tests.utils import capture_notifications
from website.files.exceptions import FileNodeCheckedOutError, FileNodeIsPrimaryFile

SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore
Expand Down Expand Up @@ -745,7 +746,8 @@ def test_after_fork_copies_versions(self, node, node_settings, auth_obj):
version = factories.FileVersionFactory()
record.add_version(version)

fork = node.fork_node(auth_obj)
with capture_notifications():
fork = node.fork_node(auth_obj)
fork_node_settings = fork.get_addon('osfstorage')
fork_node_settings.reload()

@@ -757,7 +759,8 @@ def test_fork_reverts_to_node_storage_region(self, user2, region, region2, node,
"""
Despite different user region defaults, the forked node always stays in the same region as its original node.
"""
fork = node.fork_node(Auth(user2))
with capture_notifications():
fork = node.fork_node(Auth(user2))
assert fork.get_addon('osfstorage').region_id == region.id

# don't inherit or override region
7 changes: 5 additions & 2 deletions addons/wiki/tests/test_wiki.py
@@ -29,6 +29,7 @@
from framework.auth import Auth
from django.utils import timezone
from addons.wiki.utils import to_mongo_key
from tests.utils import capture_notifications

from .config import EXAMPLE_DOCS, EXAMPLE_OPS

Expand Down Expand Up @@ -818,7 +819,8 @@ def test_uuids_differ_between_forks(self):
assert project_res.status_code == 200
self.project.reload()

fork = self.project.fork_node(Auth(self.user))
with capture_notifications():
fork = self.project.fork_node(Auth(self.user))
assert fork.is_fork_of(self.project)
fork_url = fork.web_url_for('project_wiki_view', wname=self.wname)
fork_res = self.app.get(fork_url, auth=self.user.auth)
Expand Down Expand Up @@ -1084,7 +1086,8 @@ def test_get_sharejs_uuid(self):
# Differs across projects and forks
project = ProjectFactory()
assert sharejs_uuid != get_sharejs_uuid(project, wname)
fork = self.project.fork_node(Auth(self.project.creator))
with capture_notifications():
fork = self.project.fork_node(Auth(self.project.creator))
assert sharejs_uuid != get_sharejs_uuid(fork, wname)

def test_generate_share_uuid(self):
12 changes: 1 addition & 11 deletions admin/common_auth/views.py
@@ -10,7 +10,7 @@
from django.contrib.auth import login, REDIRECT_FIELD_NAME, authenticate, logout

from osf.models.user import OSFUser
from osf.models import AdminProfile, AbstractProvider
from osf.models import AdminProfile
from admin.common_auth.forms import LoginForm, UserRegistrationForm, DeskUserForm


@@ -69,16 +69,6 @@ def form_valid(self, form):

# create AdminProfile for this new user
profile, created = AdminProfile.objects.get_or_create(user=osf_user)

for group in form.cleaned_data.get('group_perms'):
osf_user.groups.add(group)
split = group.name.split('_')
group_type = split[0]
if group_type == 'reviews':
provider_id = split[2]
provider = AbstractProvider.objects.get(id=provider_id)
provider.notification_subscriptions.get(event_name='new_pending_submissions').add_user_to_subscription(osf_user, 'email_transactional')

osf_user.save()

if created:
4 changes: 1 addition & 3 deletions admin/nodes/views.py
@@ -21,7 +21,7 @@
from admin.base.utils import change_embargo_date
from admin.base.views import GuidView
from admin.base.forms import GuidForm
from admin.notifications.views import detect_duplicate_notifications, delete_selected_notifications
from admin.notifications.views import delete_selected_notifications

from api.share.utils import update_share
from api.caching.tasks import update_storage_usage_cache
@@ -100,7 +100,6 @@ def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
node = self.get_object()

detailed_duplicates = detect_duplicate_notifications(node_id=node.id)
children = node.get_nodes(is_node_link=False)
# Annotate guid because django templates prohibit accessing attributes that start with underscores
children = AbstractNode.objects.filter(
@@ -111,7 +110,6 @@
'STORAGE_LIMITS': settings.StorageLimits,
'node': node,
'children': children,
'duplicates': detailed_duplicates
})

return context