fix 500 errors with Excel files over the 500kb size limit

werkzeug's internal workings keep files under 500kb in memory, and files
greater than 500kb as a TemporaryFile

(https://github.com/pallets/werkzeug/blob/0.11-maintenance/werkzeug/formparser.py#L38)

when we encounter a CSV or TSV, we call normalise_newlines, which invokes
`.read()`. However, when we were passing straight into pyexcel, we called
`file.getvalue()` — this exists on BytesIO (small files) but not on
TemporaryFile objects (large files), so we were seeing 500 errors
This commit is contained in:
Leo Hemsted
2016-10-26 15:44:24 +01:00
parent a93333572f
commit 26a985720c
3 changed files with 12 additions and 7 deletions

View File

@@ -1,9 +1,12 @@
import pytest
from pathlib import Path
from io import StringIO
from app.utils import email_safe, generate_notifications_csv, generate_previous_dict, generate_next_dict
from csv import DictReader
import pytest
from freezegun import freeze_time
from app.utils import email_safe, generate_notifications_csv, generate_previous_dict, generate_next_dict, Spreadsheet
def test_email_safe_return_dot_separated_email_domain():
test_name = 'SOME service with+stuff+ b123'
@@ -67,3 +70,9 @@ def test_generate_next_dict(client):
def test_generate_previous_next_dict_adds_other_url_args(client):
    """Extra URL arguments passed to generate_next_dict should appear in the
    generated URL."""
    extra_url_args = {'message_type': 'blah'}
    pagination = generate_next_dict('main.view_notifications', 'foo', 2, extra_url_args)
    assert 'notifications/blah' in pagination['url']
def test_can_create_spreadsheet_from_large_excel_file():
    """Spreadsheet.from_file should accept a large Excel file opened from disk
    and produce CSV data from it.

    NOTE(review): per the commit message, large uploads arrive as
    TemporaryFile (no ``.getvalue()``) rather than BytesIO — this fixture
    exercises that path.
    """
    fixture_path = Path.cwd() / 'tests' / 'spreadsheet_files' / 'excel 2007.xlsx'
    with open(str(fixture_path), 'rb') as excel_file:
        spreadsheet = Spreadsheet.from_file(excel_file, filename='xl.xlsx')
        assert spreadsheet.as_csv_data