diff --git a/Pipfile.lock b/Pipfile.lock index d51ee08d9..1cd3ed3bd 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -353,11 +353,11 @@ }, "flask": { "hashes": [ - "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b", - "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526" + "sha256:7eb373984bf1c770023fce9db164ed0c3353cd0b53f130f4693da0ca756a2e6d", + "sha256:c0bec9477df1cb867e5a67c9e1ab758de9cb4a3e52dd70681f59fa40a62b3f2d" ], "index": "pypi", - "version": "==2.2.2" + "version": "==2.2.3" }, "flask-bcrypt": { "hashes": [ @@ -743,24 +743,24 @@ }, "newrelic": { "hashes": [ - "sha256:2163cb63db50dfd792066ffcf4c14909bb252f39147313332fe545b1b2539b15", - "sha256:29f1cb2e00d14ed15744acf417703f1be4ed29a0e1488242c618bdde123db37b", - "sha256:3de16fee3e9ac6b3384504f22bb7af1d913e7531c20dcc895833133e2aa4735c", - "sha256:51e2391f50ca2deda749e5bfae1cc2791474cf1a814ccb1111837bff9db9447b", - "sha256:53583c56b480cc5cd9ed71341232f8e26c2f01ec508ff8ecc384159bd6a31c3c", - "sha256:66f3fa85eb3b2ab54d0637b108ea1c82f628f5f050417f8ddcab38bd1607a170", - "sha256:7e9fa97cd9f686090a64f7cdef776225163b33b751cbf4b2861b5b96da40d870", - "sha256:8024f13e705bc29bc13cf58438737f3fe6f4e66b364f8758585f8bea409ceb41", - "sha256:802bbda6c88d7e912b7220f17fba63e022a69d8b0cc3c2e3781395e4bf732c60", - "sha256:877daddeeb3959982ae5bd9b5f2a30c25e6f5f43d92fecf7d870e040c1da0c95", - "sha256:9425e17405ab3e1d15c4c3b48f193bb8f75cd60c402bed670d5ee2a911b684ef", - "sha256:a15169b13f333db8691a9cb8f81498d5ccd54f965a4097abc1d23eb884a0b2f2", - "sha256:a3b835fde21c405017562fc1daeb5cfa790a099f055654333f5c993ad0b26c9e", - "sha256:a78612d6fdad07e0c2bd98f109898f99ac99ead65ba22a249f275af6fb307078", - "sha256:a8aaa35bdbfe9681e59030c45e5cf56f80a6b059ec14898e0f71e1a2f06b354b" + "sha256:27ace0e370bb26215aa33cf16aef5c580e15b8d28e1571f44977380d00c7da85", + "sha256:2eb214e4540595c259ca6927ca0e2f11ad943a54caf25e6847725bd80d2fdd5c", + "sha256:3350d5f67d0bf5bba75446809b80f949a77be946ea19b977457cec4b965595d8", + "sha256:3949e70082b882b58a09253a0650965115172ce76d94bf8aba7b2572880a6794", + "sha256:3beb7b089a8a4cb7a9daee066a9e14b3a713fb14c732ab62753eb446ef01e37a", + "sha256:4eef08adc764d6103b3a387a0bd705551c03ad3f2c6bde0b8f91b4c78fcfac3e", + "sha256:5a8d88746ba41fa6ed13efa3fa2f10705315f19c8077d4b3383602ac35b754fc", + "sha256:6882c809b8ad82a0eff06af192f218c3fa43fce936916d7c78626a677fe35f13", + "sha256:701553504c1f04a04f2a3092ff7332828057272a1a75babffa702492df2044bc", + "sha256:86d6411473d2e4d2844fc71f5ca3a2a808822e079f9f3f684429c48e0149fdf7", + "sha256:9d0d426dc7c8004a589c4c777a74dcdc0503510f0cd98157cdde4354c5c74d04", + "sha256:b417534f96d297666322f32304bb4022527fe809b29534f8d4693c583731619d", + "sha256:ca425772cf72c435cc313d00f1b3fb392f05df5db0d9eee194e39d1212da99d1", + "sha256:e729cdd108135ecc50d5f9545c99f9ac27566dd10f5c05213e2ad188d0467d06", + "sha256:f06be99fe5180b02d4f629d7033799a296468f209935a006075958d57584c1f1" ], "index": "pypi", - "version": "==8.6.0" + "version": "==8.7.0" }, "notifications-python-client": { "hashes": [ @@ -847,11 +847,11 @@ }, "prompt-toolkit": { "hashes": [ - "sha256:6a2948ec427dfcc7c983027b1044b355db6aaa8be374f54ad2015471f7d81c5b", - "sha256:d5d73d4b5eb1a92ba884a88962b157f49b71e06c4348b417dd622b25cdd3800b" + "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b", + "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f" ], - "markers": "python_version >= '3.7'", - "version": "==3.0.37" + "markers": "python_full_version >= '3.7.0'", + "version": "==3.0.38" }, "psycopg2-binary": { "hashes": [ @@ -1120,7 +1120,7 @@ "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" ], - "markers": "python_version >= '3.7' and python_version < '4.0'", + "markers": "python_version >= '3.7' and python_version < '4'", "version": "==2.28.2" }, "rfc3339-validator": { @@ -1142,7 +1142,7 @@ "sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2", "sha256:9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9" ], - "markers": "python_version >= '3.5' and python_version < '4.0'", + "markers": "python_version >= '3.5' and python_version < '4'", "version": "==4.7.2" }, "s3transfer": { @@ -1338,81 +1338,92 @@ }, "wrapt": { "hashes": [ - "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3", - "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b", - "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4", - "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2", - "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656", - "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3", - "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff", - "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310", - "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a", - "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57", - "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069", - "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383", - "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe", - "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87", - "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d", - "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b", - "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907", - "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f", - "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0", - "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28", - "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1", - "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853", - "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc", - "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3", - "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3", - "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164", - "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1", - "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c", - "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1", - "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7", - "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1", - "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320", - "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed", - "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1", - "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248", - "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c", - "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456", - "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77", - "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef", - "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1", - "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7", - "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86", - "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4", - "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d", - "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d", - "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8", - "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5", - "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471", - "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00", - "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68", - "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3", - "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d", - "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735", - "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d", - "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569", - "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7", - "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59", - "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5", - "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb", - "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b", - "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f", - "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462", - "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015", - "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af" + "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0", + "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420", + "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a", + "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c", + "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079", + "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923", + "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f", + "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1", + "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8", + "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86", + "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0", + "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364", + "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e", + "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c", + "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e", + "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c", + "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727", + "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff", + "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e", + "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29", + "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7", + "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72", + "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475", + "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a", + "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317", + "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2", + "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd", + "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640", + "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98", + "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248", + "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e", + "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d", + "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec", + "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1", + "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e", + "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9", + "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92", + "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb", + "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094", + "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46", + "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29", + "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd", + "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705", + "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8", + "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975", + "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb", + "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e", + "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b", + "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418", + "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019", + "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1", + "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba", + "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6", + "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2", + "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3", + "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7", + "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752", + "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416", + "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f", + "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1", + "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc", + "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145", + "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee", + "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a", + "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7", + "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b", + "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653", + "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0", + "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90", + "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29", + "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6", + "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034", + "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09", + "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559", + "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==1.14.1" + "version": "==1.15.0" }, "zipp": { "hashes": [ - "sha256:188834565033387710d046e3fe96acfc9b5e86cbca7f39ff69cf21a4128198b7", - "sha256:9e5421e176ef5ab4c0ad896624e87a7b2f07aca746c9b2aa305952800cb8eecb" + "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", + "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" ], "markers": "python_version >= '3.7'", - "version": "==3.14.0" + "version": "==3.15.0" } }, "develop": { @@ -1536,60 +1547,60 @@ "toml" ], "hashes": [ - "sha256:049806ae2df69468c130f04f0fab4212c46b34ba5590296281423bb1ae379df2", - "sha256:08e3dd256b8d3e07bb230896c8c96ec6c5dffbe5a133ba21f8be82b275b900e8", - "sha256:0f03c229f1453b936916f68a47b3dfb5e84e7ad48e160488168a5e35115320c8", - "sha256:171dd3aa71a49274a7e4fc26f5bc167bfae5a4421a668bc074e21a0522a0af4b", - "sha256:1856a8c4aa77eb7ca0d42c996d0ca395ecafae658c1432b9da4528c429f2575c", - "sha256:28563a35ef4a82b5bc5160a01853ce62b9fceee00760e583ffc8acf9e3413753", - "sha256:2c15bd09fd5009f3a79c8b3682b52973df29761030b692043f9834fc780947c4", - "sha256:2c9fffbc39dc4a6277e1525cab06c161d11ee3995bbc97543dc74fcec33e045b", - "sha256:2d7daf3da9c7e0ed742b3e6b4de6cc464552e787b8a6449d16517b31bbdaddf5", - "sha256:32e6a730fd18b2556716039ab93278ccebbefa1af81e6aa0c8dba888cf659e6e", - "sha256:34d7211be69b215ad92298a962b2cd5a4ef4b17c7871d85e15d3d1b6dc8d8c96", - "sha256:358d3bce1468f298b19a3e35183bdb13c06cdda029643537a0cc37e55e74e8f1", - "sha256:3713a8ec18781fda408f0e853bf8c85963e2d3327c99a82a22e5c91baffcb934", - "sha256:40785553d68c61e61100262b73f665024fd2bb3c6f0f8e2cd5b13e10e4df027b", - "sha256:4655ecd813f4ba44857af3e9cffd133ab409774e9d2a7d8fdaf4fdfd2941b789", - "sha256:465ea431c3b78a87e32d7d9ea6d081a1003c43a442982375cf2c247a19971961", - "sha256:4b8fd32f85b256fc096deeb4872aeb8137474da0c0351236f93cbedc359353d6", - "sha256:4c1153a6156715db9d6ae8283480ae67fb67452aa693a56d7dae9ffe8f7a80da", - "sha256:577a8bc40c01ad88bb9ab1b3a1814f2f860ff5c5099827da2a3cafc5522dadea", - "sha256:59a427f8a005aa7254074719441acb25ac2c2f60c1f1026d43f846d4254c1c2f", - "sha256:5e29a64e9586194ea271048bc80c83cdd4587830110d1e07b109e6ff435e5dbc", - "sha256:74cd60fa00f46f28bd40048d6ca26bd58e9bee61d2b0eb4ec18cea13493c003f", - "sha256:7efa21611ffc91156e6f053997285c6fe88cfef3fb7533692d0692d2cb30c846", - "sha256:7f992b32286c86c38f07a8b5c3fc88384199e82434040a729ec06b067ee0d52c", - "sha256:875b03d92ac939fbfa8ae74a35b2c468fc4f070f613d5b1692f9980099a3a210", - "sha256:88ae5929f0ef668b582fd7cad09b5e7277f50f912183cf969b36e82a1c26e49a", - "sha256:8d5302eb84c61e758c9d68b8a2f93a398b272073a046d07da83d77b0edc8d76b", - "sha256:90e7a4cbbb7b1916937d380beb1315b12957b8e895d7d9fb032e2038ac367525", - "sha256:9240a0335365c29c968131bdf624bb25a8a653a9c0d8c5dbfcabf80b59c1973c", - "sha256:932048364ff9c39030c6ba360c31bf4500036d4e15c02a2afc5a76e7623140d4", - "sha256:93db11da6e728587e943dff8ae1b739002311f035831b6ecdb15e308224a4247", - "sha256:971b49dbf713044c3e5f6451b39f65615d4d1c1d9a19948fa0f41b0245a98765", - "sha256:9cc9c41aa5af16d845b53287051340c363dd03b7ef408e45eec3af52be77810d", - "sha256:9dbb21561b0e04acabe62d2c274f02df0d715e8769485353ddf3cf84727e31ce", - "sha256:a6ceeab5fca62bca072eba6865a12d881f281c74231d2990f8a398226e1a5d96", - "sha256:ad12c74c6ce53a027f5a5ecbac9be20758a41c85425c1bbab7078441794b04ee", - "sha256:b09dd7bef59448c66e6b490cc3f3c25c14bc85d4e3c193b81a6204be8dd355de", - "sha256:bd67df6b48db18c10790635060858e2ea4109601e84a1e9bfdd92e898dc7dc79", - "sha256:bf9e02bc3dee792b9d145af30db8686f328e781bd212fdef499db5e9e4dd8377", - "sha256:bfa065307667f1c6e1f4c3e13f415b0925e34e56441f5fda2c84110a4a1d8bda", - "sha256:c160e34e388277f10c50dc2c7b5e78abe6d07357d9fe7fcb2f3c156713fd647e", - "sha256:c243b25051440386179591a8d5a5caff4484f92c980fb6e061b9559da7cc3f64", - "sha256:c3c4beddee01c8125a75cde3b71be273995e2e9ec08fbc260dd206b46bb99969", - "sha256:cd38140b56538855d3d5722c6d1b752b35237e7ea3f360047ce57f3fade82d98", - "sha256:d7f2a7df523791e6a63b40360afa6792a11869651307031160dc10802df9a252", - "sha256:da32526326e8da0effb452dc32a21ffad282c485a85a02aeff2393156f69c1c3", - "sha256:dc4f9a89c82faf6254d646180b2e3aa4daf5ff75bdb2c296b9f6a6cf547e26a7", - "sha256:f0557289260125a6c453ad5673ba79e5b6841d9a20c9e101f758bfbedf928a77", - "sha256:f332d61fbff353e2ef0f3130a166f499c3fad3a196e7f7ae72076d41a6bfb259", - "sha256:f3ff4205aff999164834792a3949f82435bc7c7655c849226d5836c3242d7451", - "sha256:ffa637a2d5883298449a5434b699b22ef98dd8e2ef8a1d9e60fa9cfe79813411" + "sha256:0339dc3237c0d31c3b574f19c57985fcbe494280153bbcad33f2cdf469f4ac3e", + "sha256:09643fb0df8e29f7417adc3f40aaf379d071ee8f0350ab290517c7004f05360b", + "sha256:0bd7e628f6c3ec4e7d2d24ec0e50aae4e5ae95ea644e849d92ae4805650b4c4e", + "sha256:0cf557827be7eca1c38a2480484d706693e7bb1929e129785fe59ec155a59de6", + "sha256:0f8318ed0f3c376cfad8d3520f496946977abde080439d6689d7799791457454", + "sha256:1b7fb13850ecb29b62a447ac3516c777b0e7a09ecb0f4bb6718a8654c87dfc80", + "sha256:22c308bc508372576ffa3d2dbc4824bb70d28eeb4fcd79d4d1aed663a06630d0", + "sha256:3004765bca3acd9e015794e5c2f0c9a05587f5e698127ff95e9cfba0d3f29339", + "sha256:3a209d512d157379cc9ab697cbdbb4cfd18daa3e7eebaa84c3d20b6af0037384", + "sha256:436313d129db7cf5b4ac355dd2bd3f7c7e5294af077b090b85de75f8458b8616", + "sha256:49567ec91fc5e0b15356da07a2feabb421d62f52a9fff4b1ec40e9e19772f5f8", + "sha256:4dd34a935de268a133e4741827ae951283a28c0125ddcdbcbba41c4b98f2dfef", + "sha256:570c21a29493b350f591a4b04c158ce1601e8d18bdcd21db136fbb135d75efa6", + "sha256:5928b85416a388dd557ddc006425b0c37e8468bd1c3dc118c1a3de42f59e2a54", + "sha256:5d2b9b5e70a21474c105a133ba227c61bc95f2ac3b66861143ce39a5ea4b3f84", + "sha256:617a94ada56bbfe547aa8d1b1a2b8299e2ec1ba14aac1d4b26a9f7d6158e1273", + "sha256:6a034480e9ebd4e83d1aa0453fd78986414b5d237aea89a8fdc35d330aa13bae", + "sha256:6fce673f79a0e017a4dc35e18dc7bb90bf6d307c67a11ad5e61ca8d42b87cbff", + "sha256:78d2c3dde4c0b9be4b02067185136b7ee4681978228ad5ec1278fa74f5ca3e99", + "sha256:7f099da6958ddfa2ed84bddea7515cb248583292e16bb9231d151cd528eab657", + "sha256:80559eaf6c15ce3da10edb7977a1548b393db36cbc6cf417633eca05d84dd1ed", + "sha256:834c2172edff5a08d78e2f53cf5e7164aacabeb66b369f76e7bb367ca4e2d993", + "sha256:861cc85dfbf55a7a768443d90a07e0ac5207704a9f97a8eb753292a7fcbdfcfc", + "sha256:8649371570551d2fd7dee22cfbf0b61f1747cdfb2b7587bb551e4beaaa44cb97", + "sha256:87dc37f16fb5e3a28429e094145bf7c1753e32bb50f662722e378c5851f7fdc6", + "sha256:8a6450da4c7afc4534305b2b7d8650131e130610cea448ff240b6ab73d7eab63", + "sha256:8d3843ca645f62c426c3d272902b9de90558e9886f15ddf5efe757b12dd376f5", + "sha256:8dca3c1706670297851bca1acff9618455122246bdae623be31eca744ade05ec", + "sha256:97a3189e019d27e914ecf5c5247ea9f13261d22c3bb0cfcfd2a9b179bb36f8b1", + "sha256:99f4dd81b2bb8fc67c3da68b1f5ee1650aca06faa585cbc6818dbf67893c6d58", + "sha256:9e872b082b32065ac2834149dc0adc2a2e6d8203080501e1e3c3c77851b466f9", + "sha256:a81dbcf6c6c877986083d00b834ac1e84b375220207a059ad45d12f6e518a4e3", + "sha256:abacd0a738e71b20e224861bc87e819ef46fedba2fb01bc1af83dfd122e9c319", + "sha256:ae82c988954722fa07ec5045c57b6d55bc1a0890defb57cf4a712ced65b26ddd", + "sha256:b0c0d46de5dd97f6c2d1b560bf0fcf0215658097b604f1840365296302a9d1fb", + "sha256:b1991a6d64231a3e5bbe3099fb0dd7c9aeaa4275ad0e0aeff4cb9ef885c62ba2", + "sha256:b2167d116309f564af56f9aa5e75ef710ef871c5f9b313a83050035097b56820", + "sha256:bd5a12239c0006252244f94863f1c518ac256160cd316ea5c47fb1a11b25889a", + "sha256:bdd3f2f285ddcf2e75174248b2406189261a79e7fedee2ceeadc76219b6faa0e", + "sha256:c77f2a9093ccf329dd523a9b2b3c854c20d2a3d968b6def3b820272ca6732242", + "sha256:cb5f152fb14857cbe7f3e8c9a5d98979c4c66319a33cad6e617f0067c9accdc4", + "sha256:cca7c0b7f5881dfe0291ef09ba7bb1582cb92ab0aeffd8afb00c700bf692415a", + "sha256:d2ef6cae70168815ed91388948b5f4fcc69681480a0061114db737f957719f03", + "sha256:d9256d4c60c4bbfec92721b51579c50f9e5062c21c12bec56b55292464873508", + "sha256:e191a63a05851f8bce77bc875e75457f9b01d42843f8bd7feed2fc26bbe60833", + "sha256:e2b50ebc2b6121edf352336d503357321b9d8738bb7a72d06fc56153fd3f4cd8", + "sha256:e3ea04b23b114572b98a88c85379e9e9ae031272ba1fb9b532aa934c621626d4", + "sha256:e4d70c853f0546855f027890b77854508bdb4d6a81242a9d804482e667fff6e6", + "sha256:f29351393eb05e6326f044a7b45ed8e38cb4dcc38570d12791f271399dc41431", + "sha256:f3d07edb912a978915576a776756069dede66d012baa503022d3a0adba1b6afa", + "sha256:fac6343bae03b176e9b58104a9810df3cdccd5cfed19f99adfa807ffbf43cf9b" ], "markers": "python_version >= '3.7'", - "version": "==7.2.0" + "version": "==7.2.1" }, "cryptography": { "hashes": [ @@ -1625,7 +1636,7 @@ "sha256:1ccd482024a30b95c4fffb3fe567a9df97b705f34c1075f8abde8537867600c3", "sha256:8981ca462fba91469c268d684a03f72c89c7a807674d884f83a28d8c2822a9b6" ], - "markers": "python_version >= '3.6' and python_version < '4'", + "markers": "python_version >= '3.6' and python_version < '4.0'", "version": "==3.1.5" }, "execnet": { @@ -1754,7 +1765,7 @@ "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30", "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1" ], - "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==2.2.0" }, "markupsafe": { @@ -1946,7 +1957,7 @@ "sha256:4659bc2a667783e7a15d190f6fccf8b2486685b6dba4c19c3876314769c57526", "sha256:b4fa3a7a0be38243123cf9d1f3518da10c51bdb165a2b2985566247f9155a7d3" ], - "markers": "python_version >= '3.6'", + "markers": "python_full_version >= '3.6.0'", "version": "==32.0.1" }, "pluggy": { @@ -2106,7 +2117,7 @@ "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" ], - "markers": "python_version >= '3.7' and python_version < '4.0'", + "markers": "python_version >= '3.7' and python_version < '4'", "version": "==2.28.2" }, "requests-mock": { @@ -2137,7 +2148,7 @@ "sha256:125d96d20c92b946b983d0d392b84ff945461e5a06d3867e9f9e575f8697b67f", "sha256:8aa57747f3fc3e977684f0176a88e789be314a99f99b43b75d1e9cb5dc6db9e9" ], - "markers": "python_version >= '3.7'", + "markers": "python_full_version >= '3.7.0'", "version": "==13.3.1" }, "s3transfer": { diff --git a/app/__init__.py b/app/__init__.py index 38df2f5f7..20d93c37c 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -139,13 +139,6 @@ def register_blueprint(application): from app.inbound_number.rest import inbound_number_blueprint from app.inbound_sms.rest import inbound_sms as inbound_sms_blueprint from app.job.rest import job_blueprint - from app.letter_branding.letter_branding_rest import ( - letter_branding_blueprint, - ) - from app.letters.rest import letter_job - from app.notifications.notifications_letter_callback import ( - letter_callback_blueprint, - ) from app.notifications.notifications_ses_callback import ( ses_callback_blueprint, ) @@ -231,12 +224,6 @@ def register_blueprint(application): email_branding_blueprint.before_request(requires_admin_auth) application.register_blueprint(email_branding_blueprint, url_prefix='/email-branding') - letter_job.before_request(requires_admin_auth) - application.register_blueprint(letter_job) - - letter_callback_blueprint.before_request(requires_no_auth) - application.register_blueprint(letter_callback_blueprint) - billing_blueprint.before_request(requires_admin_auth) application.register_blueprint(billing_blueprint) @@ -258,9 +245,6 @@ def register_blueprint(application): template_folder_blueprint.before_request(requires_admin_auth) application.register_blueprint(template_folder_blueprint) - letter_branding_blueprint.before_request(requires_admin_auth) - application.register_blueprint(letter_branding_blueprint) - upload_blueprint.before_request(requires_admin_auth) application.register_blueprint(upload_blueprint) diff --git a/app/billing/billing_schemas.py b/app/billing/billing_schemas.py index acc06b5ab..4dd4a273e 100644 --- a/app/billing/billing_schemas.py +++ b/app/billing/billing_schemas.py @@ -20,7 +20,6 @@ def serialize_ft_billing_remove_emails(rows): "chargeable_units": row.chargeable_units, "notifications_sent": row.notifications_sent, "rate": float(row.rate), - "postage": row.postage, "cost": float(row.cost), "free_allowance_used": row.free_allowance_used, "charged_units": row.charged_units, diff --git a/app/celery/letters_pdf_tasks.py b/app/celery/letters_pdf_tasks.py deleted file mode 100644 index 754501567..000000000 --- a/app/celery/letters_pdf_tasks.py +++ /dev/null @@ -1,570 +0,0 @@ -from base64 import urlsafe_b64encode -from datetime import datetime, timedelta -from hashlib import sha512 - -from botocore.exceptions import ClientError as BotoClientError -from flask import current_app -from notifications_utils.letter_timings import LETTER_PROCESSING_DEADLINE -from notifications_utils.postal_address import PostalAddress -from notifications_utils.timezones import convert_utc_to_local_timezone - -from app import encryption, notify_celery -from app.aws import s3 -from app.config import QueueNames, TaskNames -from app.cronitor import cronitor -from app.dao.notifications_dao import ( - dao_get_letters_and_sheets_volume_by_postage, - dao_get_letters_to_be_printed, - dao_get_notification_by_reference, - dao_update_notification, - dao_update_notifications_by_reference, - get_notification_by_id, - update_notification_status_by_id, -) -from app.dao.templates_dao import dao_get_template_by_id -from app.errors import VirusScanError -from app.exceptions import NotificationTechnicalFailureException -from app.letters.utils import ( - LetterPDFNotFound, - ScanErrorType, - find_letter_pdf_in_s3, - generate_letter_pdf_filename, - get_billable_units_for_letter_page_count, - get_file_names_from_error_bucket, - get_folder_name, - get_reference_from_filename, - move_error_pdf_to_scan_bucket, - move_failed_pdf, - move_sanitised_letter_to_test_or_live_pdf_bucket, - move_scan_to_invalid_pdf_bucket, -) -from app.models import ( - INTERNATIONAL_LETTERS, - INTERNATIONAL_POSTAGE_TYPES, - KEY_TYPE_NORMAL, - KEY_TYPE_TEST, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_PENDING_VIRUS_CHECK, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_VALIDATION_FAILED, - NOTIFICATION_VIRUS_SCAN_FAILED, - POSTAGE_TYPES, - RESOLVE_POSTAGE_FOR_FILE_NAME, - Service, -) - - -@notify_celery.task(bind=True, name="get-pdf-for-templated-letter", max_retries=15, default_retry_delay=300) -def get_pdf_for_templated_letter(self, notification_id): - try: - notification = get_notification_by_id(notification_id, _raise=True) - letter_filename = generate_letter_pdf_filename( - reference=notification.reference, - created_at=notification.created_at, - ignore_folder=notification.key_type == KEY_TYPE_TEST, - postage=notification.postage - ) - letter_data = { - 'letter_contact_block': notification.reply_to_text, - 'template': { - "subject": notification.template.subject, - "content": notification.template.content, - "template_type": notification.template.template_type - }, - 'values': notification.personalisation, - 'logo_filename': notification.service.letter_branding and notification.service.letter_branding.filename, - 'letter_filename': letter_filename, - "notification_id": str(notification_id), - 'key_type': notification.key_type - } - - encrypted_data = encryption.sign(letter_data) - - notify_celery.send_task( - name=TaskNames.CREATE_PDF_FOR_TEMPLATED_LETTER, - args=(encrypted_data,), - queue=QueueNames.SANITISE_LETTERS - ) - except Exception as e: - try: - current_app.logger.exception( - f"RETRY: calling create-letter-pdf task for notification {notification_id} failed" - ) - self.retry(exc=e, queue=QueueNames.RETRY) - except self.MaxRetriesExceededError: - message = f"RETRY FAILED: Max retries reached. " \ - f"The task create-letter-pdf failed for notification id {notification_id}. " \ - f"Notification has been updated to technical-failure" - update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE) - raise NotificationTechnicalFailureException(message) - - -@notify_celery.task(bind=True, name="update-billable-units-for-letter", max_retries=15, default_retry_delay=300) -def update_billable_units_for_letter(self, notification_id, page_count): - notification = get_notification_by_id(notification_id, _raise=True) - - billable_units = get_billable_units_for_letter_page_count(page_count) - - if notification.key_type != KEY_TYPE_TEST: - notification.billable_units = billable_units - dao_update_notification(notification) - - current_app.logger.info( - f"Letter notification id: {notification_id} reference {notification.reference}: " - f"billable units set to {billable_units}" - ) - - -@notify_celery.task( - bind=True, name="update-validation-failed-for-templated-letter", max_retries=15, default_retry_delay=300 -) -def update_validation_failed_for_templated_letter(self, notification_id, page_count): - notification = get_notification_by_id(notification_id, _raise=True) - notification.status = NOTIFICATION_VALIDATION_FAILED - dao_update_notification(notification) - current_app.logger.info(f"Validation failed: letter is too long {page_count} for letter with id: {notification_id}") - - -@notify_celery.task(name='collate-letter-pdfs-to-be-sent') -@cronitor("collate-letter-pdfs-to-be-sent") -def collate_letter_pdfs_to_be_sent(): - """ - Finds all letters which are still waiting to be sent to DVLA for printing - - This would usually be run at 5.50pm and collect up letters created between before 5:30pm today - that have not yet been sent. - If run after midnight, it will collect up letters created before 5:30pm the day before. - """ - print_run_date = convert_utc_to_local_timezone(datetime.utcnow()) - if print_run_date.time() < LETTER_PROCESSING_DEADLINE: - print_run_date = print_run_date - timedelta(days=1) - - print_run_deadline = print_run_date.replace( - hour=17, minute=30, second=0, microsecond=0 - ) - _get_letters_and_sheets_volumes_and_send_to_dvla(print_run_deadline) - - for postage in POSTAGE_TYPES: - current_app.logger.info(f"starting collate-letter-pdfs-to-be-sent processing for postage class {postage}") - letters_to_print = get_key_and_size_of_letters_to_be_sent_to_print(print_run_deadline, postage) - - for i, letters in enumerate(group_letters(letters_to_print)): - filenames = [letter['Key'] for letter in letters] - - service_id = letters[0]['ServiceId'] - organisation_id = letters[0]['OrganisationId'] - - hash = urlsafe_b64encode(sha512(''.join(filenames).encode()).digest())[:20].decode() - # eg NOTIFY.2018-12-31.001.Wjrui5nAvObjPd-3GEL-.ZIP - dvla_filename = 'NOTIFY.{date}.{postage}.{num:03}.{hash}.{service_id}.{organisation_id}.ZIP'.format( - date=print_run_deadline.strftime("%Y-%m-%d"), - postage=RESOLVE_POSTAGE_FOR_FILE_NAME[postage], - num=i + 1, - hash=hash, - service_id=service_id, - organisation_id=organisation_id - ) - - current_app.logger.info( - 'Calling task zip-and-send-letter-pdfs for {} pdfs to upload {} with total size {:,} bytes'.format( - len(filenames), - dvla_filename, - sum(letter['Size'] for letter in letters) - ) - ) - notify_celery.send_task( - name=TaskNames.ZIP_AND_SEND_LETTER_PDFS, - kwargs={ - 'filenames_to_zip': filenames, - 'upload_filename': dvla_filename - }, - queue=QueueNames.PROCESS_FTP, - compression='zlib' - ) - current_app.logger.info(f"finished collate-letter-pdfs-to-be-sent processing for postage class {postage}") - - current_app.logger.info("finished collate-letter-pdfs-to-be-sent") - - -def _get_letters_and_sheets_volumes_and_send_to_dvla(print_run_deadline): - letters_volumes = dao_get_letters_and_sheets_volume_by_postage(print_run_deadline) - send_letters_volume_email_to_dvla(letters_volumes, print_run_deadline.date()) - - -def send_letters_volume_email_to_dvla(letters_volumes, date): - personalisation = { - 'total_volume': 0, - 'first_class_volume': 0, - 'second_class_volume': 0, - 'international_volume': 0, - 'total_sheets': 0, - 'first_class_sheets': 0, - "second_class_sheets": 0, - 'international_sheets': 0, - 'date': date.strftime("%d %B %Y") - } - for item in letters_volumes: - personalisation['total_volume'] += item.letters_count - personalisation['total_sheets'] += item.sheets_count - if f"{item.postage}_class_volume" in personalisation: - personalisation[f"{item.postage}_class_volume"] = item.letters_count - personalisation[f"{item.postage}_class_sheets"] = item.sheets_count - else: - personalisation["international_volume"] += item.letters_count - personalisation["international_sheets"] += item.sheets_count - - template = dao_get_template_by_id(current_app.config['LETTERS_VOLUME_EMAIL_TEMPLATE_ID']) - recipients = current_app.config['DVLA_EMAIL_ADDRESSES'] - reply_to = template.service.get_default_reply_to_email_address() - service = Service.query.get(current_app.config['NOTIFY_SERVICE_ID']) - - # avoid circular imports: - from app.notifications.process_notifications import ( - persist_notification, - send_notification_to_queue, - ) - for recipient in recipients: - saved_notification = persist_notification( - template_id=template.id, - template_version=template.version, - recipient=recipient, - service=service, - personalisation=personalisation, - notification_type=template.template_type, - api_key_id=None, - key_type=KEY_TYPE_NORMAL, - reply_to_text=reply_to - ) - - send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY) - - -def get_key_and_size_of_letters_to_be_sent_to_print(print_run_deadline, postage): - letters_awaiting_sending = dao_get_letters_to_be_printed(print_run_deadline, postage) - for letter in letters_awaiting_sending: - try: - letter_pdf = find_letter_pdf_in_s3(letter) - yield { - "Key": letter_pdf.key, - "Size": letter_pdf.size, - "ServiceId": str(letter.service_id), - "OrganisationId": str(letter.service.organisation_id) - } - except (BotoClientError, LetterPDFNotFound) as e: - current_app.logger.exception( - f"Error getting letter from bucket for notification: {letter.id} with reference: {letter.reference}", e) - - -def group_letters(letter_pdfs): - """ - Group letters in chunks of MAX_LETTER_PDF_ZIP_FILESIZE. Will add files to lists, never going over that size. - If a single file is (somehow) larger than MAX_LETTER_PDF_ZIP_FILESIZE that'll be in a list on it's own. - If there are no files, will just exit (rather than yielding an empty list). - """ - running_filesize = 0 - list_of_files = [] - service_id = None - for letter in letter_pdfs: - if letter['Key'].lower().endswith('.pdf'): - if not service_id: - service_id = letter['ServiceId'] - if ( - running_filesize + letter['Size'] > current_app.config['MAX_LETTER_PDF_ZIP_FILESIZE'] - or len(list_of_files) >= current_app.config['MAX_LETTER_PDF_COUNT_PER_ZIP'] - or letter['ServiceId'] != service_id - ): - yield list_of_files - running_filesize = 0 - list_of_files = [] - service_id = None - - if not service_id: - service_id = letter['ServiceId'] - running_filesize += letter['Size'] - list_of_files.append(letter) - - if list_of_files: - yield list_of_files - - -@notify_celery.task(bind=True, name='sanitise-letter', max_retries=15, default_retry_delay=300) -def sanitise_letter(self, filename): - try: - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - - current_app.logger.info('Notification ID {} Virus scan passed: {}'.format(notification.id, filename)) - - if notification.status != NOTIFICATION_PENDING_VIRUS_CHECK: - current_app.logger.info('Sanitise letter called for notification {} which is in {} state'.format( - notification.id, notification.status)) - return - - notify_celery.send_task( - name=TaskNames.SANITISE_LETTER, - kwargs={ - 'notification_id': str(notification.id), - 'filename': filename, - 'allow_international_letters': notification.service.has_permission( - INTERNATIONAL_LETTERS - ), - }, - queue=QueueNames.SANITISE_LETTERS, - ) - except Exception: - try: - current_app.logger.exception( - "RETRY: calling sanitise_letter task for notification {} failed".format(notification.id) - ) - self.retry(queue=QueueNames.RETRY) - except self.MaxRetriesExceededError: - message = "RETRY FAILED: Max retries reached. " \ - "The task sanitise_letter failed for notification {}. " \ - "Notification has been updated to technical-failure".format(notification.id) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - raise NotificationTechnicalFailureException(message) - - -@notify_celery.task(bind=True, name='process-sanitised-letter', max_retries=15, default_retry_delay=300) -def process_sanitised_letter(self, sanitise_data): - letter_details = encryption.verify_signature(sanitise_data) - - filename = letter_details['filename'] - notification_id = letter_details['notification_id'] - - current_app.logger.info('Processing sanitised letter with id {}'.format(notification_id)) - notification = get_notification_by_id(notification_id, _raise=True) - - if notification.status != NOTIFICATION_PENDING_VIRUS_CHECK: - current_app.logger.info( - 'process-sanitised-letter task called for notification {} which is in {} state'.format( - notification.id, notification.status) - ) - return - - try: - original_pdf_object = s3.get_s3_object(current_app.config['LETTERS_SCAN_BUCKET_NAME'], filename) - - if letter_details['validation_status'] == 'failed': - current_app.logger.info('Processing invalid precompiled pdf with id {} (file {})'.format( - notification_id, filename)) - - _move_invalid_letter_and_update_status( - notification=notification, - filename=filename, - scan_pdf_object=original_pdf_object, - message=letter_details['message'], - invalid_pages=letter_details['invalid_pages'], - page_count=letter_details['page_count'], - ) - return - - current_app.logger.info('Processing valid precompiled pdf with id {} (file {})'.format( - notification_id, filename)) - - billable_units = get_billable_units_for_letter_page_count(letter_details['page_count']) - is_test_key = notification.key_type == KEY_TYPE_TEST - - # Updating the notification needs to happen before the file is moved. This is so that if updating the - # notification fails, the task can retry because the file is in the same place. - update_letter_pdf_status( - reference=notification.reference, - status=NOTIFICATION_DELIVERED if is_test_key else NOTIFICATION_CREATED, - billable_units=billable_units, - recipient_address=letter_details['address'] - ) - - # The original filename could be wrong because we didn't know the postage. - # Now we know if the letter is international, we can check what the filename should be. - upload_file_name = generate_letter_pdf_filename( - reference=notification.reference, - created_at=notification.created_at, - ignore_folder=True, - postage=notification.postage - ) - - move_sanitised_letter_to_test_or_live_pdf_bucket( - filename, - is_test_key, - notification.created_at, - upload_file_name, - ) - # We've moved the sanitised PDF from the sanitise bucket, but still need to delete the original file: - original_pdf_object.delete() - - except BotoClientError: - # Boto exceptions are likely to be caused by the file(s) being in the wrong place, so retrying won't help - - # we'll need to manually investigate - current_app.logger.exception( - f"Boto error when processing sanitised letter for notification {notification.id} (file {filename})" - ) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - raise NotificationTechnicalFailureException - except Exception: - try: - current_app.logger.exception( - "RETRY: calling process_sanitised_letter task for notification {} failed".format(notification.id) - ) - self.retry(queue=QueueNames.RETRY) - except self.MaxRetriesExceededError: - message = "RETRY FAILED: Max retries reached. " \ - "The task process_sanitised_letter failed for notification {}. " \ - "Notification has been updated to technical-failure".format(notification.id) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - raise NotificationTechnicalFailureException(message) - - -def _move_invalid_letter_and_update_status( - *, notification, filename, scan_pdf_object, message=None, invalid_pages=None, page_count=None -): - try: - move_scan_to_invalid_pdf_bucket( - source_filename=filename, - message=message, - invalid_pages=invalid_pages, - page_count=page_count - ) - scan_pdf_object.delete() - - update_letter_pdf_status( - reference=notification.reference, - status=NOTIFICATION_VALIDATION_FAILED, - billable_units=0) - except BotoClientError: - current_app.logger.exception( - "Error when moving letter with id {} to invalid PDF bucket".format(notification.id) - ) - update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE) - raise NotificationTechnicalFailureException - - -@notify_celery.task(name='process-virus-scan-failed') -def process_virus_scan_failed(filename): - move_failed_pdf(filename, ScanErrorType.FAILURE) - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - updated_count = update_letter_pdf_status(reference, NOTIFICATION_VIRUS_SCAN_FAILED, billable_units=0) - - if updated_count != 1: - raise Exception( - "There should only be one letter notification for each reference. Found {} notifications".format( - updated_count - ) - ) - - error = VirusScanError('notification id {} Virus scan failed: {}'.format(notification.id, filename)) - current_app.logger.exception(error) - raise error - - -@notify_celery.task(name='process-virus-scan-error') -def process_virus_scan_error(filename): - move_failed_pdf(filename, ScanErrorType.ERROR) - reference = get_reference_from_filename(filename) - notification = dao_get_notification_by_reference(reference) - updated_count = update_letter_pdf_status(reference, NOTIFICATION_TECHNICAL_FAILURE, billable_units=0) - - if updated_count != 1: - raise Exception( - "There should only be one letter notification for each reference. Found {} notifications".format( - updated_count - ) - ) - error = VirusScanError('notification id {} Virus scan error: {}'.format(notification.id, filename)) - current_app.logger.exception(error) - raise error - - -def update_letter_pdf_status(reference, status, billable_units, recipient_address=None): - postage = None - if recipient_address: - # fix allow_international_letters - postage = PostalAddress(raw_address=recipient_address.replace(',', '\n'), - allow_international_letters=True - ).postage - postage = postage if postage in INTERNATIONAL_POSTAGE_TYPES else None - update_dict = {'status': status, 'billable_units': billable_units, 'updated_at': datetime.utcnow()} - if postage: - update_dict.update({'postage': postage, 'international': True}) - if recipient_address: - update_dict['to'] = recipient_address - update_dict['normalised_to'] = ''.join(recipient_address.split()).lower() - return dao_update_notifications_by_reference( - references=[reference], - update_dict=update_dict)[0] - - -def replay_letters_in_error(filename=None): - # This method can be used to replay letters that end up in the ERROR directory. - # We had an incident where clamAV was not processing the virus scan. - if filename: - move_error_pdf_to_scan_bucket(filename) - # call task to add the filename to anti virus queue - current_app.logger.info("Calling scan_file for: {}".format(filename)) - - if current_app.config['ANTIVIRUS_ENABLED']: - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={'filename': filename}, - queue=QueueNames.ANTIVIRUS, - ) - else: - # stub out antivirus in dev - sanitise_letter.apply_async( - [filename], - queue=QueueNames.LETTERS - ) - else: - error_files = get_file_names_from_error_bucket() - for item in error_files: - moved_file_name = item.key.split('/')[1] - current_app.logger.info("Calling scan_file for: {}".format(moved_file_name)) - move_error_pdf_to_scan_bucket(moved_file_name) - # call task to add the filename to anti virus queue - if current_app.config['ANTIVIRUS_ENABLED']: - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={'filename': moved_file_name}, - queue=QueueNames.ANTIVIRUS, - ) - else: - # stub out antivirus in dev - sanitise_letter.apply_async( - [filename], - queue=QueueNames.LETTERS - ) - - -@notify_celery.task(name='resanitise-pdf') -def resanitise_pdf(notification_id): - """ - `notification_id` is the notification id for a PDF letter which was either uploaded or sent using the API. - - This task calls the `recreate_pdf_for_precompiled_letter` template preview task which recreates the - PDF for a letter which is already sanitised and in the letters-pdf bucket. The new file that is generated - will then overwrite the existing letter in the letters-pdf bucket. - """ - notification = get_notification_by_id(notification_id) - - # folder_name is the folder that the letter is in the letters-pdf bucket e.g. '2021-10-10/' - folder_name = get_folder_name(notification.created_at) - - filename = generate_letter_pdf_filename( - reference=notification.reference, - created_at=notification.created_at, - ignore_folder=True, - postage=notification.postage - ) - - notify_celery.send_task( - name=TaskNames.RECREATE_PDF_FOR_PRECOMPILED_LETTER, - kwargs={ - 'notification_id': str(notification.id), - 'file_location': f'{folder_name}{filename}', - 'allow_international_letters': notification.service.has_permission( - INTERNATIONAL_LETTERS - ), - }, - queue=QueueNames.SANITISE_LETTERS, - ) diff --git a/app/celery/nightly_tasks.py b/app/celery/nightly_tasks.py index fda994397..b812b2915 100644 --- a/app/celery/nightly_tasks.py +++ b/app/celery/nightly_tasks.py @@ -1,15 +1,10 @@ from datetime import datetime, timedelta -import pytz from flask import current_app -from notifications_utils.clients.zendesk.zendesk_client import ( - NotifySupportTicket, -) from notifications_utils.timezones import convert_utc_to_local_timezone -from sqlalchemy import func from sqlalchemy.exc import SQLAlchemyError -from app import notify_celery, statsd_client, zendesk_client +from app import notify_celery, statsd_client from app.aws import s3 from app.celery.process_ses_receipts_tasks import check_and_queue_callback_task from app.config import QueueNames @@ -29,15 +24,7 @@ from app.dao.notifications_dao import ( from app.dao.service_data_retention_dao import ( fetch_service_data_retention_for_all_services_by_notification_type, ) -from app.models import ( - EMAIL_TYPE, - KEY_TYPE_NORMAL, - LETTER_TYPE, - NOTIFICATION_SENDING, - SMS_TYPE, - FactProcessingTime, - Notification, -) +from app.models import EMAIL_TYPE, SMS_TYPE, FactProcessingTime from app.utils import get_local_midnight_in_utc @@ -47,12 +34,6 @@ def remove_sms_email_csv_files(): _remove_csv_files([EMAIL_TYPE, SMS_TYPE]) -@notify_celery.task(name="remove_letter_jobs") -@cronitor("remove_letter_jobs") -def remove_letter_csv_files(): - _remove_csv_files([LETTER_TYPE]) - - def _remove_csv_files(job_types): jobs = dao_get_jobs_older_than_data_retention(notification_types=job_types) for job in jobs: @@ -65,7 +46,6 @@ def _remove_csv_files(job_types): def delete_notifications_older_than_retention(): delete_email_notifications_older_than_retention.apply_async(queue=QueueNames.REPORTING) delete_sms_notifications_older_than_retention.apply_async(queue=QueueNames.REPORTING) - delete_letter_notifications_older_than_retention.apply_async(queue=QueueNames.REPORTING) @notify_celery.task(name="delete-sms-notifications") @@ -80,12 +60,6 @@ def delete_email_notifications_older_than_retention(): _delete_notifications_older_than_retention_by_type('email') -@notify_celery.task(name="delete-letter-notifications") -@cronitor("delete-letter-notifications") -def delete_letter_notifications_older_than_retention(): - _delete_notifications_older_than_retention_by_type('letter') - - def _delete_notifications_older_than_retention_by_type(notification_type): flexible_data_retention = fetch_service_data_retention_for_all_services_by_notification_type(notification_type) @@ -185,110 +159,6 @@ def delete_inbound_sms(): raise -@notify_celery.task(name="raise-alert-if-letter-notifications-still-sending") -@cronitor("raise-alert-if-letter-notifications-still-sending") -def raise_alert_if_letter_notifications_still_sending(): - still_sending_count, sent_date = get_letter_notifications_still_sending_when_they_shouldnt_be() - - if still_sending_count: - message = "There are {} letters in the 'sending' state from {}".format( - still_sending_count, - sent_date.strftime('%A %d %B') - ) - # Only send alerts in production - if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: - message += ". Resolve using https://github.com/alphagov/notifications-manuals/wiki/Support-Runbook#deal-with-letters-still-in-sending" # noqa - - ticket = NotifySupportTicket( - subject=f"[{current_app.config['NOTIFY_ENVIRONMENT']}] Letters still sending", - email_ccs=current_app.config['DVLA_EMAIL_ADDRESSES'], - message=message, - ticket_type=NotifySupportTicket.TYPE_INCIDENT, - technical_ticket=True, - ticket_categories=['notify_letters'] - ) - zendesk_client.send_ticket_to_zendesk(ticket) - else: - current_app.logger.info(message) - - -def get_letter_notifications_still_sending_when_they_shouldnt_be(): - today = datetime.utcnow().date() - - # Do nothing on the weekend - if today.isoweekday() in {6, 7}: # sat, sun - return 0, None - - if today.isoweekday() in {1, 2}: # mon, tues. look for files from before the weekend - offset_days = 4 - else: - offset_days = 2 - - expected_sent_date = today - timedelta(days=offset_days) - - q = Notification.query.filter( - Notification.notification_type == LETTER_TYPE, - Notification.status == NOTIFICATION_SENDING, - Notification.key_type == KEY_TYPE_NORMAL, - func.date(Notification.sent_at) <= expected_sent_date - ) - - return q.count(), expected_sent_date - - -@notify_celery.task(name='raise-alert-if-no-letter-ack-file') -@cronitor('raise-alert-if-no-letter-ack-file') -def letter_raise_alert_if_no_ack_file_for_zip(): - # get a list of zip files since yesterday - zip_file_set = set() - today_str = datetime.utcnow().strftime('%Y-%m-%d') - yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # AWS datetime format - - for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'], - subfolder=today_str + '/zips_sent', - suffix='.TXT'): - subname = key.split('/')[-1] # strip subfolder in name - zip_file_set.add(subname.upper().replace('.ZIP.TXT', '')) - - # get acknowledgement file - ack_file_set = set() - - for key in s3.get_list_of_files_by_suffix(bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'], - subfolder='root/dispatch', suffix='.ACK.txt', last_modified=yesterday): - ack_file_set.add(key.lstrip('root/dispatch').upper().replace('.ACK.TXT', '')) # noqa - - message = '\n'.join([ - "Letter ack file does not contain all zip files sent." - "", - f"See runbook at https://github.com/alphagov/notifications-manuals/wiki/Support-Runbook#letter-ack-file-does-not-contain-all-zip-files-sent\n", # noqa - f"pdf bucket: {current_app.config['LETTERS_PDF_BUCKET_NAME']}, subfolder: {datetime.utcnow().strftime('%Y-%m-%d')}/zips_sent", # noqa - f"ack bucket: {current_app.config['DVLA_RESPONSE_BUCKET_NAME']}", - "", - f"Missing ack for zip files: {str(sorted(zip_file_set - ack_file_set))}", - ]) - - # strip empty element before comparison - ack_file_set.discard('') - zip_file_set.discard('') - - if len(zip_file_set - ack_file_set) > 0: - if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: - ticket = NotifySupportTicket( - subject="Letter acknowledge error", - message=message, - ticket_type=NotifySupportTicket.TYPE_INCIDENT, - technical_ticket=True, - ticket_categories=['notify_letters'] - ) - zendesk_client.send_ticket_to_zendesk(ticket) - current_app.logger.error(message) - - if len(ack_file_set - zip_file_set) > 0: - current_app.logger.info( - "letter ack contains zip that is not for today: {}".format(ack_file_set - zip_file_set) - ) - - @notify_celery.task(name='save-daily-notification-processing-time') @cronitor("save-daily-notification-processing-time") def save_daily_notification_processing_time(local_date=None): diff --git a/app/celery/reporting_tasks.py b/app/celery/reporting_tasks.py index b754c4f6c..123eb8ced 100644 --- a/app/celery/reporting_tasks.py +++ b/app/celery/reporting_tasks.py @@ -12,7 +12,7 @@ from app.dao.fact_billing_dao import ( ) from app.dao.fact_notification_status_dao import update_fact_notification_status from app.dao.notifications_dao import get_service_ids_with_notifications_on_date -from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE +from app.models import EMAIL_TYPE, SMS_TYPE @notify_celery.task(name="create-nightly-billing") @@ -72,10 +72,6 @@ def create_nightly_notification_status(): because all outstanding email / SMS are "timed out" after 3 days, and we reject delivery receipts after this point. - - Letter statuses don't change after 9 days. There's no "timeout" for - letters but this is the longest we've had to cope with in the past - due - to major issues with our print provider. - Because the time range of the task exceeds the minimum possible retention period (3 days), we need to choose which table to query for each service. @@ -89,8 +85,8 @@ def create_nightly_notification_status(): yesterday = convert_utc_to_local_timezone(datetime.utcnow()).date() - timedelta(days=1) - for notification_type in [SMS_TYPE, EMAIL_TYPE, LETTER_TYPE]: - days = 10 if notification_type == LETTER_TYPE else 4 + for notification_type in [SMS_TYPE, EMAIL_TYPE]: + days = 4 for i in range(days): process_day = yesterday - timedelta(days=i) diff --git a/app/celery/research_mode_tasks.py b/app/celery/research_mode_tasks.py index b19de9c2a..62344a41a 100644 --- a/app/celery/research_mode_tasks.py +++ b/app/celery/research_mode_tasks.py @@ -1,13 +1,8 @@ import json -import random -from datetime import datetime, timedelta from flask import current_app -from notifications_utils.s3 import s3upload from requests import HTTPError, request -from app import notify_celery -from app.aws.s3 import file_exists from app.celery.process_ses_receipts_tasks import process_ses_results from app.config import QueueNames from app.models import SMS_TYPE @@ -85,46 +80,6 @@ def sns_callback(notification_id, to): # "deliverytime": "2016-04-05 16:01:07"}) -@notify_celery.task(bind=True, name="create-fake-letter-response-file", max_retries=5, default_retry_delay=300) -def create_fake_letter_response_file(self, reference): - now = datetime.utcnow() - dvla_response_data = '{}|Sent|0|Sorted'.format(reference) - - # try and find a filename that hasn't been taken yet - from a random time within the last 30 seconds - for i in sorted(range(30), key=lambda _: random.random()): # nosec B311 - not security related - upload_file_name = 'NOTIFY-{}-RSP.TXT'.format((now - timedelta(seconds=i)).strftime('%Y%m%d%H%M%S')) - if not file_exists(current_app.config['DVLA_RESPONSE_BUCKET_NAME'], upload_file_name): - break - else: - raise ValueError( - 'cant create fake letter response file for {} - too many files for that time already exist on s3'.format( - reference - ) - ) - - s3upload( - filedata=dvla_response_data, - region=current_app.config['AWS_REGION'], - bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'], - file_location=upload_file_name - ) - current_app.logger.info("Fake DVLA response file {}, content [{}], uploaded to {}, created at {}".format( - upload_file_name, dvla_response_data, current_app.config['DVLA_RESPONSE_BUCKET_NAME'], now)) - - # on development we can't trigger SNS callbacks so we need to manually hit the DVLA callback endpoint - if current_app.config['NOTIFY_ENVIRONMENT'] == 'development': - make_request('letter', 'dvla', _fake_sns_s3_callback(upload_file_name), None) - - -def _fake_sns_s3_callback(filename): - message_contents = '{"Records":[{"s3":{"object":{"key":"%s"}}}]}' % (filename) # noqa - return json.dumps({ - "Type": "Notification", - "MessageId": "some-message-id", - "Message": message_contents - }) - - def ses_notification_callback(reference): ses_message_body = { 'delivery': { diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 28298aa75..d3e76e584 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -8,15 +8,13 @@ from sqlalchemy import between from sqlalchemy.exc import SQLAlchemyError from app import notify_celery, zendesk_client -from app.aws import s3 -from app.celery.letters_pdf_tasks import get_pdf_for_templated_letter from app.celery.tasks import ( get_recipient_csv_and_template_and_sender_id, process_incomplete_jobs, process_job, process_row, ) -from app.config import QueueNames, TaskNames +from app.config import QueueNames from app.dao.invited_org_user_dao import ( delete_org_invitations_created_more_than_two_days_ago, ) @@ -29,12 +27,7 @@ from app.dao.jobs_dao import ( find_jobs_with_missing_rows, find_missing_row_for_job, ) -from app.dao.notifications_dao import ( - dao_old_letters_with_created_status, - dao_precompiled_letters_still_pending_virus_check, - letters_missing_from_sending_bucket, - notifications_not_yet_sent, -) +from app.dao.notifications_dao import notifications_not_yet_sent from app.dao.provider_details_dao import ( dao_adjust_provider_priority_back_to_resting_points, ) @@ -43,7 +36,6 @@ from app.dao.services_dao import ( dao_find_services_with_high_failure_rates, ) from app.dao.users_dao import delete_codes_older_created_more_than_a_day_ago -from app.letters.utils import generate_letter_pdf_filename from app.models import ( EMAIL_TYPE, JOB_STATUS_ERROR, @@ -165,88 +157,6 @@ def replay_created_notifications(): for n in notifications_to_resend: send_notification_to_queue(notification=n, research_mode=n.service.research_mode) - # if the letter has not be send after an hour, then create a zendesk ticket - letters = letters_missing_from_sending_bucket(resend_created_notifications_older_than) - - if len(letters) > 0: - msg = "{} letters were created over an hour ago, " \ - "but do not have an updated_at timestamp or billable units. " \ - "\n Creating app.celery.letters_pdf_tasks.create_letters tasks to upload letter to S3 " \ - "and update notifications for the following notification ids: " \ - "\n {}".format(len(letters), [x.id for x in letters]) - - current_app.logger.info(msg) - for letter in letters: - get_pdf_for_templated_letter.apply_async([str(letter.id)], queue=QueueNames.CREATE_LETTERS_PDF) - - -@notify_celery.task(name='check-if-letters-still-pending-virus-check') -def check_if_letters_still_pending_virus_check(): - letters = [] - - for letter in dao_precompiled_letters_still_pending_virus_check(): - # find letter in the scan bucket - filename = generate_letter_pdf_filename( - letter.reference, - letter.created_at, - ignore_folder=True, - postage=letter.postage - ) - - if s3.file_exists(current_app.config['LETTERS_SCAN_BUCKET_NAME'], filename): - current_app.logger.warning( - f'Letter id {letter.id} got stuck in pending-virus-check. Sending off for scan again.' - ) - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={'filename': filename}, - queue=QueueNames.ANTIVIRUS, - ) - else: - letters.append(letter) - - if len(letters) > 0: - letter_ids = [(str(letter.id), letter.reference) for letter in letters] - - msg = f"""{len(letters)} precompiled letters have been pending-virus-check for over 90 minutes. - We couldn't find them in the scan bucket. We'll need to find out where the files are and kick them off - again or move them to technical failure. - - Notifications: {sorted(letter_ids)}""" - - if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: - ticket = NotifySupportTicket( - subject=f"[{current_app.config['NOTIFY_ENVIRONMENT']}] Letters still pending virus check", - message=msg, - ticket_type=NotifySupportTicket.TYPE_INCIDENT, - technical_ticket=True, - ticket_categories=['notify_letters'] - ) - zendesk_client.send_ticket_to_zendesk(ticket) - current_app.logger.error(msg) - - -@notify_celery.task(name='check-if-letters-still-in-created') -def check_if_letters_still_in_created(): - letters = dao_old_letters_with_created_status() - - if len(letters) > 0: - msg = "{} letters were created before 17.30 yesterday and still have 'created' status. " \ - "Follow runbook to resolve: " \ - "https://github.com/alphagov/notifications-manuals/wiki/Support-Runbook" \ - "#deal-with-Letters-still-in-created.".format(len(letters)) - - if current_app.config['NOTIFY_ENVIRONMENT'] in ['live', 'production', 'test']: - ticket = NotifySupportTicket( - subject=f"[{current_app.config['NOTIFY_ENVIRONMENT']}] Letters still in 'created' status", - message=msg, - ticket_type=NotifySupportTicket.TYPE_INCIDENT, - technical_ticket=True, - ticket_categories=['notify_letters'] - ) - zendesk_client.send_ticket_to_zendesk(ticket) - current_app.logger.error(msg) - @notify_celery.task(name='check-for-missing-rows-in-completed-jobs') def check_for_missing_rows_in_completed_jobs(): diff --git a/app/celery/tasks.py b/app/celery/tasks.py index dd37229f1..944b1b320 100644 --- a/app/celery/tasks.py +++ b/app/celery/tasks.py @@ -1,63 +1,39 @@ import json -from collections import defaultdict, namedtuple from datetime import datetime from flask import current_app -from notifications_utils.insensitive_dict import InsensitiveDict -from notifications_utils.postal_address import PostalAddress from notifications_utils.recipients import RecipientCSV -from notifications_utils.timezones import convert_utc_to_local_timezone from requests import HTTPError, RequestException, request from sqlalchemy.exc import IntegrityError, SQLAlchemyError -from app import create_random_identifier, create_uuid, encryption, notify_celery +from app import create_uuid, encryption, notify_celery from app.aws import s3 -from app.celery import letters_pdf_tasks, provider_tasks, research_mode_tasks +from app.celery import provider_tasks from app.config import QueueNames -from app.dao.daily_sorted_letter_dao import ( - dao_create_or_update_daily_sorted_letter, -) from app.dao.inbound_sms_dao import dao_get_inbound_sms_by_id from app.dao.jobs_dao import dao_get_job_by_id, dao_update_job from app.dao.notifications_dao import ( dao_get_last_notification_added_for_job_id, - dao_get_notification_history_by_reference, - dao_update_notifications_by_reference, get_notification_by_id, - update_notification_status_by_reference, ) -from app.dao.provider_details_dao import ( - get_provider_details_by_notification_type, -) -from app.dao.returned_letters_dao import insert_or_update_returned_letters from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id from app.dao.service_inbound_api_dao import get_service_inbound_api_for_service from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id from app.dao.templates_dao import dao_get_template_by_id -from app.exceptions import DVLAException, NotificationTechnicalFailureException from app.models import ( - DVLA_RESPONSE_STATUS_SENT, EMAIL_TYPE, JOB_STATUS_CANCELLED, JOB_STATUS_FINISHED, JOB_STATUS_IN_PROGRESS, JOB_STATUS_PENDING, KEY_TYPE_NORMAL, - LETTER_TYPE, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_RETURNED_LETTER, - NOTIFICATION_SENDING, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_TEMPORARY_FAILURE, SMS_TYPE, - DailySortedLetter, ) from app.notifications.process_notifications import persist_notification from app.notifications.validators import check_service_over_daily_message_limit from app.serialised_models import SerialisedService, SerialisedTemplate from app.service.utils import service_allowed_to_send_to -from app.utils import DATETIME_FORMAT, get_reference_from_personalisation +from app.utils import DATETIME_FORMAT from app.v2.errors import TooManyRequestsError @@ -136,8 +112,7 @@ def process_row(row, template, job, service, sender_id=None): send_fns = { SMS_TYPE: save_sms, - EMAIL_TYPE: save_email, - LETTER_TYPE: save_letter + EMAIL_TYPE: save_email } send_fn = send_fns[template_type] @@ -341,103 +316,6 @@ def save_api_email_or_sms(self, encrypted_notification): current_app.logger.error(f"Max retry failed Failed to persist notification {notification['id']}") -@notify_celery.task(bind=True, name="save-letter", max_retries=5, default_retry_delay=300) -def save_letter( - self, - service_id, - notification_id, - encrypted_notification, -): - notification = encryption.decrypt(encrypted_notification) - - postal_address = PostalAddress.from_personalisation( - InsensitiveDict(notification['personalisation']) - ) - - service = SerialisedService.from_id(service_id) - template = SerialisedTemplate.from_id_and_service_id( - notification['template'], - service_id=service.id, - version=notification['template_version'], - ) - - try: - # if we don't want to actually send the letter, then start it off in SENDING so we don't pick it up - status = NOTIFICATION_CREATED if not service.research_mode else NOTIFICATION_SENDING - - saved_notification = persist_notification( - template_id=notification['template'], - template_version=notification['template_version'], - postage=postal_address.postage if postal_address.international else template.postage, - recipient=postal_address.normalised, - service=service, - personalisation=notification['personalisation'], - notification_type=LETTER_TYPE, - api_key_id=None, - key_type=KEY_TYPE_NORMAL, - created_at=datetime.utcnow(), - job_id=notification['job'], - job_row_number=notification['row_number'], - notification_id=notification_id, - reference=create_random_identifier(), - client_reference=get_reference_from_personalisation(notification['personalisation']), - reply_to_text=template.reply_to_text, - status=status - ) - - if not service.research_mode: - letters_pdf_tasks.get_pdf_for_templated_letter.apply_async( - [str(saved_notification.id)], - queue=QueueNames.CREATE_LETTERS_PDF - ) - elif current_app.config['NOTIFY_ENVIRONMENT'] in ['preview', 'development']: - research_mode_tasks.create_fake_letter_response_file.apply_async( - (saved_notification.reference,), - queue=QueueNames.RESEARCH_MODE - ) - else: - update_notification_status_by_reference(saved_notification.reference, 'delivered') - - current_app.logger.debug("Letter {} created at {}".format(saved_notification.id, saved_notification.created_at)) - except SQLAlchemyError as e: - handle_exception(self, notification, notification_id, e) - - -@notify_celery.task(bind=True, name='update-letter-notifications-to-sent') -def update_letter_notifications_to_sent_to_dvla(self, notification_references): - # This task will be called by the FTP app to update notifications as sent to DVLA - provider = get_provider_details_by_notification_type(LETTER_TYPE)[0] - - updated_count, _ = dao_update_notifications_by_reference( - notification_references, - { - 'status': NOTIFICATION_SENDING, - 'sent_by': provider.identifier, - 'sent_at': datetime.utcnow(), - 'updated_at': datetime.utcnow() - } - ) - - current_app.logger.info("Updated {} letter notifications to sending".format(updated_count)) - - -@notify_celery.task(bind=True, name='update-letter-notifications-to-error') -def update_letter_notifications_to_error(self, notification_references): - # This task will be called by the FTP app to update notifications as sent to DVLA - - updated_count, _ = dao_update_notifications_by_reference( - notification_references, - { - 'status': NOTIFICATION_TECHNICAL_FAILURE, - 'updated_at': datetime.utcnow() - } - ) - message = "Updated {} letter notifications to technical-failure with references {}".format( - updated_count, notification_references - ) - raise NotificationTechnicalFailureException(message) - - def handle_exception(task, notification, notification_id, exc): if not get_notification_by_id(notification_id): retry_msg = '{task} notification for job {job} row number {row} and notification id {noti}'.format( @@ -457,108 +335,6 @@ def handle_exception(task, notification, notification_id, exc): current_app.logger.error('Max retry failed' + retry_msg) -@notify_celery.task(bind=True, name='update-letter-notifications-statuses') -def update_letter_notifications_statuses(self, filename): - notification_updates = parse_dvla_file(filename) - - temporary_failures = [] - - for update in notification_updates: - check_billable_units(update) - update_letter_notification(filename, temporary_failures, update) - if temporary_failures: - # This will alert Notify that DVLA was unable to deliver the letters, we need to investigate - message = "DVLA response file: {filename} has failed letters with notification.reference {failures}" \ - .format(filename=filename, failures=temporary_failures) - raise DVLAException(message) - - -@notify_celery.task(bind=True, name="record-daily-sorted-counts") -def record_daily_sorted_counts(self, filename): - sorted_letter_counts = defaultdict(int) - notification_updates = parse_dvla_file(filename) - for update in notification_updates: - sorted_letter_counts[update.cost_threshold.lower()] += 1 - - unknown_status = sorted_letter_counts.keys() - {'unsorted', 'sorted'} - if unknown_status: - message = 'DVLA response file: {} contains unknown Sorted status {}'.format( - filename, unknown_status.__repr__() - ) - raise DVLAException(message) - - billing_date = get_local_billing_date_from_filename(filename) - persist_daily_sorted_letter_counts(day=billing_date, - file_name=filename, - sorted_letter_counts=sorted_letter_counts) - - -def parse_dvla_file(filename): - bucket_location = '{}-ftp'.format(current_app.config['NOTIFY_EMAIL_DOMAIN']) - response_file_content = s3.get_s3_file(bucket_location, filename) - - try: - return process_updates_from_file(response_file_content) - except TypeError: - raise DVLAException('DVLA response file: {} has an invalid format'.format(filename)) - - -def get_local_billing_date_from_filename(filename): - # exclude seconds from the date since we don't need it. We got a date ending in 60 second - which is not valid. - datetime_string = filename.split('-')[1][:-2] - datetime_obj = datetime.strptime(datetime_string, '%Y%m%d%H%M') - return convert_utc_to_local_timezone(datetime_obj).date() - - -def persist_daily_sorted_letter_counts(day, file_name, sorted_letter_counts): - daily_letter_count = DailySortedLetter( - billing_day=day, - file_name=file_name, - unsorted_count=sorted_letter_counts['unsorted'], - sorted_count=sorted_letter_counts['sorted'] - ) - dao_create_or_update_daily_sorted_letter(daily_letter_count) - - -def process_updates_from_file(response_file): - NotificationUpdate = namedtuple('NotificationUpdate', ['reference', 'status', 'page_count', 'cost_threshold']) - notification_updates = [NotificationUpdate(*line.split('|')) for line in response_file.splitlines()] - return notification_updates - - -def update_letter_notification(filename, temporary_failures, update): - if update.status == DVLA_RESPONSE_STATUS_SENT: - status = NOTIFICATION_DELIVERED - else: - status = NOTIFICATION_TEMPORARY_FAILURE - temporary_failures.append(update.reference) - - updated_count, _ = dao_update_notifications_by_reference( - references=[update.reference], - update_dict={"status": status, - "updated_at": datetime.utcnow() - } - ) - - if not updated_count: - msg = "Update letter notification file {filename} failed: notification either not found " \ - "or already updated from delivered. Status {status} for notification reference {reference}".format( - filename=filename, status=status, reference=update.reference) - current_app.logger.info(msg) - - -def check_billable_units(notification_update): - notification = dao_get_notification_history_by_reference(notification_update.reference) - - if int(notification_update.page_count) != notification.billable_units: - msg = 'Notification with id {} has {} billable_units but DVLA says page count is {}'.format( - notification.id, notification.billable_units, notification_update.page_count) - try: - raise DVLAException(msg) - except DVLAException: - current_app.logger.exception(msg) - - @notify_celery.task(bind=True, name="send-inbound-sms", max_retries=5, default_retry_delay=300) def send_inbound_sms_to_service(self, inbound_sms_id, service_id): inbound_api = get_service_inbound_api_for_service(service_id=service_id) @@ -647,19 +423,3 @@ def process_incomplete_job(job_id): process_row(row, template, job, job.service, sender_id=sender_id) job_complete(job, resumed=True) - - -@notify_celery.task(name='process-returned-letters-list') -def process_returned_letters_list(notification_references): - updated, updated_history = dao_update_notifications_by_reference( - notification_references, - {"status": NOTIFICATION_RETURNED_LETTER} - ) - - insert_or_update_returned_letters(notification_references) - - current_app.logger.info( - "Updated {} letter notifications ({} history notifications, from {} references) to returned-letter".format( - updated, updated_history, len(notification_references) - ) - ) diff --git a/app/commands.py b/app/commands.py index 5f0ffb91b..5a831c741 100644 --- a/app/commands.py +++ b/app/commands.py @@ -19,12 +19,7 @@ from sqlalchemy.orm.exc import NoResultFound from app import db from app.aws import s3 -from app.celery.letters_pdf_tasks import ( - get_pdf_for_templated_letter, - resanitise_pdf, -) -from app.celery.tasks import process_row, record_daily_sorted_counts -from app.config import QueueNames +from app.celery.tasks import process_row from app.dao.annual_billing_dao import ( dao_create_or_update_annual_billing_for_year, set_default_free_allowance_for_service, @@ -61,7 +56,6 @@ from app.models import ( AnnualBilling, Domain, EmailBranding, - LetterBranding, Notification, Organisation, Service, @@ -163,22 +157,6 @@ def insert_inbound_numbers_from_file(file_name): db.session.commit() -@notify_command(name='replay-create-pdf-for-templated-letter') -@click.option('-n', '--notification_id', type=click.UUID, required=True, - help="Notification id of the letter that needs the get_pdf_for_templated_letter task replayed") -def replay_create_pdf_for_templated_letter(notification_id): - print("Create task to get_pdf_for_templated_letter for notification: {}".format(notification_id)) - get_pdf_for_templated_letter.apply_async([str(notification_id)], queue=QueueNames.CREATE_LETTERS_PDF) - - -@notify_command(name='recreate-pdf-for-precompiled-or-uploaded-letter') -@click.option('-n', '--notification_id', type=click.UUID, required=True, - help="Notification ID of the precompiled or uploaded letter") -def recreate_pdf_for_precompiled_or_uploaded_letter(notification_id): - print(f"Call resanitise_pdf task for notification: {notification_id}") - resanitise_pdf.apply_async([str(notification_id)], queue=QueueNames.LETTERS) - - def setup_commands(application): application.cli.add_command(command_group) @@ -233,11 +211,11 @@ def rebuild_ft_billing_for_day(service_id, day): def bulk_invite_user_to_service(file_name, service_id, user_id, auth_type, permissions): # permissions # manage_users | manage_templates | manage_settings - # send messages ==> send_texts | send_emails | send_letters + # send messages ==> send_texts | send_emails # Access API keys manage_api_keys # platform_admin # view_activity - # "send_texts,send_emails,send_letters,view_activity" + # "send_texts,send_emails,view_activity" from app.service_invite.rest import create_invited_user file = open(file_name) for email_address in file: @@ -266,52 +244,6 @@ def bulk_invite_user_to_service(file_name, service_id, user_id, auth_type, permi file.close() -@notify_command(name='populate-notification-postage') -@click.option( - '-s', - '--start_date', - default=datetime(2017, 2, 1), - help="start date inclusive", - type=click_dt(format='%Y-%m-%d') -) -@statsd(namespace="tasks") -def populate_notification_postage(start_date): - current_app.logger.info('populating historical notification postage') - - total_updated = 0 - - while start_date < datetime.utcnow(): - # process in ten day chunks - end_date = start_date + timedelta(days=10) - - sql = \ - """ - UPDATE {} - SET postage = 'second' - WHERE notification_type = 'letter' AND - postage IS NULL AND - created_at BETWEEN :start AND :end - """ - - execution_start = datetime.utcnow() - - if end_date > datetime.utcnow() - timedelta(days=8): - print('Updating notifications table as well') - db.session.execute(sql.format('notifications'), {'start': start_date, 'end': end_date}) - - result = db.session.execute(sql.format('notification_history'), {'start': start_date, 'end': end_date}) - db.session.commit() - - current_app.logger.info('notification postage took {}ms. Migrated {} rows for {} to {}'.format( - datetime.utcnow() - execution_start, result.rowcount, start_date, end_date)) - - start_date += timedelta(days=10) - - total_updated += result.rowcount - - current_app.logger.info('Total inserted/updated records = {}'.format(total_updated)) - - @notify_command(name='archive-jobs-created-between-dates') @click.option('-s', '--start_date', required=True, help="start date inclusive", type=click_dt(format='%Y-%m-%d')) @click.option('-e', '--end_date', required=True, help="end date inclusive", type=click_dt(format='%Y-%m-%d')) @@ -342,18 +274,6 @@ def update_jobs_archived_flag(start_date, end_date): current_app.logger.info('Total archived jobs = {}'.format(total_updated)) -@notify_command(name='replay-daily-sorted-count-files') -@click.option('-f', '--file_extension', required=False, help="File extension to search for, defaults to rs.txt") -@statsd(namespace="tasks") -def replay_daily_sorted_count_files(file_extension): - bucket_location = '{}-ftp'.format(current_app.config['NOTIFY_EMAIL_DOMAIN']) - for filename in s3.get_list_of_files_by_suffix(bucket_name=bucket_location, - subfolder='root/dispatch', - suffix=file_extension or '.rs.txt'): - print("Create task to record daily sorted counts for file: ", filename) - record_daily_sorted_counts.apply_async([filename], queue=QueueNames.NOTIFY) - - @notify_command(name='populate-organisations-from-file') @click.option('-f', '--file_name', required=True, help="Pipe delimited file containing organisation name, sector, crown, argeement_signed, domains") @@ -364,7 +284,6 @@ def populate_organisations_from_file(file_name): # [3] argeement_signed:: TRUE | FALSE # [4] domains:: comma separated list of domains related to the organisation # [5] email branding name: name of the default email branding for the org - # [6] letter branding name: name of the default letter branding for the org # The expectation is that the organisation, organisation_to_service # and user_to_organisation will be cleared before running this command. @@ -385,19 +304,13 @@ def populate_organisations_from_file(file_name): email_branding_column = columns[5].strip() if len(email_branding_column) > 0: email_branding = EmailBranding.query.filter(EmailBranding.name == email_branding_column).one() - letter_branding = None - letter_branding_column = columns[6].strip() - if len(letter_branding_column) > 0: - letter_branding = LetterBranding.query.filter(LetterBranding.name == letter_branding_column).one() data = { 'name': columns[0], 'active': True, 'agreement_signed': boolean_or_none(columns[3]), 'crown': boolean_or_none(columns[2]), 'organisation_type': columns[1].lower(), - 'email_branding_id': email_branding.id if email_branding else None, - 'letter_branding_id': letter_branding.id if letter_branding else None - + 'email_branding_id': email_branding.id if email_branding else None } org = Organisation(**data) try: @@ -452,57 +365,6 @@ def populate_organisation_agreement_details_from_file(file_name): db.session.commit() -@notify_command(name='get-letter-details-from-zips-sent-file') -@click.argument('file_paths', required=True, nargs=-1) -@statsd(namespace="tasks") -def get_letter_details_from_zips_sent_file(file_paths): - """Get notification details from letters listed in zips_sent file(s) - - This takes one or more file paths for the zips_sent files in S3 as its parameters, for example: - get-letter-details-from-zips-sent-file '2019-04-01/zips_sent/filename_1' '2019-04-01/zips_sent/filename_2' - """ - - rows_from_file = [] - - for path in file_paths: - file_contents = s3.get_s3_file( - bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'], - file_location=path - ) - rows_from_file.extend(json.loads(file_contents)) - - notification_references = tuple(row[18:34] for row in rows_from_file) - get_letters_data_from_references(notification_references) - - -@notify_command(name='get-notification-and-service-ids-for-letters-that-failed-to-print') -@click.option('-f', '--file_name', required=True, - help="""Full path of the file to upload, file should contain letter filenames, one per line""") -def get_notification_and_service_ids_for_letters_that_failed_to_print(file_name): - print("Getting service and notification ids for letter filenames list {}".format(file_name)) - file = open(file_name) - references = tuple([row[7:23] for row in file]) - - get_letters_data_from_references(tuple(references)) - file.close() - - -def get_letters_data_from_references(notification_references): - sql = """ - SELECT id, service_id, template_id, reference, job_id, created_at - FROM notifications - WHERE reference IN :notification_references - ORDER BY service_id, job_id""" - result = db.session.execute(sql, {'notification_references': notification_references}).fetchall() - - with open('zips_sent_details.csv', 'w') as csvfile: - csv_writer = csv.writer(csvfile) - csv_writer.writerow(['notification_id', 'service_id', 'template_id', 'reference', 'job_id', 'created_at']) - - for row in result: - csv_writer.writerow(row) - - @notify_command(name='associate-services-to-organisations') def associate_services_to_organisations(): services = Service.get_history_model().query.filter_by( @@ -521,12 +383,11 @@ def associate_services_to_organisations(): @notify_command(name='populate-service-volume-intentions') @click.option('-f', '--file_name', required=True, - help="Pipe delimited file containing service_id, SMS, email, letters") + help="Pipe delimited file containing service_id, SMS, email") def populate_service_volume_intentions(file_name): # [0] service_id # [1] SMS:: volume intentions for service # [2] Email:: volume intentions for service - # [3] Letters:: volume intentions for service with open(file_name, 'r') as f: for line in itertools.islice(f, 1, None): @@ -535,7 +396,6 @@ def populate_service_volume_intentions(file_name): service = dao_fetch_service_by_id(columns[0]) service.volume_sms = columns[1] service.volume_email = columns[2] - service.volume_letter = columns[3] dao_update_service(service) print("populate-service-volume-intentions complete") diff --git a/app/config.py b/app/config.py index 2c29a6b7b..2b09617b3 100644 --- a/app/config.py +++ b/app/config.py @@ -19,14 +19,10 @@ class QueueNames(object): JOBS = 'job-tasks' RETRY = 'retry-tasks' NOTIFY = 'notify-internal-tasks' - PROCESS_FTP = 'process-ftp-tasks' - CREATE_LETTERS_PDF = 'create-letters-pdf-tasks' CALLBACKS = 'service-callbacks' CALLBACKS_RETRY = 'service-callbacks-retry' - LETTERS = 'letter-tasks' SMS_CALLBACKS = 'sms-callbacks' ANTIVIRUS = 'antivirus-tasks' - SANITISE_LETTERS = 'sanitise-letter-tasks' SAVE_API_EMAIL = 'save-api-email-tasks' SAVE_API_SMS = 'save-api-sms-tasks' @@ -43,10 +39,8 @@ class QueueNames(object): QueueNames.JOBS, QueueNames.RETRY, QueueNames.NOTIFY, - QueueNames.CREATE_LETTERS_PDF, QueueNames.CALLBACKS, QueueNames.CALLBACKS_RETRY, - QueueNames.LETTERS, QueueNames.SMS_CALLBACKS, QueueNames.SAVE_API_EMAIL, QueueNames.SAVE_API_SMS, @@ -55,11 +49,7 @@ class QueueNames(object): class TaskNames(object): PROCESS_INCOMPLETE_JOBS = 'process-incomplete-jobs' - ZIP_AND_SEND_LETTER_PDFS = 'zip-and-send-letter-pdfs' SCAN_FILE = 'scan-file' - SANITISE_LETTER = 'sanitise-and-upload-letter' - CREATE_PDF_FOR_TEMPLATED_LETTER = 'create-pdf-for-templated-letter' - RECREATE_PDF_FOR_PRECOMPILED_LETTER = 'recreate-pdf-for-precompiled-letter' class Config(object): @@ -142,10 +132,6 @@ class Config(object): MAX_FAILED_LOGIN_COUNT = 10 API_RATE_LIMIT_ENABLED = True - # be careful increasing this size without being sure that we won't see slowness in pysftp - MAX_LETTER_PDF_ZIP_FILESIZE = 40 * 1024 * 1024 # 40mb - MAX_LETTER_PDF_COUNT_PER_ZIP = 500 - # Default data CONFIG_FILES = path.dirname(__file__) + '/config_files/' @@ -261,49 +247,11 @@ class Config(object): 'schedule': crontab(hour=4, minute=0), 'options': {'queue': QueueNames.PERIODIC}, }, - 'remove_letter_jobs': { - 'task': 'remove_letter_jobs', - 'schedule': crontab(hour=4, minute=20), - # since we mark jobs as archived - 'options': {'queue': QueueNames.PERIODIC}, - }, - 'check-if-letters-still-in-created': { - 'task': 'check-if-letters-still-in-created', - 'schedule': crontab(day_of_week='mon-fri', hour=7, minute=0), - 'options': {'queue': QueueNames.PERIODIC} - }, - 'check-if-letters-still-pending-virus-check': { - 'task': 'check-if-letters-still-pending-virus-check', - 'schedule': crontab(day_of_week='mon-fri', hour='9,15', minute=0), - 'options': {'queue': QueueNames.PERIODIC} - }, 'check-for-services-with-high-failure-rates-or-sending-to-tv-numbers': { 'task': 'check-for-services-with-high-failure-rates-or-sending-to-tv-numbers', 'schedule': crontab(day_of_week='mon-fri', hour=10, minute=30), 'options': {'queue': QueueNames.PERIODIC} }, - 'raise-alert-if-letter-notifications-still-sending': { - 'task': 'raise-alert-if-letter-notifications-still-sending', - 'schedule': crontab(hour=17, minute=00), - 'options': {'queue': QueueNames.PERIODIC} - }, - # The collate-letter-pdf does assume it is called in an hour that BST does not make a - # difference to the truncate date which translates to the filename to process - 'collate-letter-pdfs-to-be-sent': { - 'task': 'collate-letter-pdfs-to-be-sent', - 'schedule': crontab(hour=17, minute=50), - 'options': {'queue': QueueNames.PERIODIC} - }, - 'raise-alert-if-no-letter-ack-file': { - 'task': 'raise-alert-if-no-letter-ack-file', - 'schedule': crontab(hour=23, minute=00), - 'options': {'queue': QueueNames.PERIODIC} - }, - 'trigger-link-tests': { - 'task': 'trigger-link-tests', - 'schedule': timedelta(minutes=15), - 'options': {'queue': QueueNames.PERIODIC} - }, } } diff --git a/app/config_files/templates.json b/app/config_files/templates.json index ae208541b..527831c40 100644 --- a/app/config_files/templates.json +++ b/app/config_files/templates.json @@ -14,7 +14,7 @@ "content": ["((user_name)) has invited you to collaborate on ((service_name)) on U.S. Notify.", "", "", - "U.S. Notify makes it easy to keep people updated by helping you send text messages, emails and letters.", + "U.S. Notify makes it easy to keep people updated by helping you send text messages and emails.", "", "", "Click this link to create an account on U.S. Notify:", @@ -165,7 +165,7 @@ "name": "Notify organization invitation email", "type": "email", "subject": "((user_name)) has invited you to collaborate on ((organisation_name)) on U.S. Notify", - "content": ["((user_name)) has invited you to collaborate on ((organisation_name)) on U.S. Notify.","","","U.S. Notify makes it easy to keep people updated by helping you send text messages, emails and letters.","","","Open this link to create an account on U.S. Notify:","","((url))","","","This invitation will stop working at midnight tomorrow. This is to keep ((organisation_name)) secure."] + "content": ["((user_name)) has invited you to collaborate on ((organisation_name)) on U.S. Notify.","","","U.S. Notify makes it easy to keep people updated by helping you send text messages and emails.","","","Open this link to create an account on U.S. Notify:","","((url))","","","This invitation will stop working at midnight tomorrow. This is to keep ((organisation_name)) secure."] }, { "id": "c73f1d71-4049-46d5-a647-d013bdeca3f0", @@ -232,7 +232,7 @@ "", "((signed_by_name)) has accepted the U.S. Notify data sharing and financial agreement on your behalf, for ((org_name)).", "", - "U.S. Notify lets teams in the public sector send emails, text messages and letters. It’s built and run by a team in the TTS Public Benefits Studio (part of GSA).", + "U.S. Notify lets teams in the public sector send text messages and emails. It’s built and run by a team in the TTS Public Benefits Studio (part of GSA).", "", "If you need another copy of the agreement you can download it here: ((mou_link))", "", diff --git a/app/dao/daily_sorted_letter_dao.py b/app/dao/daily_sorted_letter_dao.py deleted file mode 100644 index 720eddf38..000000000 --- a/app/dao/daily_sorted_letter_dao.py +++ /dev/null @@ -1,38 +0,0 @@ -from datetime import datetime - -from sqlalchemy.dialects.postgresql import insert - -from app import db -from app.dao.dao_utils import autocommit -from app.models import DailySortedLetter - - -def dao_get_daily_sorted_letter_by_billing_day(billing_day): - return DailySortedLetter.query.filter_by( - billing_day=billing_day - ).first() - - -@autocommit -def dao_create_or_update_daily_sorted_letter(new_daily_sorted_letter): - ''' - This uses the Postgres upsert to avoid race conditions when two threads try and insert - at the same row. The excluded object refers to values that we tried to insert but were - rejected. - http://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#insert-on-conflict-upsert - ''' - table = DailySortedLetter.__table__ - stmt = insert(table).values( - billing_day=new_daily_sorted_letter.billing_day, - file_name=new_daily_sorted_letter.file_name, - unsorted_count=new_daily_sorted_letter.unsorted_count, - sorted_count=new_daily_sorted_letter.sorted_count) - stmt = stmt.on_conflict_do_update( - index_elements=[table.c.billing_day, table.c.file_name], - set_={ - 'unsorted_count': stmt.excluded.unsorted_count, - 'sorted_count': stmt.excluded.sorted_count, - 'updated_at': datetime.utcnow() - } - ) - db.session.connection().execute(stmt) diff --git a/app/dao/fact_billing_dao.py b/app/dao/fact_billing_dao.py index 3175665e1..d6bbd54b4 100644 --- a/app/dao/fact_billing_dao.py +++ b/app/dao/fact_billing_dao.py @@ -14,17 +14,13 @@ from app.dao.date_util import ( from app.dao.organisation_dao import dao_get_organisation_live_services from app.models import ( EMAIL_TYPE, - INTERNATIONAL_POSTAGE_TYPES, KEY_TYPE_NORMAL, KEY_TYPE_TEAM, - LETTER_TYPE, - NOTIFICATION_STATUS_TYPES_BILLABLE_FOR_LETTERS, NOTIFICATION_STATUS_TYPES_BILLABLE_SMS, NOTIFICATION_STATUS_TYPES_SENT_EMAILS, SMS_TYPE, AnnualBilling, FactBilling, - LetterRate, NotificationAllTimeView, NotificationHistory, Organisation, @@ -120,84 +116,6 @@ def fetch_sms_billing_for_all_services(start_date, end_date): return query.all() -def fetch_letter_costs_and_totals_for_all_services(start_date, end_date): - query = db.session.query( - Organisation.name.label("organisation_name"), - Organisation.id.label("organisation_id"), - Service.name.label("service_name"), - Service.id.label("service_id"), - func.sum(FactBilling.notifications_sent).label("total_letters"), - func.sum(FactBilling.notifications_sent * FactBilling.rate).label("letter_cost") - ).select_from( - Service - ).outerjoin( - Service.organisation - ).join( - FactBilling, FactBilling.service_id == Service.id, - ).filter( - FactBilling.service_id == Service.id, - FactBilling.local_date >= start_date, - FactBilling.local_date <= end_date, - FactBilling.notification_type == LETTER_TYPE, - ).group_by( - Organisation.name, - Organisation.id, - Service.id, - Service.name, - ).order_by( - Organisation.name, - Service.name - ) - - return query.all() - - -def fetch_letter_line_items_for_all_services(start_date, end_date): - formatted_postage = case( - [(FactBilling.postage.in_(INTERNATIONAL_POSTAGE_TYPES), "international")], else_=FactBilling.postage - ).label("postage") - - postage_order = case( - (formatted_postage == "second", 1), - (formatted_postage == "first", 2), - (formatted_postage == "international", 3), - else_=0 # assumes never get 0 as a result - ) - - query = db.session.query( - Organisation.name.label("organisation_name"), - Organisation.id.label("organisation_id"), - Service.name.label("service_name"), - Service.id.label("service_id"), - FactBilling.rate.label("letter_rate"), - formatted_postage, - func.sum(FactBilling.notifications_sent).label("letters_sent"), - ).select_from( - Service - ).outerjoin( - Service.organisation - ).join( - FactBilling, FactBilling.service_id == Service.id, - ).filter( - FactBilling.local_date >= start_date, - FactBilling.local_date <= end_date, - FactBilling.notification_type == LETTER_TYPE, - ).group_by( - Organisation.name, - Organisation.id, - Service.id, - Service.name, - FactBilling.rate, - formatted_postage - ).order_by( - Organisation.name, - Service.name, - postage_order, - FactBilling.rate, - ) - return query.all() - - def fetch_billing_totals_for_year(service_id, year): """ Returns a row for each distinct rate and notification_type from ft_billing @@ -233,7 +151,6 @@ def fetch_billing_totals_for_year(service_id, year): for query in [ query_service_sms_usage_for_year(service_id, year).subquery(), query_service_email_usage_for_year(service_id, year).subquery(), - query_service_letter_usage_for_year(service_id, year).subquery(), ] ]).subquery() ).order_by( @@ -244,20 +161,19 @@ def fetch_billing_totals_for_year(service_id, year): def fetch_monthly_billing_for_year(service_id, year): """ - Returns a row for each distinct rate, notification_type, postage and month + Returns a row for each distinct rate, notification_type, and month from ft_billing over the specified financial year e.g. ( rate=0.0165, notification_type=sms, - postage=none, month=2022-04-01 00:00:00, notifications_sent=123, ... ) - The "postage" field is "none" except for letters. Each subquery takes care - of anything specific to the notification type e.g. rate multipliers for SMS. + Each subquery takes care of anything specific to the notification type e.g. + rate multipliers for SMS. Since the data in ft_billing is only refreshed once a day for all services, we also update the table on-the-fly if we need accurate data for this year. @@ -276,7 +192,6 @@ def fetch_monthly_billing_for_year(service_id, year): db.session.query( query.c.rate.label("rate"), query.c.notification_type.label("notification_type"), - query.c.postage.label("postage"), func.date_trunc('month', query.c.local_date).cast(Date).label("month"), func.sum(query.c.notifications_sent).label("notifications_sent"), @@ -287,13 +202,11 @@ def fetch_monthly_billing_for_year(service_id, year): ).group_by( query.c.rate, query.c.notification_type, - query.c.postage, 'month', ) for query in [ query_service_sms_usage_for_year(service_id, year).subquery(), query_service_email_usage_for_year(service_id, year).subquery(), - query_service_letter_usage_for_year(service_id, year).subquery(), ] ]).subquery() ).order_by( @@ -308,7 +221,6 @@ def query_service_email_usage_for_year(service_id, year): return db.session.query( FactBilling.local_date, - FactBilling.postage, # should always be "none" FactBilling.notifications_sent, FactBilling.billable_units.label("chargeable_units"), FactBilling.rate, @@ -324,30 +236,6 @@ def query_service_email_usage_for_year(service_id, year): ) -def query_service_letter_usage_for_year(service_id, year): - year_start, year_end = get_financial_year_dates(year) - - return db.session.query( - FactBilling.local_date, - FactBilling.postage, - FactBilling.notifications_sent, - # We can't use billable_units here as it represents the - # sheet count for letters, which is already accounted for - # in the rate. We actually charge per letter, not sheet. - FactBilling.notifications_sent.label("chargeable_units"), - FactBilling.rate, - FactBilling.notification_type, - (FactBilling.notifications_sent * FactBilling.rate).label("cost"), - literal(0).label("free_allowance_used"), - FactBilling.notifications_sent.label("charged_units"), - ).filter( - FactBilling.service_id == service_id, - FactBilling.local_date >= year_start, - FactBilling.local_date <= year_end, - FactBilling.notification_type == LETTER_TYPE - ) - - def query_service_sms_usage_for_year(service_id, year): """ Returns rows from the ft_billing table with some calculated values like cost, @@ -410,7 +298,6 @@ def query_service_sms_usage_for_year(service_id, year): return db.session.query( FactBilling.local_date, - FactBilling.postage, # should always be "none" FactBilling.notifications_sent, this_rows_chargeable_units.label("chargeable_units"), FactBilling.rate, @@ -453,7 +340,7 @@ def fetch_billing_data_for_day(process_day, service_id=None, check_permissions=F services = [Service.query.get(service_id)] for service in services: - for notification_type in (SMS_TYPE, EMAIL_TYPE, LETTER_TYPE): + for notification_type in (SMS_TYPE, EMAIL_TYPE): if (not check_permissions) or service.has_permission(notification_type): results = _query_for_billing_data( notification_type=notification_type, @@ -476,8 +363,6 @@ def _query_for_billing_data(notification_type, start_date, end_date, service): literal('ses').label('sent_by'), literal(0).label('rate_multiplier'), literal(False).label('international'), - literal(None).label('letter_page_count'), - literal('none').label('postage'), literal(0).label('billable_units'), func.count().label('notifications_sent'), ).filter( @@ -503,8 +388,6 @@ def _query_for_billing_data(notification_type, start_date, end_date, service): sent_by.label('sent_by'), rate_multiplier.label('rate_multiplier'), international.label('international'), - literal(None).label('letter_page_count'), - literal('none').label('postage'), func.sum(NotificationAllTimeView.billable_units).label('billable_units'), func.count().label('notifications_sent'), ).filter( @@ -521,40 +404,9 @@ def _query_for_billing_data(notification_type, start_date, end_date, service): international, ) - def _letter_query(): - rate_multiplier = func.coalesce(NotificationAllTimeView.rate_multiplier, 1).cast(Integer) - postage = func.coalesce(NotificationAllTimeView.postage, 'none') - return db.session.query( - NotificationAllTimeView.template_id, - literal(service.crown).label('crown'), - literal(service.id).label('service_id'), - literal(notification_type).label('notification_type'), - literal('dvla').label('sent_by'), - rate_multiplier.label('rate_multiplier'), - NotificationAllTimeView.international, - NotificationAllTimeView.billable_units.label('letter_page_count'), - postage.label('postage'), - func.sum(NotificationAllTimeView.billable_units).label('billable_units'), - func.count().label('notifications_sent'), - ).filter( - NotificationAllTimeView.status.in_(NOTIFICATION_STATUS_TYPES_BILLABLE_FOR_LETTERS), - NotificationAllTimeView.key_type.in_((KEY_TYPE_NORMAL, KEY_TYPE_TEAM)), - NotificationAllTimeView.created_at >= start_date, - NotificationAllTimeView.created_at < end_date, - NotificationAllTimeView.notification_type == notification_type, - NotificationAllTimeView.service_id == service.id - ).group_by( - NotificationAllTimeView.template_id, - rate_multiplier, - NotificationAllTimeView.billable_units, - postage, - NotificationAllTimeView.international - ) - query_funcs = { SMS_TYPE: _sms_query, EMAIL_TYPE: _email_query, - LETTER_TYPE: _letter_query } query = query_funcs[notification_type]() @@ -562,9 +414,8 @@ def _query_for_billing_data(notification_type, start_date, end_date, service): def get_rates_for_billing(): - non_letter_rates = Rate.query.order_by(desc(Rate.valid_from)).all() - letter_rates = LetterRate.query.order_by(desc(LetterRate.start_date)).all() - return non_letter_rates, letter_rates + rates = Rate.query.order_by(desc(Rate.valid_from)).all() + return rates def get_service_ids_that_need_billing_populated(start_date, end_date): @@ -573,34 +424,20 @@ def get_service_ids_that_need_billing_populated(start_date, end_date): ).filter( NotificationHistory.created_at >= start_date, NotificationHistory.created_at <= end_date, - NotificationHistory.notification_type.in_([SMS_TYPE, EMAIL_TYPE, LETTER_TYPE]), + NotificationHistory.notification_type.in_([SMS_TYPE, EMAIL_TYPE]), NotificationHistory.billable_units != 0 ).distinct().all() def get_rate( - non_letter_rates, letter_rates, notification_type, date, crown=None, letter_page_count=None, post_class='second' + rates, notification_type, date, crown=None ): start_of_day = get_local_midnight_in_utc(date) - if notification_type == LETTER_TYPE: - if letter_page_count == 0: - return 0 - # if crown is not set default to true, this is okay because the rates are the same for both crown and non-crown. - crown = crown or True + if notification_type == SMS_TYPE: return next( r.rate - for r in letter_rates if ( - start_of_day >= r.start_date and - crown == r.crown and - letter_page_count == r.sheet_count and - post_class == r.post_class - ) - ) - elif notification_type == SMS_TYPE: - return next( - r.rate - for r in non_letter_rates if ( + for r in rates if ( notification_type == r.notification_type and start_of_day >= r.valid_from ) @@ -610,14 +447,11 @@ def get_rate( def update_fact_billing(data, process_day): - non_letter_rates, letter_rates = get_rates_for_billing() - rate = get_rate(non_letter_rates, - letter_rates, + rates = get_rates_for_billing() + rate = get_rate(rates, data.notification_type, process_day, - data.crown, - data.letter_page_count, - data.postage) + data.crown) billing_record = create_billing_record(data, rate, process_day) table = FactBilling.__table__ @@ -638,7 +472,6 @@ def update_fact_billing(data, process_day): billable_units=billing_record.billable_units, notifications_sent=billing_record.notifications_sent, rate=billing_record.rate, - postage=billing_record.postage, ) stmt = stmt.on_conflict_do_update( @@ -664,36 +497,10 @@ def create_billing_record(data, rate, process_day): billable_units=data.billable_units, notifications_sent=data.notifications_sent, rate=rate, - postage=data.postage, ) return billing_record -def fetch_letter_costs_for_organisation(organisation_id, start_date, end_date): - query = db.session.query( - Service.name.label("service_name"), - Service.id.label("service_id"), - func.sum(FactBilling.notifications_sent * FactBilling.rate).label("letter_cost") - ).select_from( - Service - ).join( - FactBilling, FactBilling.service_id == Service.id, - ).filter( - FactBilling.local_date >= start_date, - FactBilling.local_date <= end_date, - FactBilling.notification_type == LETTER_TYPE, - Service.organisation_id == organisation_id, - Service.restricted.is_(False) - ).group_by( - Service.id, - Service.name, - ).order_by( - Service.name - ) - - return query.all() - - def fetch_email_usage_for_organisation(organisation_id, start_date, end_date): query = db.session.query( Service.name.label("service_name"), @@ -840,12 +647,10 @@ def fetch_usage_year_for_organisation(organisation_id, year): 'sms_billable_units': 0, 'chargeable_billable_sms': 0, 'sms_cost': 0.0, - 'letter_cost': 0.0, 'emails_sent': 0, 'active': service.active } sms_usages = fetch_sms_billing_for_organisation(organisation_id, year) - letter_usages = fetch_letter_costs_for_organisation(organisation_id, year_start, year_end) email_usages = fetch_email_usage_for_organisation(organisation_id, year_start, year_end) for usage in sms_usages: service_with_usage[str(usage.service_id)] = { @@ -856,12 +661,9 @@ def fetch_usage_year_for_organisation(organisation_id, year): 'sms_billable_units': usage.sms_billable_units, 'chargeable_billable_sms': usage.chargeable_billable_sms, 'sms_cost': float(usage.sms_cost), - 'letter_cost': 0.0, 'emails_sent': 0, 'active': usage.active } - for letter_usage in letter_usages: - service_with_usage[str(letter_usage.service_id)]['letter_cost'] = float(letter_usage.letter_cost) for email_usage in email_usages: service_with_usage[str(email_usage.service_id)]['emails_sent'] = email_usage.emails_sent @@ -910,16 +712,6 @@ def fetch_daily_volumes_for_platform(start_date, end_date): (FactBilling.notification_type == EMAIL_TYPE, FactBilling.notifications_sent) ], else_=0 )).label('email_totals'), - func.sum(case( - [ - (FactBilling.notification_type == LETTER_TYPE, FactBilling.notifications_sent) - ], else_=0 - )).label('letter_totals'), - func.sum(case( - [ - (FactBilling.notification_type == LETTER_TYPE, FactBilling.billable_units) - ], else_=0 - )).label('letter_sheet_totals') ).filter( FactBilling.local_date >= start_date, FactBilling.local_date <= end_date @@ -935,8 +727,6 @@ def fetch_daily_volumes_for_platform(start_date, end_date): func.sum( daily_volume_stats.c.sms_fragments_times_multiplier).label('sms_chargeable_units'), func.sum(daily_volume_stats.c.email_totals).label('email_totals'), - func.sum(daily_volume_stats.c.letter_totals).label('letter_totals'), - func.sum(daily_volume_stats.c.letter_sheet_totals).label('letter_sheet_totals') ).group_by( daily_volume_stats.c.local_date ).order_by( @@ -988,17 +778,6 @@ def fetch_volumes_by_service(start_date, end_date): func.sum(case([ (FactBilling.notification_type == EMAIL_TYPE, FactBilling.notifications_sent) ], else_=0)).label('email_totals'), - func.sum(case([ - (FactBilling.notification_type == LETTER_TYPE, FactBilling.notifications_sent) - ], else_=0)).label('letter_totals'), - func.sum(case([ - (FactBilling.notification_type == LETTER_TYPE, FactBilling.notifications_sent * FactBilling.rate) - ], else_=0)).label("letter_cost"), - func.sum(case( - [ - (FactBilling.notification_type == LETTER_TYPE, FactBilling.billable_units) - ], else_=0 - )).label('letter_sheet_totals') ).filter( FactBilling.local_date >= start_date, FactBilling.local_date <= end_date @@ -1029,9 +808,6 @@ def fetch_volumes_by_service(start_date, end_date): func.coalesce(func.sum(volume_stats.c.sms_fragments_times_multiplier), 0 ).label("sms_chargeable_units"), func.coalesce(func.sum(volume_stats.c.email_totals), 0).label("email_totals"), - func.coalesce(func.sum(volume_stats.c.letter_totals), 0).label("letter_totals"), - func.coalesce(func.sum(volume_stats.c.letter_cost), 0).label("letter_cost"), - func.coalesce(func.sum(volume_stats.c.letter_sheet_totals), 0).label("letter_sheet_totals") ).select_from( Service ).outerjoin( diff --git a/app/dao/fact_notification_status_dao.py b/app/dao/fact_notification_status_dao.py index ed1647802..e4ab55203 100644 --- a/app/dao/fact_notification_status_dao.py +++ b/app/dao/fact_notification_status_dao.py @@ -156,7 +156,7 @@ def fetch_notification_status_for_service_for_today_and_7_previous_days(service_ query = db.session.query( *([ Template.name.label("template_name"), - Template.is_precompiled_letter, + False, # TODO: this is related to is_precompiled_letter all_stats_table.c.template_id ] if by_template else []), all_stats_table.c.notification_type, @@ -168,7 +168,7 @@ def fetch_notification_status_for_service_for_today_and_7_previous_days(service_ query = query.filter(all_stats_table.c.template_id == Template.id) return query.group_by( - *([Template.name, Template.is_precompiled_letter, all_stats_table.c.template_id] if by_template else []), + *([Template.name, all_stats_table.c.template_id] if by_template else []), all_stats_table.c.notification_type, all_stats_table.c.status, ).all() @@ -333,7 +333,6 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): FactNotificationStatus.template_id.label('template_id'), Template.name.label('name'), Template.template_type.label('template_type'), - Template.is_precompiled_letter.label('is_precompiled_letter'), extract('month', FactNotificationStatus.local_date).label('month'), extract('year', FactNotificationStatus.local_date).label('year'), func.sum(FactNotificationStatus.notification_count).label('count') @@ -349,7 +348,6 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): FactNotificationStatus.template_id, Template.name, Template.template_type, - Template.is_precompiled_letter, extract('month', FactNotificationStatus.local_date).label('month'), extract('year', FactNotificationStatus.local_date).label('year'), ).order_by( @@ -366,7 +364,6 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): Notification.template_id.label('template_id'), Template.name.label('name'), Template.template_type.label('template_type'), - Template.is_precompiled_letter.label('is_precompiled_letter'), extract('month', month).label('month'), extract('year', month).label('year'), func.count().label('count') @@ -389,7 +386,6 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): query = db.session.query( all_stats_table.c.template_id, all_stats_table.c.name, - all_stats_table.c.is_precompiled_letter, all_stats_table.c.template_type, func.cast(all_stats_table.c.month, Integer).label('month'), func.cast(all_stats_table.c.year, Integer).label('year'), @@ -397,7 +393,6 @@ def fetch_monthly_template_usage_for_service(start_date, end_date, service_id): ).group_by( all_stats_table.c.template_id, all_stats_table.c.name, - all_stats_table.c.is_precompiled_letter, all_stats_table.c.template_type, all_stats_table.c.month, all_stats_table.c.year, @@ -424,11 +419,6 @@ def get_total_notifications_for_date_range(start_date, end_date): (FactNotificationStatus.notification_type == 'sms', FactNotificationStatus.notification_count) ], else_=0)).label('sms'), - func.sum(case( - [ - (FactNotificationStatus.notification_type == 'letter', FactNotificationStatus.notification_count) - ], - else_=0)).label('letters'), ).filter( FactNotificationStatus.key_type != KEY_TYPE_TEST, ).group_by( diff --git a/app/dao/jobs_dao.py b/app/dao/jobs_dao.py index 575f8a65b..49b12e898 100644 --- a/app/dao/jobs_dao.py +++ b/app/dao/jobs_dao.py @@ -2,23 +2,13 @@ import uuid from datetime import datetime, timedelta from flask import current_app -from notifications_utils.letter_timings import ( - CANCELLABLE_JOB_LETTER_STATUSES, - letter_can_be_cancelled, -) from sqlalchemy import and_, asc, desc, func from app import db -from app.dao.dao_utils import autocommit -from app.dao.templates_dao import dao_get_template_by_id from app.models import ( - JOB_STATUS_CANCELLED, JOB_STATUS_FINISHED, JOB_STATUS_PENDING, JOB_STATUS_SCHEDULED, - LETTER_TYPE, - NOTIFICATION_CANCELLED, - NOTIFICATION_CREATED, FactNotificationStatus, Job, Notification, @@ -183,40 +173,6 @@ def dao_get_jobs_older_than_data_retention(notification_types): return jobs -@autocommit -def dao_cancel_letter_job(job): - number_of_notifications_cancelled = Notification.query.filter( - Notification.job_id == job.id - ).update({'status': NOTIFICATION_CANCELLED, - 'updated_at': datetime.utcnow(), - 'billable_units': 0}) - job.job_status = JOB_STATUS_CANCELLED - dao_update_job(job) - return number_of_notifications_cancelled - - -def can_letter_job_be_cancelled(job): - template = dao_get_template_by_id(job.template_id) - if template.template_type != LETTER_TYPE: - return False, "Only letter jobs can be cancelled through this endpoint. This is not a letter job." - - notifications = Notification.query.filter( - Notification.job_id == job.id - ).all() - count_notifications = len(notifications) - if job.job_status != JOB_STATUS_FINISHED or count_notifications != job.notification_count: - return False, "We are still processing these letters, please try again in a minute." - count_cancellable_notifications = len([ - n for n in notifications if n.status in CANCELLABLE_JOB_LETTER_STATUSES - ]) - if count_cancellable_notifications != job.notification_count or not letter_can_be_cancelled( - NOTIFICATION_CREATED, job.created_at - ): - return False, "It’s too late to cancel sending, these letters have already been sent." - - return True, None - - def find_jobs_with_missing_rows(): # Jobs can be a maximum of 100,000 rows. It typically takes 10 minutes to create all those notifications. # Using 20 minutes as a condition seems reasonable. diff --git a/app/dao/letter_branding_dao.py b/app/dao/letter_branding_dao.py deleted file mode 100644 index b07106596..000000000 --- a/app/dao/letter_branding_dao.py +++ /dev/null @@ -1,29 +0,0 @@ -from app import db -from app.dao.dao_utils import autocommit -from app.models import LetterBranding - - -def dao_get_letter_branding_by_id(letter_branding_id): - return LetterBranding.query.filter(LetterBranding.id == letter_branding_id).one() - - -def dao_get_letter_branding_by_name(letter_branding_name): - return LetterBranding.query.filter_by(name=letter_branding_name).first() - - -def dao_get_all_letter_branding(): - return LetterBranding.query.order_by(LetterBranding.name).all() - - -@autocommit -def dao_create_letter_branding(letter_branding): - db.session.add(letter_branding) - - -@autocommit -def dao_update_letter_branding(letter_branding_id, **kwargs): - letter_branding = LetterBranding.query.get(letter_branding_id) - for key, value in kwargs.items(): - setattr(letter_branding, key, value or None) - db.session.add(letter_branding) - return letter_branding diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 22f04592c..d560e61eb 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -1,6 +1,5 @@ from datetime import datetime, timedelta -from botocore.exceptions import ClientError from flask import current_app from notifications_utils.international_billing_rates import ( INTERNATIONAL_BILLING_RATES, @@ -10,10 +9,6 @@ from notifications_utils.recipients import ( try_validate_and_format_phone_number, validate_and_format_email_address, ) -from notifications_utils.timezones import ( - convert_local_timezone_to_utc, - convert_utc_to_local_timezone, -) from sqlalchemy import asc, desc, func, or_, union from sqlalchemy.orm import joinedload from sqlalchemy.orm.exc import NoResultFound @@ -23,19 +18,15 @@ from werkzeug.datastructures import MultiDict from app import create_uuid, db from app.dao.dao_utils import autocommit -from app.letters.utils import LetterPDFNotFound, find_letter_pdf_in_s3 from app.models import ( EMAIL_TYPE, - KEY_TYPE_NORMAL, KEY_TYPE_TEST, - LETTER_TYPE, NOTIFICATION_CREATED, NOTIFICATION_PENDING, NOTIFICATION_PENDING_VIRUS_CHECK, NOTIFICATION_PERMANENT_FAILURE, NOTIFICATION_SENDING, NOTIFICATION_SENT, - NOTIFICATION_STATUS_TYPES_COMPLETED, NOTIFICATION_TEMPORARY_FAILURE, SMS_TYPE, FactNotificationStatus, @@ -140,7 +131,7 @@ def update_notification_status_by_id(notification_id, status, sent_by=None): @autocommit def update_notification_status_by_reference(reference, status): - # this is used to update letters and emails + # this is used to update emails notification = Notification.query.filter(Notification.reference == reference).first() if not notification: @@ -304,7 +295,7 @@ def insert_notification_history_delete_notifications( SELECT id, job_id, job_row_number, service_id, template_id, template_version, api_key_id, key_type, notification_type, created_at, sent_at, sent_by, updated_at, reference, billable_units, client_reference, international, phone_prefix, rate_multiplier, notification_status, - created_by_id, postage, document_download_count + created_by_id, document_download_count FROM notifications WHERE service_id = :service_id AND notification_type = :notification_type @@ -312,20 +303,6 @@ def insert_notification_history_delete_notifications( AND key_type in ('normal', 'team') limit :qry_limit """ - select_into_temp_table_for_letters = """ - CREATE TEMP TABLE NOTIFICATION_ARCHIVE ON COMMIT DROP AS - SELECT id, job_id, job_row_number, service_id, template_id, template_version, api_key_id, - key_type, notification_type, created_at, sent_at, sent_by, updated_at, reference, billable_units, - client_reference, international, phone_prefix, rate_multiplier, notification_status, - created_by_id, postage, document_download_count - FROM notifications - WHERE service_id = :service_id - AND notification_type = :notification_type - AND created_at < :timestamp_to_delete_backwards_from - AND notification_status NOT IN ('pending-virus-check', 'created', 'sending') - AND key_type in ('normal', 'team') - limit :qry_limit - """ # Insert into NotificationHistory if the row already exists do nothing. insert_query = """ insert into notification_history @@ -344,8 +321,7 @@ def insert_notification_history_delete_notifications( "qry_limit": qry_limit } - select_to_use = select_into_temp_table_for_letters if notification_type == 'letter' else select_into_temp_table - db.session.execute(select_to_use, input_params) + db.session.execute(select_into_temp_table, input_params) result = db.session.execute("select count(*) from NOTIFICATION_ARCHIVE").fetchone()[0] @@ -363,10 +339,6 @@ def move_notifications_to_notification_history( qry_limit=50000 ): deleted = 0 - if notification_type == LETTER_TYPE: - _delete_letters_from_s3( - notification_type, service_id, timestamp_to_delete_backwards_from, qry_limit - ) delete_count_per_call = 1 while delete_count_per_call > 0: delete_count_per_call = insert_notification_history_delete_notifications( @@ -389,32 +361,6 @@ def move_notifications_to_notification_history( return deleted -def _delete_letters_from_s3( - notification_type, service_id, date_to_delete_from, query_limit -): - letters_to_delete_from_s3 = db.session.query( - Notification - ).filter( - Notification.notification_type == notification_type, - Notification.created_at < date_to_delete_from, - Notification.service_id == service_id, - # although letters in non completed statuses do have PDFs in s3, they do not exist in the - # production-letters-pdf bucket as they never made it that far so we do not try and delete - # them from it - Notification.status.in_(NOTIFICATION_STATUS_TYPES_COMPLETED) - ).limit(query_limit).all() - for letter in letters_to_delete_from_s3: - try: - letter_pdf = find_letter_pdf_in_s3(letter) - letter_pdf.delete() - except ClientError: - current_app.logger.exception( - "Error deleting S3 object for letter: {}".format(letter.id)) - except LetterPDFNotFound: - current_app.logger.warning( - "No S3 object to delete for letter: {}".format(letter.id)) - - @autocommit def dao_delete_notifications_by_id(notification_id): db.session.query(Notification).filter( @@ -493,10 +439,8 @@ def dao_get_notifications_by_recipient_or_reference( except InvalidEmailError: normalised = search_term.lower() - elif notification_type in {LETTER_TYPE, None}: - # For letters, we store the address without spaces, so we need - # to removes spaces from the search term to match. We also do - # this when a notification type isn’t provided (this will + elif notification_type is None: + # This happens when a notification type isn’t provided (this will # happen if a user doesn’t have permission to see the dashboard) # because email addresses and phone numbers will never be stored # with spaces either. @@ -504,7 +448,7 @@ def dao_get_notifications_by_recipient_or_reference( else: raise TypeError( - f'Notification type must be {EMAIL_TYPE}, {SMS_TYPE}, {LETTER_TYPE} or None' + f'Notification type must be {EMAIL_TYPE}, {SMS_TYPE}, or None' ) normalised = escape_special_characters(normalised) @@ -559,8 +503,7 @@ def dao_get_notifications_processing_time_stats(start_date, end_date): created_at > 'START DATE' AND created_at < 'END DATE' AND api_key_id IS NOT NULL AND - key_type != 'test' AND - notification_type != 'letter'; + key_type != 'test'; """ under_10_secs = Notification.sent_at - Notification.created_at <= timedelta(seconds=10) sum_column = functions.coalesce(functions.sum( @@ -580,7 +523,6 @@ def dao_get_notifications_processing_time_stats(start_date, end_date): Notification.created_at < end_date, Notification.api_key_id.isnot(None), Notification.key_type != KEY_TYPE_TEST, - Notification.notification_type != LETTER_TYPE ).one() @@ -605,97 +547,6 @@ def notifications_not_yet_sent(should_be_sending_after_seconds, notification_typ return notifications -def dao_get_letters_to_be_printed(print_run_deadline, postage, query_limit=10000): - """ - Return all letters created before the print run deadline that have not yet been sent. This yields in batches of 10k - to prevent the query taking too long and eating up too much memory. As each 10k batch is yielded, the - get_key_and_size_of_letters_to_be_sent_to_print function will go and fetch the s3 data, andhese start sending off - tasks to the notify-ftp app to send them. - - CAUTION! Modify this query with caution. Modifying filters etc is fine, but if we join onto another table, then - there may be undefined behaviour. Essentially we need each ORM object returned for each row to be unique, - and we should avoid modifying state of returned objects. - - For more reading: - https://docs.sqlalchemy.org/en/13/orm/query.html?highlight=yield_per#sqlalchemy.orm.query.Query.yield_per - https://www.mail-archive.com/sqlalchemy@googlegroups.com/msg12443.html - """ - notifications = Notification.query.filter( - Notification.created_at < convert_local_timezone_to_utc(print_run_deadline), - Notification.notification_type == LETTER_TYPE, - Notification.status == NOTIFICATION_CREATED, - Notification.key_type == KEY_TYPE_NORMAL, - Notification.postage == postage, - Notification.billable_units > 0 - ).order_by( - Notification.service_id, - Notification.created_at - ).yield_per(query_limit) - return notifications - - -def dao_get_letters_and_sheets_volume_by_postage(print_run_deadline): - notifications = db.session.query( - func.count(Notification.id).label('letters_count'), - func.sum(Notification.billable_units).label('sheets_count'), - Notification.postage - ).filter( - Notification.created_at < convert_local_timezone_to_utc(print_run_deadline), - Notification.notification_type == LETTER_TYPE, - Notification.status == NOTIFICATION_CREATED, - Notification.key_type == KEY_TYPE_NORMAL, - Notification.billable_units > 0 - ).group_by( - Notification.postage - ).order_by( - Notification.postage - ).all() - return notifications - - -def dao_old_letters_with_created_status(): - yesterday_bst = convert_utc_to_local_timezone(datetime.utcnow()) - timedelta(days=1) - last_processing_deadline = yesterday_bst.replace(hour=17, minute=30, second=0, microsecond=0) - - notifications = Notification.query.filter( - Notification.created_at < convert_local_timezone_to_utc(last_processing_deadline), - Notification.notification_type == LETTER_TYPE, - Notification.status == NOTIFICATION_CREATED - ).order_by( - Notification.created_at - ).all() - return notifications - - -def letters_missing_from_sending_bucket(seconds_to_subtract): - older_than_date = datetime.utcnow() - timedelta(seconds=seconds_to_subtract) - # We expect letters to have a `created` status, updated_at timestamp and billable units greater than zero. - notifications = Notification.query.filter( - Notification.billable_units == 0, - Notification.updated_at == None, # noqa - Notification.status == NOTIFICATION_CREATED, - Notification.created_at <= older_than_date, - Notification.notification_type == LETTER_TYPE, - Notification.key_type == KEY_TYPE_NORMAL - ).order_by( - Notification.created_at - ).all() - - return notifications - - -def dao_precompiled_letters_still_pending_virus_check(): - ninety_minutes_ago = datetime.utcnow() - timedelta(seconds=5400) - - notifications = Notification.query.filter( - Notification.created_at < ninety_minutes_ago, - Notification.status == NOTIFICATION_PENDING_VIRUS_CHECK - ).order_by( - Notification.created_at - ).all() - return notifications - - def _duplicate_update_warning(notification, status): current_app.logger.info( ( diff --git a/app/dao/organisation_dao.py b/app/dao/organisation_dao.py index 198f1e30a..d489a8a54 100644 --- a/app/dao/organisation_dao.py +++ b/app/dao/organisation_dao.py @@ -89,9 +89,6 @@ def dao_update_organisation(organisation_id, **kwargs): if 'email_branding_id' in kwargs: _update_organisation_services(organisation, 'email_branding') - if 'letter_branding_id' in kwargs: - _update_organisation_services(organisation, 'letter_branding') - return num_updated diff --git a/app/dao/permissions_dao.py b/app/dao/permissions_dao.py index 16daaa3c4..733358fd0 100644 --- a/app/dao/permissions_dao.py +++ b/app/dao/permissions_dao.py @@ -6,7 +6,6 @@ from app.models import ( MANAGE_TEMPLATES, MANAGE_USERS, SEND_EMAILS, - SEND_LETTERS, SEND_TEXTS, VIEW_ACTIVITY, Permission, @@ -19,7 +18,6 @@ default_service_permissions = [ MANAGE_SETTINGS, SEND_TEXTS, SEND_EMAILS, - SEND_LETTERS, MANAGE_API_KEYS, VIEW_ACTIVITY] diff --git a/app/dao/returned_letters_dao.py b/app/dao/returned_letters_dao.py deleted file mode 100644 index d118aa3c1..000000000 --- a/app/dao/returned_letters_dao.py +++ /dev/null @@ -1,118 +0,0 @@ -from datetime import datetime - -from sqlalchemy import desc, func -from sqlalchemy.dialects.postgresql import insert - -from app import db -from app.dao.dao_utils import autocommit -from app.models import ( - Job, - Notification, - NotificationHistory, - ReturnedLetter, - Template, - User, -) -from app.utils import midnight_n_days_ago - - -def _get_notification_ids_for_references(references): - notification_ids = db.session.query(Notification.id, Notification.service_id).filter( - Notification.reference.in_(references) - ).all() - - notification_history_ids = db.session.query(NotificationHistory.id, NotificationHistory.service_id).filter( - NotificationHistory.reference.in_(references) - ).all() - - return notification_ids + notification_history_ids - - -@autocommit -def insert_or_update_returned_letters(references): - data = _get_notification_ids_for_references(references) - for row in data: - table = ReturnedLetter.__table__ - - stmt = insert(table).values( - reported_at=datetime.utcnow().date(), - service_id=row.service_id, - notification_id=row.id, - created_at=datetime.utcnow() - ) - - stmt = stmt.on_conflict_do_update( - index_elements=[table.c.notification_id], - set_={ - 'reported_at': datetime.utcnow().date(), - 'updated_at': datetime.utcnow() - } - ) - db.session.connection().execute(stmt) - - -def fetch_recent_returned_letter_count(service_id): - return db.session.query( - func.count(ReturnedLetter.notification_id).label('returned_letter_count'), - ).filter( - ReturnedLetter.service_id == service_id, - ReturnedLetter.reported_at > midnight_n_days_ago(7), - ).one() - - -def fetch_most_recent_returned_letter(service_id): - return db.session.query( - ReturnedLetter.reported_at, - ).filter( - ReturnedLetter.service_id == service_id, - ).order_by( - desc(ReturnedLetter.reported_at) - ).first() - - -def fetch_returned_letter_summary(service_id): - return db.session.query( - func.count(ReturnedLetter.notification_id).label('returned_letter_count'), - ReturnedLetter.reported_at - ).filter( - ReturnedLetter.service_id == service_id, - ).group_by( - ReturnedLetter.reported_at - ).order_by( - desc(ReturnedLetter.reported_at) - ).all() - - -def fetch_returned_letters(service_id, report_date): - results = [] - for table in [Notification, NotificationHistory]: - query = db.session.query( - ReturnedLetter.notification_id, - ReturnedLetter.reported_at, - table.client_reference, - table.created_at, - Template.name.label('template_name'), - table.template_id, - table.template_version, - Template.hidden, - table.api_key_id, - table.created_by_id, - User.name.label('user_name'), - User.email_address, - Job.original_file_name, - (table.job_row_number + 1).label('job_row_number') # row numbers start at 0 - ).outerjoin( - User, table.created_by_id == User.id - ).outerjoin( - Job, table.job_id == Job.id - ).filter( - ReturnedLetter.service_id == service_id, - ReturnedLetter.reported_at == report_date, - ReturnedLetter.notification_id == table.id, - table.template_id == Template.id - ).order_by( - desc(ReturnedLetter.reported_at), desc(table.created_at) - ) - results = results + query.all() - results = sorted(results, key=lambda i: i.created_at, reverse=True) - return results diff --git a/app/dao/service_letter_contact_dao.py b/app/dao/service_letter_contact_dao.py deleted file mode 100644 index 7ee409142..000000000 --- a/app/dao/service_letter_contact_dao.py +++ /dev/null @@ -1,105 +0,0 @@ -from sqlalchemy import desc - -from app import db -from app.dao.dao_utils import autocommit -from app.models import ServiceLetterContact, Template - - -def dao_get_letter_contacts_by_service_id(service_id): - letter_contacts = db.session.query( - ServiceLetterContact - ).filter( - ServiceLetterContact.service_id == service_id, - ServiceLetterContact.archived == False # noqa - ).order_by( - desc(ServiceLetterContact.is_default), - desc(ServiceLetterContact.created_at) - ).all() - - return letter_contacts - - -def dao_get_letter_contact_by_id(service_id, letter_contact_id): - letter_contact = db.session.query( - ServiceLetterContact - ).filter( - ServiceLetterContact.service_id == service_id, - ServiceLetterContact.id == letter_contact_id, - ServiceLetterContact.archived == False # noqa - ).one() - return letter_contact - - -@autocommit -def add_letter_contact_for_service(service_id, contact_block, is_default): - old_default = _get_existing_default(service_id) - if is_default: - _reset_old_default_to_false(old_default) - - new_letter_contact = ServiceLetterContact( - service_id=service_id, - contact_block=contact_block, - is_default=is_default - ) - db.session.add(new_letter_contact) - return new_letter_contact - - -@autocommit -def update_letter_contact(service_id, letter_contact_id, contact_block, is_default): - old_default = _get_existing_default(service_id) - # if we want to make this the default, ensure there are no other existing defaults - if is_default: - _reset_old_default_to_false(old_default) - - letter_contact_update = ServiceLetterContact.query.get(letter_contact_id) - letter_contact_update.contact_block = contact_block - letter_contact_update.is_default = is_default - db.session.add(letter_contact_update) - return letter_contact_update - - -@autocommit -def archive_letter_contact(service_id, letter_contact_id): - letter_contact_to_archive = ServiceLetterContact.query.filter_by( - id=letter_contact_id, - service_id=service_id - ).one() - - Template.query.filter_by( - service_letter_contact_id=letter_contact_id - ).update({ - 'service_letter_contact_id': None - }) - - letter_contact_to_archive.archived = True - - db.session.add(letter_contact_to_archive) - return letter_contact_to_archive - - -def _get_existing_default(service_id): - old_defaults = [ - x for x - in dao_get_letter_contacts_by_service_id(service_id=service_id) - if x.is_default - ] - - if len(old_defaults) == 0: - return None - - if len(old_defaults) == 1: - return old_defaults[0] - - raise Exception( - "There should only be one default letter contact for each service. Service {} has {}".format( - service_id, - len(old_defaults) - ) - ) - - -def _reset_old_default_to_false(old_default): - if old_default: - old_default.is_default = False - db.session.add(old_default) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index eda383edb..e10a702b5 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -15,13 +15,10 @@ from app.dao.service_user_dao import dao_get_service_user from app.dao.template_folder_dao import dao_get_valid_template_folders_by_id from app.models import ( EMAIL_TYPE, - INTERNATIONAL_LETTERS, INTERNATIONAL_SMS_TYPE, KEY_TYPE_TEST, - LETTER_TYPE, NOTIFICATION_PERMANENT_FAILURE, SMS_TYPE, - UPLOAD_LETTERS, AnnualBilling, ApiKey, FactBilling, @@ -35,7 +32,6 @@ from app.models import ( Service, ServiceContactList, ServiceEmailReplyTo, - ServiceLetterContact, ServicePermission, ServiceSmsSender, Template, @@ -53,10 +49,7 @@ from app.utils import ( DEFAULT_SERVICE_PERMISSIONS = [ SMS_TYPE, EMAIL_TYPE, - LETTER_TYPE, INTERNATIONAL_SMS_TYPE, - UPLOAD_LETTERS, - INTERNATIONAL_LETTERS, ] @@ -113,16 +106,12 @@ def dao_fetch_live_services_data(): Service.go_live_at.label("live_date"), Service.volume_sms.label('sms_volume_intent'), Service.volume_email.label('email_volume_intent'), - Service.volume_letter.label('letter_volume_intent'), case([ (this_year_ft_billing.c.notification_type == 'email', func.sum(this_year_ft_billing.c.notifications_sent)) ], else_=0).label("email_totals"), case([ (this_year_ft_billing.c.notification_type == 'sms', func.sum(this_year_ft_billing.c.notifications_sent)) ], else_=0).label("sms_totals"), - case([ - (this_year_ft_billing.c.notification_type == 'letter', func.sum(this_year_ft_billing.c.notifications_sent)) - ], else_=0).label("letter_totals"), AnnualBilling.free_sms_fragment_limit, ).join( Service.annual_billing @@ -156,7 +145,6 @@ def dao_fetch_live_services_data(): Service.go_live_at, Service.volume_sms, Service.volume_email, - Service.volume_letter, this_year_ft_billing.c.notification_type, AnnualBilling.free_sms_fragment_limit, ).order_by( @@ -169,7 +157,6 @@ def dao_fetch_live_services_data(): if existing_service is not None: existing_service["email_totals"] += row.email_totals existing_service["sms_totals"] += row.sms_totals - existing_service["letter_totals"] += row.letter_totals else: results.append(row._asdict()) return results @@ -315,9 +302,6 @@ def dao_create_service( if organisation.email_branding: service.email_branding = organisation.email_branding - if organisation.letter_branding: - service.letter_branding = organisation.letter_branding - if organisation: service.crown = organisation.crown service.count_as_live = not user.platform_admin @@ -378,7 +362,6 @@ def delete_service_and_all_associated_db_objects(service): _delete_commit(ServiceSmsSender.query.filter_by(service=service)) _delete_commit(ServiceEmailReplyTo.query.filter_by(service=service)) - _delete_commit(ServiceLetterContact.query.filter_by(service=service)) _delete_commit(ServiceContactList.query.filter_by(service=service)) _delete_commit(InvitedUser.query.filter_by(service=service)) _delete_commit(Permission.query.filter_by(service=service)) diff --git a/app/dao/templates_dao.py b/app/dao/templates_dao.py index fb669565f..225718f93 100644 --- a/app/dao/templates_dao.py +++ b/app/dao/templates_dao.py @@ -1,19 +1,11 @@ import uuid from datetime import datetime -from flask import current_app from sqlalchemy import asc, desc from app import db from app.dao.dao_utils import VersionOptions, autocommit, version_class -from app.dao.users_dao import get_user_by_id -from app.models import ( - LETTER_TYPE, - SECOND_CLASS, - Template, - TemplateHistory, - TemplateRedacted, -) +from app.models import Template, TemplateHistory, TemplateRedacted @autocommit @@ -46,37 +38,6 @@ def dao_update_template(template): db.session.add(template) -@autocommit -def dao_update_template_reply_to(template_id, reply_to): - Template.query.filter_by(id=template_id).update( - {"service_letter_contact_id": reply_to, - "updated_at": datetime.utcnow(), - "version": Template.version + 1, - } - ) - template = Template.query.filter_by(id=template_id).one() - - history = TemplateHistory(** - { - "id": template.id, - "name": template.name, - "template_type": template.template_type, - "created_at": template.created_at, - "updated_at": template.updated_at, - "content": template.content, - "service_id": template.service_id, - "subject": template.subject, - "postage": template.postage, - "created_by_id": template.created_by_id, - "version": template.version, - "archived": template.archived, - "process_type": template.process_type, - "service_letter_contact_id": template.service_letter_contact_id, - }) - db.session.add(history) - return template - - @autocommit def dao_redact_template(template, user_id): template.template_redacted.redact_personalisation = True @@ -132,28 +93,3 @@ def dao_get_template_versions(service_id, template_id): ).order_by( desc(TemplateHistory.version) ).all() - - -def get_precompiled_letter_template(service_id): - template = Template.query.filter_by( - service_id=service_id, - template_type=LETTER_TYPE, - hidden=True - ).first() - if template is not None: - return template - - template = Template( - name='Pre-compiled PDF', - created_by=get_user_by_id(current_app.config['NOTIFY_USER_ID']), - service_id=service_id, - template_type=LETTER_TYPE, - hidden=True, - subject='Pre-compiled PDF', - content='', - postage=SECOND_CLASS - ) - - dao_create_template(template) - - return template diff --git a/app/dao/uploads_dao.py b/app/dao/uploads_dao.py index c3d525459..056c60898 100644 --- a/app/dao/uploads_dao.py +++ b/app/dao/uploads_dao.py @@ -129,23 +129,3 @@ def dao_get_uploads_by_service_id(service_id, limit_days=None, page=1, page_size ).order_by( desc("processing_started"), desc("created_at") ).paginate(page=page, per_page=page_size) - - -def dao_get_uploaded_letters_by_print_date(service_id, letter_print_date, page=1, page_size=50): - return db.session.query( - Notification, - ).join( - Template, Notification.template_id == Template.id - ).filter( - Notification.service_id == service_id, - Notification.notification_type == LETTER_TYPE, - Notification.api_key_id.is_(None), - Notification.status != NOTIFICATION_CANCELLED, - Template.hidden.is_(True), - _get_printing_day(Notification.created_at) == letter_print_date.date(), - ).order_by( - desc(Notification.created_at) - ).paginate( - page=page, - per_page=page_size, - ) diff --git a/app/job/rest.py b/app/job/rest.py index 36905bb8c..daf63d11f 100644 --- a/app/job/rest.py +++ b/app/job/rest.py @@ -9,8 +9,6 @@ from app.dao.fact_notification_status_dao import ( fetch_notification_statuses_for_job, ) from app.dao.jobs_dao import ( - can_letter_job_be_cancelled, - dao_cancel_letter_job, dao_create_job, dao_get_future_scheduled_job_by_id_and_service_id, dao_get_job_by_service_id_and_job_id, @@ -30,7 +28,6 @@ from app.models import ( JOB_STATUS_CANCELLED, JOB_STATUS_PENDING, JOB_STATUS_SCHEDULED, - LETTER_TYPE, ) from app.schemas import ( job_schema, @@ -66,17 +63,6 @@ def cancel_job(service_id, job_id): return get_job_by_service_and_job_id(service_id, job_id) -@job_blueprint.route('//cancel-letter-job', methods=['POST']) -def cancel_letter_job(service_id, job_id): - job = dao_get_job_by_service_id_and_job_id(service_id, job_id) - can_we_cancel, errors = can_letter_job_be_cancelled(job) - if can_we_cancel: - data = dao_cancel_letter_job(job) - return jsonify(data), 200 - else: - return jsonify(message=errors), 400 - - @job_blueprint.route('//notifications', methods=['GET']) def get_all_notifications_for_service_job(service_id, job_id): data = notifications_filter_schema.load(request.args) @@ -160,9 +146,6 @@ def create_job(service_id): data['template'] = data.pop('template_id') template = dao_get_template_by_id(data['template']) - if template.template_type == LETTER_TYPE and service.restricted: - raise InvalidRequest("Create letter job is not allowed for service in trial mode ", 403) - if data.get('valid') != 'True': raise InvalidRequest("File is not valid, can't create job", 400) diff --git a/app/letter_branding/__init__.py b/app/letter_branding/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/app/letter_branding/letter_branding_rest.py b/app/letter_branding/letter_branding_rest.py deleted file mode 100644 index d1f6fb0bb..000000000 --- a/app/letter_branding/letter_branding_rest.py +++ /dev/null @@ -1,71 +0,0 @@ -from celery import current_app -from flask import Blueprint, jsonify, request -from sqlalchemy.exc import IntegrityError - -from app.dao.letter_branding_dao import ( - dao_create_letter_branding, - dao_get_all_letter_branding, - dao_get_letter_branding_by_id, - dao_update_letter_branding, -) -from app.errors import register_errors -from app.letter_branding.letter_branding_schema import ( - post_letter_branding_schema, -) -from app.models import LetterBranding -from app.schema_validation import validate - -letter_branding_blueprint = Blueprint('letter_branding', __name__, url_prefix='/letter-branding') -register_errors(letter_branding_blueprint) - - -@letter_branding_blueprint.errorhandler(IntegrityError) -def handle_integrity_error(exc): - """ - Handle integrity errors caused by the unique constraint - """ - for col in {'name', 'filename'}: - if 'letter_branding_{}_key'.format(col) in str(exc): - return jsonify( - result='error', - message={col: ["{} already in use".format(col.title())]} - ), 400 - current_app.logger.exception(exc) - return jsonify(result='error', message="Internal server error"), 500 - - -@letter_branding_blueprint.route('', methods=['GET']) -def get_all_letter_brands(): - letter_brands = dao_get_all_letter_branding() - - return jsonify([lb.serialize() for lb in letter_brands]) - - -@letter_branding_blueprint.route('/', methods=['GET']) -def get_letter_brand_by_id(letter_branding_id): - letter_branding = dao_get_letter_branding_by_id(letter_branding_id) - - return jsonify(letter_branding.serialize()), 200 - - -@letter_branding_blueprint.route('', methods=['POST']) -def create_letter_brand(): - data = request.get_json() - - validate(data, post_letter_branding_schema) - - letter_branding = LetterBranding(**data) - dao_create_letter_branding(letter_branding) - - return jsonify(letter_branding.serialize()), 201 - - -@letter_branding_blueprint.route('/', methods=['POST']) -def update_letter_branding(letter_branding_id): - data = request.get_json() - - validate(data, post_letter_branding_schema) - - letter_branding = dao_update_letter_branding(letter_branding_id, **data) - - return jsonify(letter_branding.serialize()), 201 diff --git a/app/letter_branding/letter_branding_schema.py b/app/letter_branding/letter_branding_schema.py deleted file mode 100644 index 3c7acb7b0..000000000 --- a/app/letter_branding/letter_branding_schema.py +++ /dev/null @@ -1,10 +0,0 @@ -post_letter_branding_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "POST schema for creating or updating a letter brand", - "type": "object", - "properties": { - "name": {"type": ["string", "null"]}, - "filename": {"type": ["string", "null"]}, - }, - "required": ["name", "filename"] -} diff --git a/app/letters/__init__.py b/app/letters/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/app/letters/letter_schemas.py b/app/letters/letter_schemas.py deleted file mode 100644 index 13d10b277..000000000 --- a/app/letters/letter_schemas.py +++ /dev/null @@ -1,17 +0,0 @@ -letter_references = { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "list of letter notification references", - "type": "object", - "title": "references", - "properties": { - "references": { - "type": "array", - "items": { - "type": "string", - "pattern": "^[0-9A-Z]{16}$" - }, - "minItems": 1 - }, - }, - "required": ["references"] -} diff --git a/app/letters/rest.py b/app/letters/rest.py deleted file mode 100644 index e5b4cd518..000000000 --- a/app/letters/rest.py +++ /dev/null @@ -1,28 +0,0 @@ -from flask import Blueprint, jsonify, request - -from app.celery.tasks import process_returned_letters_list -from app.config import QueueNames -from app.letters.letter_schemas import letter_references -from app.schema_validation import validate -from app.v2.errors import register_errors - -letter_job = Blueprint("letter-job", __name__) -register_errors(letter_job) - -# too many references will make SQS error (as the task can only be 256kb) -# Maybe doesn't matter anymore with Redis as the celery backing store -MAX_REFERENCES_PER_TASK = 5000 - - -@letter_job.route('/letters/returned', methods=['POST']) -def create_process_returned_letters_job(): - references = validate(request.get_json(), letter_references)['references'] - - for start_index in range(0, len(references), MAX_REFERENCES_PER_TASK): - process_returned_letters_list.apply_async( - args=(references[start_index:start_index + MAX_REFERENCES_PER_TASK], ), - queue=QueueNames.DATABASE, - compression='zlib' - ) - - return jsonify(references=references), 200 diff --git a/app/letters/utils.py b/app/letters/utils.py deleted file mode 100644 index 564549eb9..000000000 --- a/app/letters/utils.py +++ /dev/null @@ -1,235 +0,0 @@ -import io -import json -import math -from datetime import datetime, timedelta -from enum import Enum - -import boto3 -from flask import current_app -from notifications_utils.letter_timings import LETTER_PROCESSING_DEADLINE -from notifications_utils.pdf import pdf_page_count -from notifications_utils.s3 import s3upload -from notifications_utils.timezones import convert_utc_to_local_timezone - -from app.models import ( - KEY_TYPE_TEST, - NOTIFICATION_VALIDATION_FAILED, - RESOLVE_POSTAGE_FOR_FILE_NAME, - SECOND_CLASS, -) - - -class ScanErrorType(Enum): - ERROR = 1 - FAILURE = 2 - - -LETTERS_PDF_FILE_LOCATION_STRUCTURE = \ - '{folder}NOTIFY.{reference}.{duplex}.{letter_class}.{colour}.{date}.pdf' - -PRECOMPILED_BUCKET_PREFIX = '{folder}NOTIFY.{reference}' - - -def get_folder_name(created_at): - print_datetime = convert_utc_to_local_timezone(created_at) - if print_datetime.time() > LETTER_PROCESSING_DEADLINE: - print_datetime += timedelta(days=1) - return '{}/'.format(print_datetime.date()) - - -class LetterPDFNotFound(Exception): - pass - - -def find_letter_pdf_in_s3(notification): - bucket_name, prefix = get_bucket_name_and_prefix_for_notification(notification) - - s3 = boto3.resource('s3') - bucket = s3.Bucket(bucket_name) - try: - item = next(x for x in bucket.objects.filter(Prefix=prefix)) - except StopIteration: - raise LetterPDFNotFound(f'File not found in bucket {bucket_name} with prefix {prefix}', ) - return item - - -def generate_letter_pdf_filename(reference, created_at, ignore_folder=False, postage=SECOND_CLASS): - upload_file_name = LETTERS_PDF_FILE_LOCATION_STRUCTURE.format( - folder='' if ignore_folder else get_folder_name(created_at), - reference=reference, - duplex="D", - letter_class=RESOLVE_POSTAGE_FOR_FILE_NAME[postage], - colour="C", - date=created_at.strftime('%Y%m%d%H%M%S') - ).upper() - return upload_file_name - - -def get_bucket_name_and_prefix_for_notification(notification): - folder = '' - if notification.status == NOTIFICATION_VALIDATION_FAILED: - bucket_name = current_app.config['INVALID_PDF_BUCKET_NAME'] - elif notification.key_type == KEY_TYPE_TEST: - bucket_name = current_app.config['TEST_LETTERS_BUCKET_NAME'] - else: - bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME'] - folder = get_folder_name(notification.created_at) - - upload_file_name = PRECOMPILED_BUCKET_PREFIX.format( - folder=folder, - reference=notification.reference - ).upper() - - return bucket_name, upload_file_name - - -def get_reference_from_filename(filename): - # filename looks like '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.20180113120000.PDF' - filename_parts = filename.split('.') - return filename_parts[1] - - -def upload_letter_pdf(notification, pdf_data, precompiled=False): - current_app.logger.info("PDF Letter {} reference {} created at {}, {} bytes".format( - notification.id, notification.reference, notification.created_at, len(pdf_data))) - - upload_file_name = generate_letter_pdf_filename( - reference=notification.reference, - created_at=notification.created_at, - ignore_folder=precompiled or notification.key_type == KEY_TYPE_TEST, - postage=notification.postage - ) - - if precompiled: - bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME'] - elif notification.key_type == KEY_TYPE_TEST: - bucket_name = current_app.config['TEST_LETTERS_BUCKET_NAME'] - else: - bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME'] - - s3upload( - filedata=pdf_data, - region=current_app.config['AWS_REGION'], - bucket_name=bucket_name, - file_location=upload_file_name - ) - - current_app.logger.info("Uploaded letters PDF {} to {} for notification id {}".format( - upload_file_name, bucket_name, notification.id)) - return upload_file_name - - -def move_failed_pdf(source_filename, scan_error_type): - scan_bucket = current_app.config['LETTERS_SCAN_BUCKET_NAME'] - - target_filename = ('ERROR/' if scan_error_type == ScanErrorType.ERROR else 'FAILURE/') + source_filename - - _move_s3_object(scan_bucket, source_filename, scan_bucket, target_filename) - - -def move_error_pdf_to_scan_bucket(source_filename): - scan_bucket = current_app.config['LETTERS_SCAN_BUCKET_NAME'] - error_file = 'ERROR/' + source_filename - - _move_s3_object(scan_bucket, error_file, scan_bucket, source_filename) - - -def move_scan_to_invalid_pdf_bucket(source_filename, message=None, invalid_pages=None, page_count=None): - metadata = {} - if message: - metadata["message"] = message - if invalid_pages: - metadata["invalid_pages"] = json.dumps(invalid_pages) - if page_count: - metadata["page_count"] = str(page_count) - - _move_s3_object( - source_bucket=current_app.config['LETTERS_SCAN_BUCKET_NAME'], - source_filename=source_filename, - target_bucket=current_app.config['INVALID_PDF_BUCKET_NAME'], - target_filename=source_filename, - metadata=metadata - ) - - -def move_uploaded_pdf_to_letters_bucket(source_filename, upload_filename): - _move_s3_object( - source_bucket=current_app.config['TRANSIENT_UPLOADED_LETTERS'], - source_filename=source_filename, - target_bucket=current_app.config['LETTERS_PDF_BUCKET_NAME'], - target_filename=upload_filename, - ) - - -def move_sanitised_letter_to_test_or_live_pdf_bucket(filename, is_test_letter, created_at, new_filename): - target_bucket_config = 'TEST_LETTERS_BUCKET_NAME' if is_test_letter else 'LETTERS_PDF_BUCKET_NAME' - target_bucket_name = current_app.config[target_bucket_config] - target_folder = '' if is_test_letter else get_folder_name(created_at) - target_filename = target_folder + new_filename - - _move_s3_object( - source_bucket=current_app.config['LETTER_SANITISE_BUCKET_NAME'], - source_filename=filename, - target_bucket=target_bucket_name, - target_filename=target_filename, - ) - - -def get_file_names_from_error_bucket(): - s3 = boto3.resource('s3') - scan_bucket = current_app.config['LETTERS_SCAN_BUCKET_NAME'] - bucket = s3.Bucket(scan_bucket) - - return bucket.objects.filter(Prefix="ERROR") - - -def get_letter_pdf_and_metadata(notification): - obj = find_letter_pdf_in_s3(notification).get() - return obj["Body"].read(), obj["Metadata"] - - -def _move_s3_object(source_bucket, source_filename, target_bucket, target_filename, metadata=None): - s3 = boto3.resource('s3') - copy_source = {'Bucket': source_bucket, 'Key': source_filename} - - target_bucket = s3.Bucket(target_bucket) - obj = target_bucket.Object(target_filename) - - # Tags are copied across but the expiration time is reset in the destination bucket - # e.g. if a file has 5 days left to expire on a ONE_WEEK retention in the source bucket, - # in the destination bucket the expiration time will be reset to 7 days left to expire - put_args = {'ServerSideEncryption': 'AES256'} - if metadata: - put_args['Metadata'] = metadata - put_args["MetadataDirective"] = "REPLACE" - obj.copy(copy_source, ExtraArgs=put_args) - - s3.Object(source_bucket, source_filename).delete() - - current_app.logger.info("Moved letter PDF: {}/{} to {}/{}".format( - source_bucket, source_filename, target_bucket, target_filename)) - - -def letter_print_day(created_at): - bst_print_datetime = convert_utc_to_local_timezone(created_at) + timedelta(hours=6, minutes=30) - bst_print_date = bst_print_datetime.date() - - current_local_date = convert_utc_to_local_timezone(datetime.utcnow()).date() - - if bst_print_date >= current_local_date: - return 'today' - else: - print_date = bst_print_datetime.strftime('%d %B').lstrip('0') - return 'on {}'.format(print_date) - - -def get_page_count(pdf): - return pdf_page_count(io.BytesIO(pdf)) - - -def get_billable_units_for_letter_page_count(page_count): - if not page_count: - return 0 - pages_per_sheet = 2 - billable_units = math.ceil(page_count / pages_per_sheet) - return billable_units diff --git a/app/models.py b/app/models.py index d104c7f8b..72e5f158a 100644 --- a/app/models.py +++ b/app/models.py @@ -6,11 +6,6 @@ from flask import current_app, url_for from notifications_utils.clients.encryption.encryption_client import ( EncryptionError, ) -from notifications_utils.insensitive_dict import InsensitiveDict -from notifications_utils.letter_timings import get_letter_timings -from notifications_utils.postal_address import ( - address_lines_1_to_6_and_postcode_keys, -) from notifications_utils.recipients import ( InvalidEmailError, InvalidPhoneError, @@ -19,7 +14,6 @@ from notifications_utils.recipients import ( validate_phone_number, ) from notifications_utils.template import ( - LetterPrintTemplate, PlainTextEmailTemplate, SMSMessageTemplate, ) @@ -35,7 +29,6 @@ from sqlalchemy import ( from sqlalchemy.dialects.postgresql import JSON, JSONB, UUID from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.ext.declarative import declared_attr -from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import validates from sqlalchemy.orm.collections import attribute_mapped_collection @@ -52,8 +45,8 @@ SMS_TYPE = 'sms' EMAIL_TYPE = 'email' LETTER_TYPE = 'letter' -TEMPLATE_TYPES = [SMS_TYPE, EMAIL_TYPE, LETTER_TYPE] -NOTIFICATION_TYPES = [SMS_TYPE, EMAIL_TYPE, LETTER_TYPE] +TEMPLATE_TYPES = [SMS_TYPE, EMAIL_TYPE] +NOTIFICATION_TYPES = [SMS_TYPE, EMAIL_TYPE] template_types = db.Enum(*TEMPLATE_TYPES, name='template_type') @@ -297,53 +290,22 @@ service_email_branding = db.Table( ) -class LetterBranding(db.Model): - __tablename__ = 'letter_branding' - id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - name = db.Column(db.String(255), unique=True, nullable=False) - filename = db.Column(db.String(255), unique=True, nullable=False) - - def serialize(self): - return { - "id": str(self.id), - "name": self.name, - "filename": self.filename, - } - - -service_letter_branding = db.Table( - 'service_letter_branding', - db.Model.metadata, - # service_id is a primary key as you can only have one letter branding per service - db.Column('service_id', UUID(as_uuid=True), db.ForeignKey('services.id'), primary_key=True, nullable=False), - db.Column('letter_branding_id', UUID(as_uuid=True), db.ForeignKey('letter_branding.id'), nullable=False), -) - - INTERNATIONAL_SMS_TYPE = 'international_sms' INBOUND_SMS_TYPE = 'inbound_sms' SCHEDULE_NOTIFICATIONS = 'schedule_notifications' EMAIL_AUTH = 'email_auth' -LETTERS_AS_PDF = 'letters_as_pdf' -PRECOMPILED_LETTER = 'precompiled_letter' UPLOAD_DOCUMENT = 'upload_document' EDIT_FOLDER_PERMISSIONS = 'edit_folder_permissions' -UPLOAD_LETTERS = 'upload_letters' -INTERNATIONAL_LETTERS = 'international_letters' SERVICE_PERMISSION_TYPES = [ EMAIL_TYPE, SMS_TYPE, - LETTER_TYPE, INTERNATIONAL_SMS_TYPE, INBOUND_SMS_TYPE, SCHEDULE_NOTIFICATIONS, EMAIL_AUTH, - LETTERS_AS_PDF, UPLOAD_DOCUMENT, EDIT_FOLDER_PERMISSIONS, - UPLOAD_LETTERS, - INTERNATIONAL_LETTERS, ] @@ -410,13 +372,6 @@ class Organisation(db.Model): nullable=True, ) - letter_branding = db.relationship('LetterBranding') - letter_branding_id = db.Column( - UUID(as_uuid=True), - db.ForeignKey('letter_branding.id'), - nullable=True, - ) - notes = db.Column(db.Text, nullable=True) purchase_order_number = db.Column(db.String(255), nullable=True) billing_contact_names = db.Column(db.Text, nullable=True) @@ -443,7 +398,6 @@ class Organisation(db.Model): "active": self.active, "crown": self.crown, "organisation_type": self.organisation_type, - "letter_branding_id": self.letter_branding_id, "email_branding_id": self.email_branding_id, "agreement_signed": self.agreement_signed, "agreement_signed_at": self.agreement_signed_at, @@ -508,7 +462,6 @@ class Service(db.Model, Versioned): contact_link = db.Column(db.String(255), nullable=True, unique=False) volume_sms = db.Column(db.Integer(), nullable=True, unique=False) volume_email = db.Column(db.Integer(), nullable=True, unique=False) - volume_letter = db.Column(db.Integer(), nullable=True, unique=False) consent_to_research = db.Column(db.Boolean, nullable=True) count_as_live = db.Column(db.Boolean, nullable=False, default=True) go_live_user_id = db.Column(UUID(as_uuid=True), db.ForeignKey('users.id'), nullable=True) @@ -529,11 +482,6 @@ class Service(db.Model, Versioned): secondary=service_email_branding, uselist=False, backref=db.backref('services', lazy='dynamic')) - letter_branding = db.relationship( - 'LetterBranding', - secondary=service_letter_branding, - uselist=False, - backref=db.backref('services', lazy='dynamic')) @classmethod def from_json(cls, data): @@ -562,10 +510,6 @@ class Service(db.Model, Versioned): default_reply_to = [x for x in self.reply_to_email_addresses if x.is_default] return default_reply_to[0].email_address if default_reply_to else None - def get_default_letter_contact(self): - default_letter_contact = [x for x in self.letter_contacts if x.is_default] - return default_letter_contact[0].contact_block if default_letter_contact else None - def has_permission(self, permission): return permission in [p.permission for p in self.permissions] @@ -923,9 +867,6 @@ template_folder_map = db.Table( ) -PRECOMPILED_TEMPLATE_NAME = 'Pre-compiled PDF' - - class TemplateBase(db.Model): __abstract__ = True @@ -944,7 +885,6 @@ class TemplateBase(db.Model): archived = db.Column(db.Boolean, nullable=False, default=False) hidden = db.Column(db.Boolean, nullable=False, default=False) subject = db.Column(db.Text) - postage = db.Column(db.String, nullable=True) @declared_attr def service_id(cls): @@ -970,58 +910,31 @@ class TemplateBase(db.Model): redact_personalisation = association_proxy('template_redacted', 'redact_personalisation') - @declared_attr - def service_letter_contact_id(cls): - return db.Column(UUID(as_uuid=True), db.ForeignKey('service_letter_contacts.id'), nullable=True) - - @declared_attr - def service_letter_contact(cls): - return db.relationship('ServiceLetterContact', viewonly=True) - + # TODO: possibly unnecessary after removing letters @property def reply_to(self): - if self.template_type == LETTER_TYPE: - return self.service_letter_contact_id - else: - return None + return None @reply_to.setter def reply_to(self, value): - if self.template_type == LETTER_TYPE: - self.service_letter_contact_id = value - elif value is None: + if value is None: pass else: raise ValueError('Unable to set sender for {} template'.format(self.template_type)) def get_reply_to_text(self): - if self.template_type == LETTER_TYPE: - return self.service_letter_contact.contact_block if self.service_letter_contact else None - elif self.template_type == EMAIL_TYPE: + if self.template_type == EMAIL_TYPE: return self.service.get_default_reply_to_email_address() elif self.template_type == SMS_TYPE: return try_validate_and_format_phone_number(self.service.get_default_sms_sender()) else: return None - @hybrid_property - def is_precompiled_letter(self): - return self.hidden and self.name == PRECOMPILED_TEMPLATE_NAME and self.template_type == LETTER_TYPE - - @is_precompiled_letter.setter - def is_precompiled_letter(self, value): - pass - def _as_utils_template(self): if self.template_type == EMAIL_TYPE: return PlainTextEmailTemplate(self.__dict__) if self.template_type == SMS_TYPE: return SMSMessageTemplate(self.__dict__) - if self.template_type == LETTER_TYPE: - return LetterPrintTemplate( - self.__dict__, - contact_block=self.get_reply_to_text(), - ) def _as_utils_template_with_personalisation(self, values): template = self._as_utils_template() @@ -1037,7 +950,7 @@ class TemplateBase(db.Model): "created_by": self.created_by.email_address, "version": self.version, "body": self.content, - "subject": self.subject if self.template_type in {EMAIL_TYPE, LETTER_TYPE} else None, + "subject": self.subject if self.template_type == EMAIL_TYPE else None, "name": self.name, "personalisation": { key: { @@ -1045,8 +958,6 @@ class TemplateBase(db.Model): } for key in self._as_utils_template().placeholders }, - "postage": self.postage, - "letter_contact_block": self.service_letter_contact.contact_block if self.service_letter_contact else None, } return serialized @@ -1130,8 +1041,7 @@ SMS_PROVIDERS = [SNS_PROVIDER] EMAIL_PROVIDERS = [SES_PROVIDER] PROVIDERS = SMS_PROVIDERS + EMAIL_PROVIDERS -NOTIFICATION_TYPE = [EMAIL_TYPE, SMS_TYPE, LETTER_TYPE] -notification_types = db.Enum(*NOTIFICATION_TYPE, name='notification_type') +notification_types = db.Enum(*NOTIFICATION_TYPES, name='notification_type') class ProviderDetails(db.Model): @@ -1291,7 +1201,6 @@ NOTIFICATION_PERMANENT_FAILURE = 'permanent-failure' NOTIFICATION_PENDING_VIRUS_CHECK = 'pending-virus-check' NOTIFICATION_VALIDATION_FAILED = 'validation-failed' NOTIFICATION_VIRUS_SCAN_FAILED = 'virus-scan-failed' -NOTIFICATION_RETURNED_LETTER = 'returned-letter' NOTIFICATION_STATUS_TYPES_FAILED = [ NOTIFICATION_TECHNICAL_FAILURE, @@ -1299,7 +1208,6 @@ NOTIFICATION_STATUS_TYPES_FAILED = [ NOTIFICATION_PERMANENT_FAILURE, NOTIFICATION_VALIDATION_FAILED, NOTIFICATION_VIRUS_SCAN_FAILED, - NOTIFICATION_RETURNED_LETTER, ] NOTIFICATION_STATUS_TYPES_COMPLETED = [ @@ -1309,7 +1217,6 @@ NOTIFICATION_STATUS_TYPES_COMPLETED = [ NOTIFICATION_TECHNICAL_FAILURE, NOTIFICATION_TEMPORARY_FAILURE, NOTIFICATION_PERMANENT_FAILURE, - NOTIFICATION_RETURNED_LETTER, NOTIFICATION_CANCELLED, ] @@ -1326,7 +1233,6 @@ NOTIFICATION_STATUS_TYPES_BILLABLE = [ NOTIFICATION_FAILED, NOTIFICATION_TEMPORARY_FAILURE, NOTIFICATION_PERMANENT_FAILURE, - NOTIFICATION_RETURNED_LETTER, ] NOTIFICATION_STATUS_TYPES_BILLABLE_SMS = [ @@ -1338,11 +1244,6 @@ NOTIFICATION_STATUS_TYPES_BILLABLE_SMS = [ NOTIFICATION_PERMANENT_FAILURE, ] -NOTIFICATION_STATUS_TYPES_BILLABLE_FOR_LETTERS = [ - NOTIFICATION_SENDING, - NOTIFICATION_DELIVERED, - NOTIFICATION_RETURNED_LETTER, -] # we don't really have a concept of billable emails - however the ft billing table only includes emails that we have # actually sent. NOTIFICATION_STATUS_TYPES_SENT_EMAILS = [ @@ -1366,32 +1267,12 @@ NOTIFICATION_STATUS_TYPES = [ NOTIFICATION_PENDING_VIRUS_CHECK, NOTIFICATION_VALIDATION_FAILED, NOTIFICATION_VIRUS_SCAN_FAILED, - NOTIFICATION_RETURNED_LETTER, ] NOTIFICATION_STATUS_TYPES_NON_BILLABLE = list(set(NOTIFICATION_STATUS_TYPES) - set(NOTIFICATION_STATUS_TYPES_BILLABLE)) NOTIFICATION_STATUS_TYPES_ENUM = db.Enum(*NOTIFICATION_STATUS_TYPES, name='notify_status_type') -NOTIFICATION_STATUS_LETTER_ACCEPTED = 'accepted' -NOTIFICATION_STATUS_LETTER_RECEIVED = 'received' - -DVLA_RESPONSE_STATUS_SENT = 'Sent' - -FIRST_CLASS = 'first' -SECOND_CLASS = 'second' -EUROPE = 'europe' -REST_OF_WORLD = 'rest-of-world' -POSTAGE_TYPES = [FIRST_CLASS, SECOND_CLASS, EUROPE, REST_OF_WORLD] -UK_POSTAGE_TYPES = [FIRST_CLASS, SECOND_CLASS] -INTERNATIONAL_POSTAGE_TYPES = [EUROPE, REST_OF_WORLD] -RESOLVE_POSTAGE_FOR_FILE_NAME = { - FIRST_CLASS: 1, - SECOND_CLASS: 2, - EUROPE: 'E', - REST_OF_WORLD: 'N', -} - class NotificationStatusTypes(db.Model): __tablename__ = 'notification_status_types' @@ -1407,6 +1288,10 @@ class NotificationAllTimeView(db.Model): """ __tablename__ = 'notifications_all_time_view' + # Tell alembic not to create this as a table. We have a migration where we manually set this up as a view. + # This is custom logic we apply - not built-in logic. See `migrations/env.py` + __table_args__ = {"info": {"managed_by_alembic": False}} + id = db.Column(UUID(as_uuid=True), primary_key=True) job_id = db.Column(UUID(as_uuid=True)) job_row_number = db.Column(db.Integer) @@ -1428,7 +1313,6 @@ class NotificationAllTimeView(db.Model): phone_prefix = db.Column(db.String) rate_multiplier = db.Column(db.Numeric(asdecimal=False)) created_by_id = db.Column(UUID(as_uuid=True)) - postage = db.Column(db.String) document_download_count = db.Column(db.Integer) @@ -1491,7 +1375,6 @@ class Notification(db.Model): document_download_count = db.Column(db.Integer, nullable=True) - postage = db.Column(db.String, nullable=True) provider_response = db.Column(db.Text, nullable=True) # queue_name = db.Column(db.Text, nullable=True) @@ -1571,10 +1454,7 @@ class Notification(db.Model): def _substitute_status_str(_status): return ( - NOTIFICATION_STATUS_TYPES_FAILED if _status == NOTIFICATION_FAILED else - [NOTIFICATION_CREATED, NOTIFICATION_SENDING] if _status == NOTIFICATION_STATUS_LETTER_ACCEPTED else - NOTIFICATION_DELIVERED if _status == NOTIFICATION_STATUS_LETTER_RECEIVED else - [_status] + NOTIFICATION_STATUS_TYPES_FAILED if _status == NOTIFICATION_FAILED else [_status] ) def _substitute_status_seq(_statuses): @@ -1619,36 +1499,9 @@ class Notification(db.Model): 'sending': 'Sending', 'created': 'Sending', 'sent': 'Sent internationally' - }, - 'letter': { - 'technical-failure': 'Technical failure', - 'permanent-failure': 'Permanent failure', - 'sending': 'Accepted', - 'created': 'Accepted', - 'delivered': 'Received', - 'returned-letter': 'Returned', } }[self.template.template_type].get(self.status, self.status) - def get_letter_status(self): - """ - Return the notification_status, as we should present for letters. The distinction between created and sending is - a bit more confusing for letters, not to mention that there's no concept of temporary or permanent failure yet. - - - """ - # this should only ever be called for letter notifications - it makes no sense otherwise and I'd rather not - # get the two code flows mixed up at all - assert self.notification_type == LETTER_TYPE # nosec B101 - current calling code validates correct type - - if self.status in [NOTIFICATION_CREATED, NOTIFICATION_SENDING]: - return NOTIFICATION_STATUS_LETTER_ACCEPTED - elif self.status in [NOTIFICATION_DELIVERED, NOTIFICATION_RETURNED_LETTER]: - return NOTIFICATION_STATUS_LETTER_RECEIVED - else: - # Currently can only be technical-failure OR pending-virus-check OR validation-failed - return self.status - def get_created_by_name(self): if self.created_by: return self.created_by.name @@ -1698,7 +1551,7 @@ class Notification(db.Model): "line_6": None, "postcode": None, "type": self.notification_type, - "status": self.get_letter_status() if self.notification_type == LETTER_TYPE else self.status, + "status": self.status, "provider_response": self.provider_response, "template": template_dict, "body": self.content, @@ -1708,29 +1561,8 @@ class Notification(db.Model): "sent_at": get_dt_string_or_none(self.sent_at), "completed_at": self.completed_at(), "scheduled_for": None, - "postage": self.postage } - if self.notification_type == LETTER_TYPE: - personalisation = InsensitiveDict(self.personalisation) - - ( - serialized['line_1'], - serialized['line_2'], - serialized['line_3'], - serialized['line_4'], - serialized['line_5'], - serialized['line_6'], - serialized['postcode'], - ) = ( - personalisation.get(line) for line in address_lines_1_to_6_and_postcode_keys - ) - - serialized['estimated_delivery'] = \ - get_letter_timings(serialized['created_at'], postage=self.postage)\ - .earliest_delivery\ - .strftime(DATETIME_FORMAT) - return serialized @@ -1771,8 +1603,6 @@ class NotificationHistory(db.Model, HistoryModel): created_by_id = db.Column(UUID(as_uuid=True), nullable=True) - postage = db.Column(db.String, nullable=True) - document_download_count = db.Column(db.Integer, nullable=True) __table_args__ = ( @@ -1880,7 +1710,6 @@ MANAGE_TEMPLATES = 'manage_templates' MANAGE_SETTINGS = 'manage_settings' SEND_TEXTS = 'send_texts' SEND_EMAILS = 'send_emails' -SEND_LETTERS = 'send_letters' MANAGE_API_KEYS = 'manage_api_keys' PLATFORM_ADMIN = 'platform_admin' VIEW_ACTIVITY = 'view_activity' @@ -1892,7 +1721,6 @@ PERMISSION_LIST = [ MANAGE_SETTINGS, SEND_TEXTS, SEND_EMAILS, - SEND_LETTERS, MANAGE_API_KEYS, PLATFORM_ADMIN, VIEW_ACTIVITY, @@ -2000,18 +1828,6 @@ class InboundSmsHistory(db.Model, HistoryModel): provider = db.Column(db.String, nullable=False) -class LetterRate(db.Model): - __tablename__ = 'letter_rates' - - id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - start_date = db.Column(db.DateTime, nullable=False) - end_date = db.Column(db.DateTime, nullable=True) - sheet_count = db.Column(db.Integer, nullable=False) # double sided sheet - rate = db.Column(db.Numeric(), nullable=False) - crown = db.Column(db.Boolean, nullable=False) - post_class = db.Column(db.String, nullable=False) - - class ServiceEmailReplyTo(db.Model): __tablename__ = "service_email_reply_to" @@ -2038,52 +1854,12 @@ class ServiceEmailReplyTo(db.Model): } -class ServiceLetterContact(db.Model): - __tablename__ = "service_letter_contacts" - - id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - - service_id = db.Column(UUID(as_uuid=True), db.ForeignKey('services.id'), unique=False, index=True, nullable=False) - service = db.relationship(Service, backref=db.backref("letter_contacts")) - - contact_block = db.Column(db.Text, nullable=False, index=False, unique=False) - is_default = db.Column(db.Boolean, nullable=False, default=True) - archived = db.Column(db.Boolean, nullable=False, default=False) - created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) - updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) - - def serialize(self): - return { - 'id': str(self.id), - 'service_id': str(self.service_id), - 'contact_block': self.contact_block, - 'is_default': self.is_default, - 'archived': self.archived, - 'created_at': self.created_at.strftime(DATETIME_FORMAT), - 'updated_at': get_dt_string_or_none(self.updated_at), - } - - class AuthType(db.Model): __tablename__ = 'auth_type' name = db.Column(db.String, primary_key=True) -class DailySortedLetter(db.Model): - __tablename__ = "daily_sorted_letter" - - id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - billing_day = db.Column(db.Date, nullable=False, index=True) - file_name = db.Column(db.String, nullable=True, index=True) - unsorted_count = db.Column(db.Integer, nullable=False, default=0) - sorted_count = db.Column(db.Integer, nullable=False, default=0) - updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) - - __table_args__ = (UniqueConstraint('file_name', 'billing_day', name='uix_file_name_billing_day'), - ) - - class FactBilling(db.Model): __tablename__ = "ft_billing" @@ -2095,7 +1871,6 @@ class FactBilling(db.Model): rate_multiplier = db.Column(db.Integer(), nullable=False, primary_key=True) international = db.Column(db.Boolean, nullable=False, primary_key=True) rate = db.Column(db.Numeric(), nullable=False, primary_key=True) - postage = db.Column(db.String, nullable=False, primary_key=True) billable_units = db.Column(db.Integer(), nullable=True) notifications_sent = db.Column(db.Integer(), nullable=True) created_at = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow) @@ -2185,18 +1960,6 @@ class ServiceDataRetention(db.Model): } -class ReturnedLetter(db.Model): - __tablename__ = 'returned_letters' - - id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - reported_at = db.Column(db.Date, nullable=False) - service_id = db.Column(UUID(as_uuid=True), db.ForeignKey('services.id'), unique=False, index=True, nullable=False) - service = db.relationship(Service, backref=db.backref('returned_letters')) - notification_id = db.Column(UUID(as_uuid=True), unique=True, nullable=False) - created_at = db.Column(db.DateTime, nullable=False) - updated_at = db.Column(db.DateTime, nullable=True, onupdate=datetime.datetime.utcnow) - - class ServiceContactList(db.Model): __tablename__ = 'service_contact_list' diff --git a/app/notifications/notifications_letter_callback.py b/app/notifications/notifications_letter_callback.py deleted file mode 100644 index 9108d5c45..000000000 --- a/app/notifications/notifications_letter_callback.py +++ /dev/null @@ -1,61 +0,0 @@ -import json -from functools import wraps - -from flask import Blueprint, current_app, jsonify, request - -from app.celery.tasks import ( - record_daily_sorted_counts, - update_letter_notifications_statuses, -) -from app.config import QueueNames -from app.notifications.utils import autoconfirm_subscription -from app.schema_validation import validate -from app.v2.errors import register_errors - -letter_callback_blueprint = Blueprint('notifications_letter_callback', __name__) -register_errors(letter_callback_blueprint) - - -dvla_sns_callback_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "sns callback received on s3 update", - "type": "object", - "title": "dvla internal sns callback", - "properties": { - "Type": {"enum": ["Notification", "SubscriptionConfirmation"]}, - "MessageId": {"type": "string"}, - "Message": {"type": ["string", "object"]} - }, - "required": ["Type", "MessageId", "Message"] -} - - -def validate_schema(schema): - def decorator(f): - @wraps(f) - def wrapper(*args, **kw): - validate(request.get_json(force=True), schema) - return f(*args, **kw) - return wrapper - return decorator - - -@letter_callback_blueprint.route('/notifications/letter/dvla', methods=['POST']) -@validate_schema(dvla_sns_callback_schema) -def process_letter_response(): - req_json = request.get_json(force=True) - current_app.logger.debug('Received SNS callback: {}'.format(req_json)) - if not autoconfirm_subscription(req_json): - # The callback should have one record for an S3 Put Event. - message = json.loads(req_json['Message']) - filename = message['Records'][0]['s3']['object']['key'] - current_app.logger.info('Received file from DVLA: {}'.format(filename)) - - if filename.lower().endswith('rs.txt') or filename.lower().endswith('rsp.txt'): - current_app.logger.info('DVLA callback: Calling task to update letter notifications') - update_letter_notifications_statuses.apply_async([filename], queue=QueueNames.NOTIFY) - record_daily_sorted_counts.apply_async([filename], queue=QueueNames.NOTIFY) - - return jsonify( - result="success", message="DVLA callback succeeded" - ), 200 diff --git a/app/notifications/process_letter_notifications.py b/app/notifications/process_letter_notifications.py deleted file mode 100644 index 3cce47433..000000000 --- a/app/notifications/process_letter_notifications.py +++ /dev/null @@ -1,41 +0,0 @@ -from notifications_utils.postal_address import PostalAddress - -from app import create_random_identifier -from app.models import LETTER_TYPE -from app.notifications.process_notifications import persist_notification - - -def create_letter_notification( - letter_data, - template, - service, - api_key, - status, - reply_to_text=None, - billable_units=None, - updated_at=None, - postage=None -): - notification = persist_notification( - template_id=template.id, - template_version=template.version, - # we only accept addresses_with_underscores from the API (from CSV we also accept dashes, spaces etc) - recipient=PostalAddress.from_personalisation(letter_data['personalisation']).normalised, - service=service, - personalisation=letter_data['personalisation'], - notification_type=LETTER_TYPE, - api_key_id=api_key.id, - key_type=api_key.key_type, - job_id=None, - job_row_number=None, - reference=create_random_identifier(), - client_reference=letter_data.get('reference'), - status=status, - reply_to_text=reply_to_text, - billable_units=billable_units, - # letter_data.get('postage') is only set for precompiled letters (if international it is set after sanitise) - # letters from a template will pass in 'europe' or 'rest-of-world' if None then use postage from template - postage=postage or letter_data.get('postage') or template.postage, - updated_at=updated_at - ) - return notification diff --git a/app/notifications/process_notifications.py b/app/notifications/process_notifications.py index 4abda5acd..d9d588dd7 100644 --- a/app/notifications/process_notifications.py +++ b/app/notifications/process_notifications.py @@ -10,14 +10,12 @@ from notifications_utils.recipients import ( validate_and_format_phone_number, ) from notifications_utils.template import ( - LetterPrintTemplate, PlainTextEmailTemplate, SMSMessageTemplate, ) from app import redis_store from app.celery import provider_tasks -from app.celery.letters_pdf_tasks import get_pdf_for_templated_letter from app.config import QueueNames from app.dao.notifications_dao import ( dao_create_notification, @@ -25,9 +23,7 @@ from app.dao.notifications_dao import ( ) from app.models import ( EMAIL_TYPE, - INTERNATIONAL_POSTAGE_TYPES, KEY_TYPE_TEST, - LETTER_TYPE, NOTIFICATION_CREATED, SMS_TYPE, Notification, @@ -58,16 +54,6 @@ def create_content_for_notification(template, personalisation): }, personalisation, ) - if template.template_type == LETTER_TYPE: - template_object = LetterPrintTemplate( - { - 'content': template.content, - 'subject': template.subject, - 'template_type': template.template_type, - }, - personalisation, - contact_block=template.reply_to_text, - ) check_placeholders(template_object) @@ -101,7 +87,6 @@ def persist_notification( status=NOTIFICATION_CREATED, reply_to_text=None, billable_units=None, - postage=None, document_download_count=None, updated_at=None ): @@ -149,10 +134,6 @@ def persist_notification( current_app.logger.info('Persisting notification with type: {}'.format(EMAIL_TYPE)) notification.normalised_to = format_email_address(notification.to) current_app.logger.info('Persisting notification to formatted email: {}'.format(notification.normalised_to)) - elif notification_type == LETTER_TYPE: - notification.postage = postage - notification.international = postage in INTERNATIONAL_POSTAGE_TYPES - notification.normalised_to = ''.join(notification.to.split()).lower() # if simulated create a Notification model to return but do not persist the Notification to the dB if not simulated: @@ -194,10 +175,6 @@ def send_notification_to_queue_detached( if not queue: queue = QueueNames.SEND_EMAIL deliver_task = provider_tasks.deliver_email - if notification_type == LETTER_TYPE: - if not queue: - queue = QueueNames.CREATE_LETTERS_PDF - deliver_task = get_pdf_for_templated_letter try: deliver_task.apply_async([str(notification_id)], queue=queue) diff --git a/app/notifications/rest.py b/app/notifications/rest.py index d90ef425d..59b6348da 100644 --- a/app/notifications/rest.py +++ b/app/notifications/rest.py @@ -5,13 +5,7 @@ from app import api_user, authenticated_service from app.config import QueueNames from app.dao import notifications_dao from app.errors import InvalidRequest, register_errors -from app.models import ( - EMAIL_TYPE, - KEY_TYPE_TEAM, - LETTER_TYPE, - PRIORITY, - SMS_TYPE, -) +from app.models import EMAIL_TYPE, KEY_TYPE_TEAM, PRIORITY, SMS_TYPE from app.notifications.process_notifications import ( persist_notification, send_notification_to_queue, @@ -81,7 +75,6 @@ def send_notification(notification_type): if notification_type not in [SMS_TYPE, EMAIL_TYPE]: msg = "{} notification type is not supported".format(notification_type) - msg = msg + ", please use the latest version of the client" if notification_type == LETTER_TYPE else msg raise InvalidRequest(msg, 400) notification_form = ( @@ -111,7 +104,6 @@ def send_notification(notification_type): simulated = simulated_recipient(notification_form['to'], notification_type) notification_model = persist_notification(template_id=template.id, template_version=template.version, - postage=template.postage, recipient=request.get_json()['to'], service=authenticated_service, personalisation=notification_form.get('personalisation', None), diff --git a/app/notifications/validators.py b/app/notifications/validators.py index 7a079df66..9960402a8 100644 --- a/app/notifications/validators.py +++ b/app/notifications/validators.py @@ -5,7 +5,6 @@ from notifications_utils.clients.redis import ( daily_limit_cache_key, rate_limit_cache_key, ) -from notifications_utils.postal_address import PostalAddress from notifications_utils.recipients import ( get_international_phone_info, validate_and_format_email_address, @@ -15,15 +14,12 @@ from sqlalchemy.orm.exc import NoResultFound from app import redis_store from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id -from app.dao.service_letter_contact_dao import dao_get_letter_contact_by_id from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id from app.models import ( EMAIL_TYPE, - INTERNATIONAL_LETTERS, INTERNATIONAL_SMS_TYPE, KEY_TYPE_TEAM, KEY_TYPE_TEST, - LETTER_TYPE, SMS_TYPE, ServicePermission, ) @@ -33,12 +29,7 @@ from app.notifications.process_notifications import ( from app.serialised_models import SerialisedTemplate from app.service.utils import service_allowed_to_send_to from app.utils import get_public_notify_type_text -from app.v2.errors import ( - BadRequestError, - RateLimitError, - TooManyRequestsError, - ValidationError, -) +from app.v2.errors import BadRequestError, RateLimitError, TooManyRequestsError REDIS_EXCEEDED_RATE_LIMIT_DURATION_SECONDS = Histogram( 'redis_exceeded_rate_limit_duration_seconds', @@ -208,8 +199,6 @@ def check_reply_to(service_id, reply_to_id, type_): return check_service_email_reply_to_id(service_id, reply_to_id, type_) elif type_ == SMS_TYPE: return check_service_sms_sender_id(service_id, reply_to_id, type_) - elif type_ == LETTER_TYPE: - return check_service_letter_contact_id(service_id, reply_to_id, type_) def check_service_email_reply_to_id(service_id, reply_to_id, notification_type): @@ -230,44 +219,3 @@ def check_service_sms_sender_id(service_id, sms_sender_id, notification_type): message = 'sms_sender_id {} does not exist in database for service id {}' \ .format(sms_sender_id, service_id) raise BadRequestError(message=message) - - -def check_service_letter_contact_id(service_id, letter_contact_id, notification_type): - if letter_contact_id: - try: - return dao_get_letter_contact_by_id(service_id, letter_contact_id).contact_block - except NoResultFound: - message = 'letter_contact_id {} does not exist in database for service id {}' \ - .format(letter_contact_id, service_id) - raise BadRequestError(message=message) - - -def validate_address(service, letter_data): - address = PostalAddress.from_personalisation( - letter_data, - allow_international_letters=(INTERNATIONAL_LETTERS in str(service.permissions)), - ) - if not address.has_enough_lines: - raise ValidationError( - message=f'Address must be at least {PostalAddress.MIN_LINES} lines' - ) - if address.has_too_many_lines: - raise ValidationError( - message=f'Address must be no more than {PostalAddress.MAX_LINES} lines' - ) - if not address.has_valid_last_line: - if address.allow_international_letters: - raise ValidationError( - message='Last line of address must be a real UK postcode or another country' - ) - raise ValidationError( - message='Must be a real UK postcode' - ) - if address.has_invalid_characters: - raise ValidationError( - message='Address lines must not start with any of the following characters: @ ( ) = [ ] ” \\ / , < >' - ) - if address.international: - return address.postage - else: - return None diff --git a/app/performance_dashboard/rest.py b/app/performance_dashboard/rest.py index 463961651..1b38bf315 100644 --- a/app/performance_dashboard/rest.py +++ b/app/performance_dashboard/rest.py @@ -34,7 +34,7 @@ def get_performance_dashboard(): start_date = datetime.strptime(request.args.get('start_date', today), '%Y-%m-%d').date() end_date = datetime.strptime(request.args.get('end_date', today), '%Y-%m-%d').date() total_for_all_time = get_total_notifications_for_date_range(start_date=None, end_date=None) - total_notifications, emails, sms, letters = transform_results_into_totals(total_for_all_time) + total_notifications, emails, sms = transform_results_into_totals(total_for_all_time) totals_for_date_range = get_total_notifications_for_date_range(start_date=start_date, end_date=end_date) processing_time_results = get_processing_time_percentage_for_date_range(start_date=start_date, end_date=end_date) services = get_live_services_with_organisation() @@ -42,12 +42,10 @@ def get_performance_dashboard(): "total_notifications": total_notifications, "email_notifications": emails, "sms_notifications": sms, - "letter_notifications": letters, "notifications_by_type": transform_into_notification_by_type_json(totals_for_date_range), "processing_time": transform_processing_time_results_to_json(processing_time_results), "live_service_count": len(services), "services_using_notify": transform_services_to_json(services) - } return jsonify(stats) @@ -57,21 +55,18 @@ def transform_results_into_totals(total_notifications_results): total_notifications = 0 emails = 0 sms = 0 - letters = 0 for x in total_notifications_results: total_notifications += x.emails total_notifications += x.sms - total_notifications += x.letters emails += x.emails sms += x.sms - letters += x.letters - return total_notifications, emails, sms, letters + return total_notifications, emails, sms def transform_into_notification_by_type_json(total_notifications): j = [] for x in total_notifications: - j.append({"date": x.local_date, "emails": x.emails, "sms": x.sms, "letters": x.letters}) + j.append({"date": x.local_date, "emails": x.emails, "sms": x.sms}) return j diff --git a/app/performance_platform/total_sent_notifications.py b/app/performance_platform/total_sent_notifications.py index 62b38e390..1de291067 100644 --- a/app/performance_platform/total_sent_notifications.py +++ b/app/performance_platform/total_sent_notifications.py @@ -4,6 +4,7 @@ from app.dao.fact_notification_status_dao import ( ) +# TODO: is this obsolete? it doesn't seem to be used anywhere def send_total_notifications_sent_for_day_stats(start_time, notification_type, count): payload = performance_platform_client.format_payload( dataset='notifications', @@ -16,13 +17,12 @@ def send_total_notifications_sent_for_day_stats(start_time, notification_type, c performance_platform_client.send_stats_to_performance_platform(payload) +# TODO: is this obsolete? it doesn't seem to be used anywhere def get_total_sent_notifications_for_day(day): email_count = get_total_sent_notifications_for_day_and_type(day, 'email') sms_count = get_total_sent_notifications_for_day_and_type(day, 'sms') - letter_count = get_total_sent_notifications_for_day_and_type(day, 'letter') return { "email": email_count, "sms": sms_count, - "letter": letter_count, } diff --git a/app/platform_stats/rest.py b/app/platform_stats/rest.py index ff5a44b28..554fc3988 100644 --- a/app/platform_stats/rest.py +++ b/app/platform_stats/rest.py @@ -7,8 +7,6 @@ from app.dao.fact_billing_dao import ( fetch_billing_details_for_all_services, fetch_daily_sms_provider_volumes_for_platform, fetch_daily_volumes_for_platform, - fetch_letter_costs_and_totals_for_all_services, - fetch_letter_line_items_for_all_services, fetch_sms_billing_for_all_services, fetch_volumes_by_service, ) @@ -16,7 +14,6 @@ from app.dao.fact_notification_status_dao import ( fetch_notification_status_totals_for_all_services, ) from app.errors import InvalidRequest, register_errors -from app.models import UK_POSTAGE_TYPES from app.platform_stats.platform_stats_schema import platform_stats_request from app.schema_validation import validate from app.service.statistics import format_admin_stats @@ -75,14 +72,7 @@ def get_data_for_billing_report(): start_date, end_date = validate_date_range_is_within_a_financial_year(start_date, end_date) sms_costs = fetch_sms_billing_for_all_services(start_date, end_date) - letter_overview = fetch_letter_costs_and_totals_for_all_services(start_date, end_date) - letter_breakdown = fetch_letter_line_items_for_all_services(start_date, end_date) - lb_by_service = [ - (lb.service_id, - f"{lb.letters_sent} {postage_description(lb.postage)} letters at {format_letter_rate(lb.letter_rate)}") - for lb in letter_breakdown - ] combined = {} for s in sms_costs: if float(s.sms_cost) > 0: @@ -93,34 +83,9 @@ def get_data_for_billing_report(): "service_name": s.service_name, "sms_cost": float(s.sms_cost), "sms_chargeable_units": s.chargeable_billable_sms, - "total_letters": 0, - "letter_cost": 0, - "letter_breakdown": "" } combined[s.service_id] = entry - for data in letter_overview: - if data.service_id in combined: - combined[data.service_id].update( - {'total_letters': data.total_letters, 'letter_cost': float(data.letter_cost)} - ) - - else: - letter_entry = { - "organisation_id": str(data.organisation_id) if data.organisation_id else "", - "organisation_name": data.organisation_name or "", - "service_id": str(data.service_id), - "service_name": data.service_name, - "sms_cost": 0, - "sms_chargeable_units": 0, - "total_letters": data.total_letters, - "letter_cost": float(data.letter_cost), - "letter_breakdown": "" - } - combined[data.service_id] = letter_entry - for service_id, breakdown in lb_by_service: - combined[service_id]['letter_breakdown'] += (breakdown + '\n') - billing_details = fetch_billing_details_for_all_services() for service in billing_details: if service.service_id in combined: @@ -156,8 +121,6 @@ def daily_volumes_report(): "sms_fragment_totals": int(row.sms_fragment_totals), "sms_chargeable_units": int(row.sms_chargeable_units), "email_totals": int(row.email_totals), - "letter_totals": int(row.letter_totals), - "letter_sheet_totals": int(row.letter_sheet_totals) }) return jsonify(report) @@ -201,23 +164,6 @@ def volumes_by_service_report(): "sms_notifications": int(row.sms_notifications), "sms_chargeable_units": int(row.sms_chargeable_units), "email_totals": int(row.email_totals), - "letter_totals": int(row.letter_totals), - "letter_sheet_totals": int(row.letter_sheet_totals), - "letter_cost": float(row.letter_cost), }) return jsonify(report) - - -def postage_description(postage): - if postage in UK_POSTAGE_TYPES: - return f'{postage} class' - else: - return 'international' - - -def format_letter_rate(number): - if number >= 1: - return f"£{number:,.2f}" - - return f"{number * 100:.0f}p" diff --git a/app/schema_validation/__init__.py b/app/schema_validation/__init__.py index 7e813f4d0..86af11404 100644 --- a/app/schema_validation/__init__.py +++ b/app/schema_validation/__init__.py @@ -35,14 +35,6 @@ def validate_schema_email_address(instance): return True -@format_checker.checks('postage', raises=ValidationError) -def validate_schema_postage(instance): - if isinstance(instance, str): - if instance not in ["first", "second", "europe", "rest-of-world"]: - raise ValidationError("invalid. It must be first, second, europe or rest-of-world.") - return True - - @format_checker.checks('datetime_within_next_day', raises=ValidationError) def validate_schema_date_with_hour(instance): if isinstance(instance, str): diff --git a/app/schemas.py b/app/schemas.py index b83382350..6acf85396 100644 --- a/app/schemas.py +++ b/app/schemas.py @@ -229,15 +229,11 @@ class ServiceSchema(BaseSchema, UUIDsAsStringsMixin): created_by = field_for(models.Service, 'created_by', required=True) organisation_type = field_for(models.Service, 'organisation_type') - letter_logo_filename = fields.Method(dump_only=True, serialize='get_letter_logo_filename') permissions = fields.Method("serialize_service_permissions", "deserialize_service_permissions") email_branding = field_for(models.Service, 'email_branding') organisation = field_for(models.Service, 'organisation') go_live_at = field_for(models.Service, 'go_live_at', format=DATETIME_FORMAT_NO_TIMEZONE) - def get_letter_logo_filename(self, service): - return service.letter_branding and service.letter_branding.filename - def serialize_service_permissions(self, service): return [p.permission for p in service.permissions] @@ -253,9 +249,6 @@ class ServiceSchema(BaseSchema, UUIDsAsStringsMixin): return in_data - def get_letter_contact(self, service): - return service.get_default_letter_contact() - class Meta(BaseSchema.Meta): model = models.Service exclude = ( @@ -271,10 +264,7 @@ class ServiceSchema(BaseSchema, UUIDsAsStringsMixin): 'inbound_number', 'inbound_sms', 'jobs', - 'letter_contacts', - 'letter_logo_filename', 'reply_to_email_addresses', - 'returned_letters', 'service_sms_senders', 'templates', 'updated_at', @@ -334,7 +324,6 @@ class DetailedServiceSchema(BaseSchema): 'permissions', 'rate_limit', 'reply_to_email_addresses', - 'returned_letters', 'service_sms_senders', 'templates', 'users', @@ -365,7 +354,7 @@ class BaseTemplateSchema(BaseSchema): class Meta(BaseSchema.Meta): model = models.Template - exclude = ("service_id", "jobs", "service_letter_contact_id") + exclude = ("service_id", "jobs") class TemplateSchema(BaseTemplateSchema, UUIDsAsStringsMixin): @@ -381,22 +370,12 @@ class TemplateSchema(BaseTemplateSchema, UUIDsAsStringsMixin): @validates_schema def validate_type(self, data, **kwargs): - if data.get('template_type') in {models.EMAIL_TYPE, models.LETTER_TYPE}: + if data.get('template_type') == models.EMAIL_TYPE: subject = data.get('subject') if not subject or subject.strip() == '': raise ValidationError('Invalid template subject', 'subject') -class TemplateSchemaNested(TemplateSchema): - """ - Contains extra 'is_precompiled_letter' field for use with NotificationWithTemplateSchema - """ - is_precompiled_letter = fields.Method('get_is_precompiled_letter') - - def get_is_precompiled_letter(self, template): - return template.is_precompiled_letter - - class TemplateSchemaNoDetail(TemplateSchema): class Meta(TemplateSchema.Meta): exclude = TemplateSchema.Meta.exclude + ( @@ -405,13 +384,11 @@ class TemplateSchemaNoDetail(TemplateSchema): 'created_by', 'created_by_id', 'hidden', - 'postage', 'process_type', 'redact_personalisation', 'reply_to', 'reply_to_text', 'service', - 'service_letter_contact', 'subject', 'template_redacted', 'updated_at', @@ -545,7 +522,7 @@ class NotificationWithTemplateSchema(BaseSchema): exclude = ('_personalisation',) template = fields.Nested( - TemplateSchemaNested, + TemplateSchema, only=[ 'id', 'version', @@ -553,8 +530,7 @@ class NotificationWithTemplateSchema(BaseSchema): 'template_type', 'content', 'subject', - 'redact_personalisation', - 'is_precompiled_letter' + 'redact_personalisation' ], dump_only=True ) diff --git a/app/serialised_models.py b/app/serialised_models.py index 583c13591..628a12b2d 100644 --- a/app/serialised_models.py +++ b/app/serialised_models.py @@ -41,7 +41,6 @@ class SerialisedTemplate(SerialisedModel): 'archived', 'content', 'id', - 'postage', 'process_type', 'reply_to_text', 'subject', diff --git a/app/service/rest.py b/app/service/rest.py index e74c66e5a..449da3786 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -2,10 +2,6 @@ import itertools from datetime import datetime from flask import Blueprint, current_app, jsonify, request -from notifications_utils.letter_timings import ( - letter_can_be_cancelled, - too_late_to_cancel_letter, -) from notifications_utils.timezones import convert_utc_to_local_timezone from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound @@ -32,12 +28,6 @@ from app.dao.fact_notification_status_dao import ( ) from app.dao.inbound_numbers_dao import dao_allocate_number_for_service from app.dao.organisation_dao import dao_get_organisation_by_service_id -from app.dao.returned_letters_dao import ( - fetch_most_recent_returned_letter, - fetch_recent_returned_letter_count, - fetch_returned_letter_summary, - fetch_returned_letters, -) from app.dao.service_contact_list_dao import ( dao_archive_contact_list, dao_get_contact_list_by_id, @@ -63,13 +53,6 @@ from app.dao.service_guest_list_dao import ( dao_fetch_service_guest_list, dao_remove_service_guest_list, ) -from app.dao.service_letter_contact_dao import ( - add_letter_contact_for_service, - archive_letter_contact, - dao_get_letter_contact_by_id, - dao_get_letter_contacts_by_service_id, - update_letter_contact, -) from app.dao.service_sms_sender_dao import ( archive_sms_sender, dao_add_sms_sender_for_service, @@ -97,13 +80,9 @@ from app.dao.services_dao import ( from app.dao.templates_dao import dao_get_template_by_id from app.dao.users_dao import get_user_by_id from app.errors import InvalidRequest, register_errors -from app.letters.utils import letter_print_day from app.models import ( KEY_TYPE_NORMAL, - LETTER_TYPE, - NOTIFICATION_CANCELLED, EmailBranding, - LetterBranding, Permission, Service, ServiceContactList, @@ -122,11 +101,7 @@ from app.schemas import ( service_schema, ) from app.service import statistics -from app.service.send_notification import ( - send_one_off_notification, - send_pdf_letter_notification, -) -from app.service.send_pdf_letter_schema import send_pdf_letter_request +from app.service.send_notification import send_one_off_notification from app.service.sender import send_notification_to_service_users from app.service.service_contact_list_schema import ( create_service_contact_list_schema, @@ -137,17 +112,11 @@ from app.service.service_data_retention_schema import ( ) from app.service.service_senders_schema import ( add_service_email_reply_to_request, - add_service_letter_contact_block_request, add_service_sms_sender_request, ) from app.service.utils import get_guest_list_objects from app.user.users_schema import post_set_permissions_schema -from app.utils import ( - DATE_FORMAT, - DATETIME_FORMAT_NO_TIMEZONE, - get_prev_next_pagination_links, - midnight_n_days_ago, -) +from app.utils import get_prev_next_pagination_links service_blueprint = Blueprint('service', __name__) @@ -275,9 +244,6 @@ def update_service(service_id): if 'email_branding' in req_json: email_branding_id = req_json['email_branding'] service.email_branding = None if not email_branding_id else EmailBranding.query.get(email_branding_id) - if 'letter_branding' in req_json: - letter_branding_id = req_json['letter_branding'] - service.letter_branding = None if not letter_branding_id else LetterBranding.query.get(letter_branding_id) dao_update_service(service) if service_going_live: @@ -491,39 +457,6 @@ def get_notification_for_service(service_id, notification_id): ), 200 -@service_blueprint.route('//notifications//cancel', methods=['POST']) -def cancel_notification_for_service(service_id, notification_id): - notification = notifications_dao.get_notification_by_id(notification_id, service_id) - - if not notification: - raise InvalidRequest('Notification not found', status_code=404) - elif notification.notification_type != LETTER_TYPE: - raise InvalidRequest('Notification cannot be cancelled - only letters can be cancelled', status_code=400) - elif not letter_can_be_cancelled(notification.status, notification.created_at): - print_day = letter_print_day(notification.created_at) - if too_late_to_cancel_letter(notification.created_at): - message = "It’s too late to cancel this letter. Printing started {} at 5.30pm".format(print_day) - elif notification.status == 'cancelled': - message = "This letter has already been cancelled." - else: - message = ( - f"We could not cancel this letter. " - f"Letter status: {notification.status}, created_at: {notification.created_at}" - ) - raise InvalidRequest( - message, - status_code=400) - - updated_notification = notifications_dao.update_notification_status_by_id( - notification_id, - NOTIFICATION_CANCELLED, - ) - - return jsonify( - notification_with_template_schema.dump(updated_notification) - ), 200 - - def search_for_notification_by_to_field(service_id, search_term, statuses, notification_type): results = notifications_dao.dao_get_notifications_by_recipient_or_reference( service_id=service_id, @@ -734,7 +667,6 @@ def get_monthly_template_usage(service_id): 'month': i.month, 'year': i.year, 'count': i.count, - 'is_precompiled_letter': i.is_precompiled_letter } ) @@ -749,13 +681,6 @@ def create_one_off_notification(service_id): return jsonify(resp), 201 -@service_blueprint.route('//send-pdf-letter', methods=['POST']) -def create_pdf_letter(service_id): - data = validate(request.get_json(), send_pdf_letter_request) - resp = send_pdf_letter_notification(service_id, data) - return jsonify(resp), 201 - - @service_blueprint.route('//email-reply-to', methods=["GET"]) def get_email_reply_to_addresses(service_id): result = dao_get_reply_to_by_service_id(service_id) @@ -823,48 +748,6 @@ def delete_service_reply_to_email_address(service_id, reply_to_email_id): return jsonify(data=archived_reply_to.serialize()), 200 -@service_blueprint.route('//letter-contact', methods=["GET"]) -def get_letter_contacts(service_id): - result = dao_get_letter_contacts_by_service_id(service_id) - return jsonify([i.serialize() for i in result]), 200 - - -@service_blueprint.route('//letter-contact/', methods=["GET"]) -def get_letter_contact_by_id(service_id, letter_contact_id): - result = dao_get_letter_contact_by_id(service_id=service_id, letter_contact_id=letter_contact_id) - return jsonify(result.serialize()), 200 - - -@service_blueprint.route('//letter-contact', methods=['POST']) -def add_service_letter_contact(service_id): - # validate the service exists, throws ResultNotFound exception. - dao_fetch_service_by_id(service_id) - form = validate(request.get_json(), add_service_letter_contact_block_request) - new_letter_contact = add_letter_contact_for_service(service_id=service_id, - contact_block=form['contact_block'], - is_default=form.get('is_default', True)) - return jsonify(data=new_letter_contact.serialize()), 201 - - -@service_blueprint.route('//letter-contact/', methods=['POST']) -def update_service_letter_contact(service_id, letter_contact_id): - # validate the service exists, throws ResultNotFound exception. - dao_fetch_service_by_id(service_id) - form = validate(request.get_json(), add_service_letter_contact_block_request) - new_reply_to = update_letter_contact(service_id=service_id, - letter_contact_id=letter_contact_id, - contact_block=form['contact_block'], - is_default=form.get('is_default', True)) - return jsonify(data=new_reply_to.serialize()), 200 - - -@service_blueprint.route('//letter-contact//archive', methods=['POST']) -def delete_service_letter_contact(service_id, letter_contact_id): - archived_letter_contact = archive_letter_contact(service_id, letter_contact_id) - - return jsonify(data=archived_letter_contact.serialize()), 200 - - @service_blueprint.route('//sms-sender', methods=['POST']) def add_service_sms_sender(service_id): dao_fetch_service_by_id(service_id) @@ -1042,71 +925,6 @@ def check_if_reply_to_address_already_in_use(service_id, email_address): ) -@service_blueprint.route('//returned-letter-statistics', methods=['GET']) -def returned_letter_statistics(service_id): - - most_recent = fetch_most_recent_returned_letter(service_id) - - if not most_recent: - return jsonify({ - 'returned_letter_count': 0, - 'most_recent_report': None, - }) - - most_recent_reported_at = datetime.combine( - most_recent.reported_at, datetime.min.time() - ) - - if most_recent_reported_at < midnight_n_days_ago(7): - return jsonify({ - 'returned_letter_count': 0, - 'most_recent_report': most_recent.reported_at.strftime(DATETIME_FORMAT_NO_TIMEZONE), - }) - - count = fetch_recent_returned_letter_count(service_id) - - return jsonify({ - 'returned_letter_count': count.returned_letter_count, - 'most_recent_report': most_recent.reported_at.strftime(DATETIME_FORMAT_NO_TIMEZONE), - }) - - -@service_blueprint.route('//returned-letter-summary', methods=['GET']) -def returned_letter_summary(service_id): - results = fetch_returned_letter_summary(service_id) - - json_results = [{'returned_letter_count': x.returned_letter_count, - 'reported_at': x.reported_at.strftime(DATE_FORMAT) - } for x in results] - - return jsonify(json_results) - - -@service_blueprint.route('//returned-letters', methods=['GET']) -def get_returned_letters(service_id): - results = fetch_returned_letters(service_id=service_id, report_date=request.args.get('reported_at')) - - json_results = [ - {'notification_id': x.notification_id, - # client reference can only be added on API letters - 'client_reference': x.client_reference if x.api_key_id else None, - 'reported_at': x.reported_at.strftime(DATE_FORMAT), - 'created_at': x.created_at.strftime(DATETIME_FORMAT_NO_TIMEZONE), - # it doesn't make sense to show hidden/precompiled templates - 'template_name': x.template_name if not x.hidden else None, - 'template_id': x.template_id if not x.hidden else None, - 'template_version': x.template_version if not x.hidden else None, - 'user_name': x.user_name or 'API', - 'email_address': x.email_address or 'API', - 'original_file_name': x.original_file_name, - 'job_row_number': x.job_row_number, - # the file name for a letter uploaded via the UI - 'uploaded_letter_file_name': x.client_reference if x.hidden and not x.api_key_id else None - } for x in results] - - return jsonify(sorted(json_results, key=lambda i: i['created_at'], reverse=True)) - - @service_blueprint.route('//contact-list', methods=['GET']) def get_contact_list(service_id): contact_lists = dao_get_contact_lists(service_id) diff --git a/app/service/send_notification.py b/app/service/send_notification.py index 28208eea8..9f7f7685b 100644 --- a/app/service/send_notification.py +++ b/app/service/send_notification.py @@ -1,46 +1,18 @@ -import urllib - -from flask import current_app -from notifications_utils.s3 import S3ObjectNotFound -from notifications_utils.s3 import s3download as utils_s3download from sqlalchemy.orm.exc import NoResultFound -from app import create_random_identifier from app.config import QueueNames -from app.dao.notifications_dao import ( - _update_notification_status, - get_notification_by_id, -) from app.dao.service_email_reply_to_dao import dao_get_reply_to_by_id from app.dao.service_sms_sender_dao import dao_get_service_sms_senders_by_id from app.dao.services_dao import dao_fetch_service_by_id -from app.dao.templates_dao import ( - dao_get_template_by_id_and_service_id, - get_precompiled_letter_template, -) +from app.dao.templates_dao import dao_get_template_by_id_and_service_id from app.dao.users_dao import get_user_by_id -from app.letters.utils import ( - generate_letter_pdf_filename, - get_billable_units_for_letter_page_count, - get_page_count, - move_uploaded_pdf_to_letters_bucket, -) -from app.models import ( - EMAIL_TYPE, - KEY_TYPE_NORMAL, - LETTER_TYPE, - NOTIFICATION_DELIVERED, - PRIORITY, - SMS_TYPE, -) +from app.models import EMAIL_TYPE, KEY_TYPE_NORMAL, PRIORITY, SMS_TYPE from app.notifications.process_notifications import ( persist_notification, send_notification_to_queue, ) from app.notifications.validators import ( - check_service_has_permission, check_service_over_daily_message_limit, - validate_address, validate_and_format_recipient, validate_template, ) @@ -57,9 +29,8 @@ def validate_created_by(service, created_by_id): raise BadRequestError(message=message) +# TODO: possibly unnecessary after removing letters def create_one_off_reference(template_type): - if template_type == LETTER_TYPE: - return create_random_identifier() return None @@ -83,16 +54,7 @@ def send_one_off_notification(service_id, post_data): notification_type=template.template_type, allow_guest_list_recipients=False, ) - postage = None client_reference = None - if template.template_type == LETTER_TYPE: - # Validate address and set postage to europe|rest-of-world if international letter, - # otherwise persist_notification with use template postage - postage = validate_address(service, personalisation) - if not postage: - postage = template.postage - from app.utils import get_reference_from_personalisation - client_reference = get_reference_from_personalisation(personalisation) validate_created_by(service, post_data['created_by']) @@ -115,23 +77,16 @@ def send_one_off_notification(service_id, post_data): created_by_id=post_data['created_by'], reply_to_text=reply_to, reference=create_one_off_reference(template.template_type), - postage=postage, client_reference=client_reference ) queue_name = QueueNames.PRIORITY if template.process_type == PRIORITY else None - if template.template_type == LETTER_TYPE and service.research_mode: - _update_notification_status( - notification, - NOTIFICATION_DELIVERED, - ) - else: - send_notification_to_queue( - notification=notification, - research_mode=service.research_mode, - queue=queue_name, - ) + send_notification_to_queue( + notification=notification, + research_mode=service.research_mode, + queue=queue_name, + ) return {'id': str(notification.id)} @@ -151,72 +106,3 @@ def get_reply_to_text(notification_type, sender_id, service, template): else: reply_to = template.get_reply_to_text() return reply_to - - -def send_pdf_letter_notification(service_id, post_data): - service = dao_fetch_service_by_id(service_id) - - check_service_has_permission(LETTER_TYPE, [ - p.permission for p in service.permissions - ]) - check_service_over_daily_message_limit(KEY_TYPE_NORMAL, service) - validate_created_by(service, post_data['created_by']) - validate_and_format_recipient( - send_to=post_data['recipient_address'], - key_type=KEY_TYPE_NORMAL, - service=service, - notification_type=LETTER_TYPE, - allow_guest_list_recipients=False, - ) - - # notification already exists e.g. if the user clicked send in different tabs - if get_notification_by_id(post_data['file_id']): - return {'id': str(post_data['file_id'])} - - template = get_precompiled_letter_template(service.id) - file_location = 'service-{}/{}.pdf'.format(service.id, post_data['file_id']) - - try: - letter = utils_s3download(current_app.config['TRANSIENT_UPLOADED_LETTERS'], file_location) - except S3ObjectNotFound as e: - current_app.logger.warning('Letter {}.pdf not in transient {} bucket'.format( - post_data['file_id'], current_app.config['TRANSIENT_UPLOADED_LETTERS']) - ) - - raise e - - # Getting the page count won't raise an error since admin has already checked the PDF is valid - page_count = get_page_count(letter.read()) - billable_units = get_billable_units_for_letter_page_count(page_count) - - personalisation = { - 'address_line_1': post_data['filename'] - } - - notification = persist_notification( - notification_id=post_data['file_id'], - template_id=template.id, - template_version=template.version, - recipient=urllib.parse.unquote(post_data['recipient_address']), - service=service, - personalisation=personalisation, - notification_type=LETTER_TYPE, - api_key_id=None, - key_type=KEY_TYPE_NORMAL, - reference=create_one_off_reference(LETTER_TYPE), - client_reference=post_data['filename'], - created_by_id=post_data['created_by'], - billable_units=billable_units, - postage=post_data['postage'] or template.postage, - ) - - upload_filename = generate_letter_pdf_filename( - reference=notification.reference, - created_at=notification.created_at, - ignore_folder=False, - postage=notification.postage - ) - - move_uploaded_pdf_to_letters_bucket(file_location, upload_filename) - - return {'id': str(notification.id)} diff --git a/app/service/send_pdf_letter_schema.py b/app/service/send_pdf_letter_schema.py deleted file mode 100644 index e512c834d..000000000 --- a/app/service/send_pdf_letter_schema.py +++ /dev/null @@ -1,14 +0,0 @@ -send_pdf_letter_request = { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "POST send uploaded pdf letter", - "type": "object", - "title": "Send an uploaded pdf letter", - "properties": { - "postage": {"type": "string", "format": "postage"}, - "filename": {"type": "string"}, - "created_by": {"type": "string"}, - "file_id": {"type": "string"}, - "recipient_address": {"type": "string"} - }, - "required": ["postage", "filename", "created_by", "file_id", "recipient_address"] -} diff --git a/app/service/service_data_retention_schema.py b/app/service/service_data_retention_schema.py index 24944fedf..ea7efbe00 100644 --- a/app/service/service_data_retention_schema.py +++ b/app/service/service_data_retention_schema.py @@ -5,7 +5,7 @@ add_service_data_retention_request = { "type": "object", "properties": { "days_of_retention": {"type": "integer"}, - "notification_type": {"enum": ["sms", "letter", "email"]}, + "notification_type": {"enum": ["sms", "email"]}, }, "required": ["days_of_retention", "notification_type"] } diff --git a/app/service/service_senders_schema.py b/app/service/service_senders_schema.py index e39765d30..1b4ae2489 100644 --- a/app/service/service_senders_schema.py +++ b/app/service/service_senders_schema.py @@ -13,19 +13,6 @@ add_service_email_reply_to_request = { } -add_service_letter_contact_block_request = { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "POST service letter contact block", - "type": "object", - "title": "Add new letter contact block for service", - "properties": { - "contact_block": {"type": "string"}, - "is_default": {"type": "boolean"} - }, - "required": ["contact_block", "is_default"] -} - - add_service_sms_sender_request = { "$schema": "http://json-schema.org/draft-07/schema#", "description": "POST add service SMS sender", diff --git a/app/template/rest.py b/app/template/rest.py index 271458f76..204ce8c73 100644 --- a/app/template/rest.py +++ b/app/template/rest.py @@ -1,16 +1,8 @@ -import base64 -from io import BytesIO - -import botocore -from flask import Blueprint, current_app, jsonify, request +from flask import Blueprint, jsonify, request from notifications_utils import SMS_CHAR_COUNT_LIMIT -from notifications_utils.pdf import extract_page_from_pdf from notifications_utils.template import SMSMessageTemplate -from PyPDF2.errors import PdfReadError -from requests import post as requests_post from sqlalchemy.orm.exc import NoResultFound -from app.dao.notifications_dao import get_notification_by_id from app.dao.services_dao import dao_fetch_service_by_id from app.dao.template_folder_dao import ( dao_get_template_folder_by_id_and_service_id, @@ -18,17 +10,13 @@ from app.dao.template_folder_dao import ( from app.dao.templates_dao import ( dao_create_template, dao_get_all_templates_for_service, - dao_get_template_by_id, dao_get_template_by_id_and_service_id, dao_get_template_versions, dao_redact_template, dao_update_template, - dao_update_template_reply_to, - get_precompiled_letter_template, ) from app.errors import InvalidRequest, register_errors -from app.letters.utils import get_letter_pdf_and_metadata -from app.models import LETTER_TYPE, SECOND_CLASS, SMS_TYPE, Template +from app.models import SMS_TYPE, Template from app.notifications.validators import check_reply_to, service_has_permission from app.schema_validation import validate from app.schemas import ( @@ -84,9 +72,6 @@ def create_template(service_id): errors = {'template_type': [message]} raise InvalidRequest(errors, 403) - if not new_template.postage and new_template.template_type == LETTER_TYPE: - new_template.postage = SECOND_CLASS - new_template.service = fetched_service over_limit = _content_count_greater_than_limit(new_template.content, new_template.template_type) @@ -125,11 +110,6 @@ def update_template(service_id, template_id): if data.get('redact_personalisation') is True: return redact_template(fetched_template, data) - if "reply_to" in data: - check_reply_to(service_id, data.get("reply_to"), fetched_template.template_type) - updated = dao_update_template_reply_to(template_id=template_id, reply_to=data.get("reply_to")) - return jsonify(data=template_schema.dump(updated)), 200 - current_data = dict(template_schema.dump(fetched_template).items()) updated_template = dict(template_schema.dump(fetched_template).items()) updated_template.update(data) @@ -151,14 +131,6 @@ def update_template(service_id, template_id): return jsonify(data=template_schema.dump(update_dict)), 200 -@template_blueprint.route('/precompiled', methods=['GET']) -def get_precompiled_template_for_service(service_id): - template = get_precompiled_letter_template(service_id) - template_dict = template_schema.dump(template) - - return jsonify(template_dict), 200 - - @template_blueprint.route('', methods=['GET']) def get_all_templates_for_service(service_id): templates = dao_get_all_templates_for_service(service_id=service_id) @@ -221,7 +193,7 @@ def get_template_versions(service_id, template_id): def _template_has_not_changed(current_data, updated_template): return all( current_data[key] == updated_template[key] - for key in ('name', 'content', 'subject', 'archived', 'process_type', 'postage') + for key in ('name', 'content', 'subject', 'archived', 'process_type') ) @@ -236,116 +208,3 @@ def redact_template(template, data): if not template.redact_personalisation: dao_redact_template(template, data['created_by']) return 'null', 200 - - -@template_blueprint.route('/preview//', methods=['GET']) -def preview_letter_template_by_notification_id(service_id, notification_id, file_type): - if file_type not in ('pdf', 'png'): - raise InvalidRequest({'content': ["file_type must be pdf or png"]}, status_code=400) - - page = request.args.get('page') - - notification = get_notification_by_id(notification_id) - template = dao_get_template_by_id(notification.template_id, notification.template_version) - metadata = {} - - if template.is_precompiled_letter: - try: - - pdf_file, metadata = get_letter_pdf_and_metadata(notification) - - except botocore.exceptions.ClientError as e: - raise InvalidRequest( - 'Error extracting requested page from PDF file for notification_id {} type {} {}'.format( - notification_id, type(e), e), - status_code=500 - ) - - page_number = page if page else "1" - content = base64.b64encode(pdf_file).decode('utf-8') - content_outside_printable_area = metadata.get("message") == "content-outside-printable-area" - page_is_in_invalid_pages = page_number in metadata.get('invalid_pages', '[]') - - if content_outside_printable_area and (file_type == "pdf" or page_is_in_invalid_pages): - path = '/precompiled/overlay.{}'.format(file_type) - query_string = '?page_number={}'.format(page_number) if file_type == 'png' else '' - content = pdf_file - elif file_type == 'png': - query_string = '?hide_notify=true' if page_number == '1' else '' - path = '/precompiled-preview.png' - else: - path = None - - if file_type == 'png': - try: - pdf_page = extract_page_from_pdf(BytesIO(pdf_file), int(page_number) - 1) - if content_outside_printable_area and page_is_in_invalid_pages: - content = pdf_page - else: - content = base64.b64encode(pdf_page).decode('utf-8') - except PdfReadError as e: - raise InvalidRequest( - 'Error extracting requested page from PDF file for notification_id {} type {} {}'.format( - notification_id, type(e), e), - status_code=500 - ) - - if path: - url = current_app.config['TEMPLATE_PREVIEW_API_HOST'] + path + query_string - response_content = _get_png_preview_or_overlaid_pdf(url, content, notification.id, json=False) - else: - response_content = content - else: - - template_for_letter_print = { - "id": str(notification.template_id), - "subject": template.subject, - "content": template.content, - "version": str(template.version), - "template_type": template.template_type - } - - service = dao_fetch_service_by_id(service_id) - letter_logo_filename = service.letter_branding and service.letter_branding.filename - data = { - 'letter_contact_block': notification.reply_to_text, - 'template': template_for_letter_print, - 'values': notification.personalisation, - 'date': notification.created_at.isoformat(), - 'filename': letter_logo_filename, - } - - url = '{}/preview.{}{}'.format( - current_app.config['TEMPLATE_PREVIEW_API_HOST'], - file_type, - '?page={}'.format(page) if page else '' - ) - response_content = _get_png_preview_or_overlaid_pdf(url, data, notification.id, json=True) - - return jsonify({"content": response_content, "metadata": metadata}) - - -def _get_png_preview_or_overlaid_pdf(url, data, notification_id, json=True): - if json: - resp = requests_post( - url, - json=data, - headers={'Authorization': 'Token {}'.format(current_app.config['TEMPLATE_PREVIEW_API_KEY'])} - ) - else: - resp = requests_post( - url, - data=data, - headers={'Authorization': 'Token {}'.format(current_app.config['TEMPLATE_PREVIEW_API_KEY'])} - ) - - if resp.status_code != 200: - raise InvalidRequest( - 'Error generating preview letter for {} Status code: {} {}'.format( - notification_id, - resp.status_code, - resp.content - ), status_code=500 - ) - - return base64.b64encode(resp.content).decode('utf-8') diff --git a/app/template/template_schemas.py b/app/template/template_schemas.py index fb38de232..932728b18 100644 --- a/app/template/template_schemas.py +++ b/app/template/template_schemas.py @@ -15,11 +15,10 @@ post_create_template_schema = { "subject": {"type": "string"}, "created_by": uuid, "parent_folder_id": uuid, - "postage": {"type": "string", "format": "postage"}, }, "if": { "properties": { - "template_type": {"enum": ["email", "letter"]} + "template_type": {"enum": ["email"]} } }, "then": {"required": ["subject"]}, @@ -39,7 +38,6 @@ post_update_template_schema = { "process_type": {"enum": TEMPLATE_PROCESS_TYPE}, "content": {"type": "string"}, "subject": {"type": "string"}, - "postage": {"type": "string", "format": "postage"}, "reply_to": nullable_uuid, "created_by": uuid, "archived": {"type": "boolean"}, diff --git a/app/template_statistics/rest.py b/app/template_statistics/rest.py index 8188855fc..05218995d 100644 --- a/app/template_statistics/rest.py +++ b/app/template_statistics/rest.py @@ -37,7 +37,6 @@ def get_template_statistics_for_service_by_day(service_id): 'template_id': str(row.template_id), 'template_name': row.template_name, 'template_type': row.notification_type, - 'is_precompiled_letter': row.is_precompiled_letter, 'status': row.status } for row in data diff --git a/app/upload/rest.py b/app/upload/rest.py index 30a93cd39..3dc380107 100644 --- a/app/upload/rest.py +++ b/app/upload/rest.py @@ -1,17 +1,11 @@ -from datetime import datetime - -from flask import Blueprint, abort, current_app, jsonify, request +from flask import Blueprint, current_app, jsonify, request from app.dao.fact_notification_status_dao import ( fetch_notification_statuses_for_job, ) from app.dao.jobs_dao import dao_get_notification_outcomes_for_job -from app.dao.uploads_dao import ( - dao_get_uploaded_letters_by_print_date, - dao_get_uploads_by_service_id, -) +from app.dao.uploads_dao import dao_get_uploads_by_service_id from app.errors import register_errors -from app.schemas import notification_with_template_schema from app.utils import midnight_n_days_ago, pagination_links upload_blueprint = Blueprint('upload', __name__, url_prefix='/service//upload') @@ -73,31 +67,3 @@ def get_paginated_uploads(service_id, limit_days, page): service_id=service_id ) } - - -@upload_blueprint.route('/uploaded-letters/', methods=['GET']) -def get_uploaded_letter_by_service_and_print_day(service_id, letter_print_date): - try: - letter_print_datetime = datetime.strptime(letter_print_date, '%Y-%m-%d') - except ValueError: - abort(400) - pagination = dao_get_uploaded_letters_by_print_date( - service_id, - letter_print_date=letter_print_datetime, - page=request.args.get('page', type=int), - page_size=current_app.config['PAGE_SIZE'] - ) - return jsonify({ - 'notifications': notification_with_template_schema.dump( - pagination.items, - many=True, - ), - 'page_size': pagination.per_page, - 'total': pagination.total, - 'links': pagination_links( - pagination, - '.get_uploaded_letter_by_service_and_print_day', - service_id=service_id, - letter_print_date=letter_print_date, - ), - }) diff --git a/app/utils.py b/app/utils.py index 883eff9ce..dbd9b3056 100644 --- a/app/utils.py +++ b/app/utils.py @@ -2,11 +2,7 @@ from datetime import datetime, timedelta from os import getenv from flask import url_for -from notifications_utils.template import ( - HTMLEmailTemplate, - LetterPrintTemplate, - SMSMessageTemplate, -) +from notifications_utils.template import HTMLEmailTemplate, SMSMessageTemplate from notifications_utils.timezones import convert_local_timezone_to_utc from sqlalchemy import func @@ -46,11 +42,10 @@ def url_with_token(data, url, config, base_url=None): def get_template_instance(template, values): - from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE + from app.models import EMAIL_TYPE, SMS_TYPE return { SMS_TYPE: SMSMessageTemplate, EMAIL_TYPE: HTMLEmailTemplate, - LETTER_TYPE: LetterPrintTemplate, }[template['template_type']](template, values) @@ -86,14 +81,12 @@ def get_local_month_from_utc_column(column): def get_public_notify_type_text(notify_type, plural=False): - from app.models import PRECOMPILED_LETTER, SMS_TYPE, UPLOAD_DOCUMENT + from app.models import SMS_TYPE, UPLOAD_DOCUMENT notify_type_text = notify_type if notify_type == SMS_TYPE: notify_type_text = 'text message' elif notify_type == UPLOAD_DOCUMENT: notify_type_text = 'document' - elif notify_type == PRECOMPILED_LETTER: - notify_type_text = 'precompiled letter' return '{}{}'.format(notify_type_text, 's' if plural else '') diff --git a/app/v2/errors.py b/app/v2/errors.py index bd3e3c7e1..fbe7d2801 100644 --- a/app/v2/errors.py +++ b/app/v2/errors.py @@ -49,11 +49,6 @@ class ValidationError(InvalidRequest): self.message = message if message else self.message -class PDFNotReadyError(BadRequestError): - def __init__(self): - super().__init__(message='PDF not available yet, try again later', status_code=400) - - def register_errors(blueprint): @blueprint.errorhandler(InvalidEmailError) def invalid_format(error): diff --git a/app/v2/notifications/create_response.py b/app/v2/notifications/create_response.py index 6c9532c44..fea4d2117 100644 --- a/app/v2/notifications/create_response.py +++ b/app/v2/notifications/create_response.py @@ -36,20 +36,6 @@ def create_post_email_response_from_notification( return resp -def create_post_letter_response_from_notification( - notification_id, client_reference, template_id, template_version, service_id, - content, subject, url_root -): - resp = __create_notification_response( - notification_id, client_reference, template_id, template_version, service_id, url_root - ) - resp['content'] = { - "body": content, - "subject": subject - } - return resp - - def __create_notification_response( notification_id, client_reference, template_id, template_version, service_id, url_root ): diff --git a/app/v2/notifications/get_notifications.py b/app/v2/notifications/get_notifications.py index 99243e01d..403f9742d 100644 --- a/app/v2/notifications/get_notifications.py +++ b/app/v2/notifications/get_notifications.py @@ -1,18 +1,8 @@ -from io import BytesIO - -from flask import current_app, jsonify, request, send_file, url_for +from flask import current_app, jsonify, request, url_for from app import api_user, authenticated_service from app.dao import notifications_dao -from app.letters.utils import get_letter_pdf_and_metadata -from app.models import ( - LETTER_TYPE, - NOTIFICATION_PENDING_VIRUS_CHECK, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_VIRUS_SCAN_FAILED, -) from app.schema_validation import validate -from app.v2.errors import BadRequestError, PDFNotReadyError from app.v2.notifications import v2_notification_blueprint from app.v2.notifications.notification_schemas import ( get_notifications_request, @@ -30,34 +20,6 @@ def get_notification_by_id(notification_id): return jsonify(notification.serialize()), 200 -@v2_notification_blueprint.route('//pdf', methods=['GET']) -def get_pdf_for_notification(notification_id): - _data = {"notification_id": notification_id} - validate(_data, notification_by_id) - notification = notifications_dao.get_notification_by_id( - notification_id, authenticated_service.id, _raise=True - ) - - if notification.notification_type != LETTER_TYPE: - raise BadRequestError(message="Notification is not a letter") - - if notification.status == NOTIFICATION_VIRUS_SCAN_FAILED: - raise BadRequestError(message='File did not pass the virus scan') - - if notification.status == NOTIFICATION_TECHNICAL_FAILURE: - raise BadRequestError(message='PDF not available for letters in status {}'.format(notification.status)) - - if notification.status == NOTIFICATION_PENDING_VIRUS_CHECK: - raise PDFNotReadyError() - - try: - pdf_data, metadata = get_letter_pdf_and_metadata(notification) - except Exception: - raise PDFNotReadyError() - - return send_file(path_or_file=BytesIO(pdf_data), mimetype='application/pdf') - - @v2_notification_blueprint.route("", methods=['GET']) def get_notifications(): _data = request.args.to_dict(flat=False) diff --git a/app/v2/notifications/notification_schemas.py b/app/v2/notifications/notification_schemas.py index 5d1d923b4..378a3ce09 100644 --- a/app/v2/notifications/notification_schemas.py +++ b/app/v2/notifications/notification_schemas.py @@ -1,9 +1,4 @@ -from app.models import ( - NOTIFICATION_STATUS_LETTER_ACCEPTED, - NOTIFICATION_STATUS_LETTER_RECEIVED, - NOTIFICATION_STATUS_TYPES, - NOTIFICATION_TYPES, -) +from app.models import NOTIFICATION_STATUS_TYPES, NOTIFICATION_TYPES from app.schema_validation.definitions import personalisation, uuid template = { @@ -48,7 +43,7 @@ get_notification_response = { "line_5": {"type": ["string", "null"]}, "line_6": {"type": ["string", "null"]}, "postcode": {"type": ["string", "null"]}, - "type": {"enum": ["sms", "letter", "email"]}, + "type": {"enum": ["sms", "email"]}, "status": {"type": "string"}, "template": template, "body": {"type": "string"}, @@ -75,8 +70,7 @@ get_notifications_request = { "status": { "type": "array", "items": { - "enum": NOTIFICATION_STATUS_TYPES + - [NOTIFICATION_STATUS_LETTER_ACCEPTED + ', ' + NOTIFICATION_STATUS_LETTER_RECEIVED] + "enum": NOTIFICATION_STATUS_TYPES } }, "template_type": { @@ -216,60 +210,3 @@ post_email_response = { }, "required": ["id", "content", "uri", "template"] } - -post_letter_request = { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "POST letter notification schema", - "type": "object", - "title": "POST v2/notifications/letter", - "properties": { - "reference": {"type": "string"}, - "template_id": uuid, - "personalisation": personalisation - }, - "required": ["template_id", "personalisation"], - "additionalProperties": False -} - -post_precompiled_letter_request = { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "POST precompiled letter notification schema", - "type": "object", - "title": "POST v2/notifications/letter", - "properties": { - "reference": {"type": "string"}, - "content": {"type": "string"}, - "postage": {"type": "string", "format": "postage"} - }, - "required": ["reference", "content"], - "additionalProperties": False -} - -letter_content = { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "Letter content for POST letter notification", - "type": "object", - "title": "notification letter content", - "properties": { - "body": {"type": "string"}, - "subject": {"type": "string"} - }, - "required": ["body", "subject"] -} - -post_letter_response = { - "$schema": "http://json-schema.org/draft-07/schema#", - "description": "POST sms notification response schema", - "type": "object", - "title": "response v2/notifications/letter", - "properties": { - "id": uuid, - "reference": {"type": ["string", "null"]}, - "content": letter_content, - "uri": {"type": "string", "format": "uri"}, - "template": template, - # letters cannot be scheduled - "scheduled_for": {"type": "null"} - }, - "required": ["id", "content", "uri", "template"] -} diff --git a/app/v2/notifications/post_notifications.py b/app/v2/notifications/post_notifications.py index 756c4d846..55a3d7cc2 100644 --- a/app/v2/notifications/post_notifications.py +++ b/app/v2/notifications/post_notifications.py @@ -1,4 +1,3 @@ -import base64 import functools import uuid from datetime import datetime @@ -13,36 +12,18 @@ from app import ( authenticated_service, document_download_client, encryption, - notify_celery, ) -from app.celery.letters_pdf_tasks import ( - get_pdf_for_templated_letter, - sanitise_letter, -) -from app.celery.research_mode_tasks import create_fake_letter_response_file from app.celery.tasks import save_api_email, save_api_sms from app.clients.document_download import DocumentDownloadError -from app.config import QueueNames, TaskNames -from app.dao.dao_utils import transaction -from app.dao.templates_dao import get_precompiled_letter_template -from app.letters.utils import upload_letter_pdf +from app.config import QueueNames from app.models import ( EMAIL_TYPE, KEY_TYPE_NORMAL, - KEY_TYPE_TEAM, - KEY_TYPE_TEST, - LETTER_TYPE, NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_PENDING_VIRUS_CHECK, - NOTIFICATION_SENDING, PRIORITY, SMS_TYPE, Notification, ) -from app.notifications.process_letter_notifications import ( - create_letter_notification, -) from app.notifications.process_notifications import ( persist_notification, send_notification_to_queue_detached, @@ -55,7 +36,6 @@ from app.notifications.validators import ( check_service_email_reply_to_id, check_service_has_permission, check_service_sms_sender_id, - validate_address, validate_and_format_recipient, validate_template, ) @@ -65,13 +45,10 @@ from app.v2.errors import BadRequestError from app.v2.notifications import v2_notification_blueprint from app.v2.notifications.create_response import ( create_post_email_response_from_notification, - create_post_letter_response_from_notification, create_post_sms_response_from_notification, ) from app.v2.notifications.notification_schemas import ( post_email_request, - post_letter_request, - post_precompiled_letter_request, post_sms_request, ) from app.v2.utils import get_valid_json @@ -82,40 +59,6 @@ POST_NOTIFICATION_JSON_PARSE_DURATION_SECONDS = Histogram( ) -@v2_notification_blueprint.route('/{}'.format(LETTER_TYPE), methods=['POST']) -def post_precompiled_letter_notification(): - request_json = get_valid_json() - if 'content' not in (request_json or {}): - return post_notification(LETTER_TYPE) - - form = validate(request_json, post_precompiled_letter_request) - - # Check permission to send letters - check_service_has_permission(LETTER_TYPE, authenticated_service.permissions) - - check_rate_limiting(authenticated_service, api_user) - - template = get_precompiled_letter_template(authenticated_service.id) - - # For precompiled letters the to field will be set to Provided as PDF until the validation passes, - # then the address of the letter will be set as the to field - form['personalisation'] = { - 'address_line_1': 'Provided as PDF' - } - - notification = process_letter_notification( - letter_data=form, - api_key=api_user, - service=authenticated_service, - template=template, - template_with_content=None, # not required for precompiled - reply_to_text='', # not required for precompiled - precompiled=True - ) - - return jsonify(notification), 201 - - @v2_notification_blueprint.route('/', methods=['POST']) def post_notification(notification_type): with POST_NOTIFICATION_JSON_PARSE_DURATION_SECONDS.time(): @@ -125,8 +68,6 @@ def post_notification(notification_type): form = validate(request_json, post_email_request) elif notification_type == SMS_TYPE: form = validate(request_json, post_sms_request) - elif notification_type == LETTER_TYPE: - form = validate(request_json, post_letter_request) else: abort(404) @@ -144,25 +85,15 @@ def post_notification(notification_type): reply_to = get_reply_to_text(notification_type, form, template) - if notification_type == LETTER_TYPE: - notification = process_letter_notification( - letter_data=form, - api_key=api_user, - service=authenticated_service, - template=template, - template_with_content=template_with_content, - reply_to_text=reply_to - ) - else: - notification = process_sms_or_email_notification( - form=form, - notification_type=notification_type, - template=template, - template_with_content=template_with_content, - template_process_type=template.process_type, - service=authenticated_service, - reply_to_text=reply_to - ) + notification = process_sms_or_email_notification( + form=form, + notification_type=notification_type, + template=template, + template_with_content=template_with_content, + template_process_type=template.process_type, + service=authenticated_service, + reply_to_text=reply_to + ) return jsonify(notification), 201 @@ -342,113 +273,6 @@ def process_document_uploads(personalisation_data, service, simulated=False): return personalisation_data, len(file_keys) -def process_letter_notification( - *, letter_data, api_key, service, template, template_with_content, reply_to_text, precompiled=False -): - if api_key.key_type == KEY_TYPE_TEAM: - raise BadRequestError(message='Cannot send letters with a team api key', status_code=403) - - if not service.research_mode and service.restricted and api_key.key_type != KEY_TYPE_TEST: - raise BadRequestError(message='Cannot send letters when service is in trial mode', status_code=403) - - if precompiled: - return process_precompiled_letter_notifications(letter_data=letter_data, - api_key=api_key, - service=service, - template=template, - reply_to_text=reply_to_text) - - postage = validate_address(service, letter_data['personalisation']) - - test_key = api_key.key_type == KEY_TYPE_TEST - - status = NOTIFICATION_CREATED - updated_at = None - if test_key: - # if we don't want to actually send the letter, then start it off in SENDING so we don't pick it up - if current_app.config['NOTIFY_ENVIRONMENT'] in ['preview', 'development']: - status = NOTIFICATION_SENDING - # mark test letter as delivered and do not create a fake response later - else: - status = NOTIFICATION_DELIVERED - updated_at = datetime.utcnow() - - queue = QueueNames.CREATE_LETTERS_PDF if not test_key else QueueNames.RESEARCH_MODE - - notification = create_letter_notification(letter_data=letter_data, - service=service, - template=template, - api_key=api_key, - status=status, - reply_to_text=reply_to_text, - updated_at=updated_at, - postage=postage - ) - - get_pdf_for_templated_letter.apply_async( - [str(notification.id)], - queue=queue - ) - - if test_key and current_app.config['NOTIFY_ENVIRONMENT'] in ['preview', 'development']: - create_fake_letter_response_file.apply_async( - (notification.reference,), - queue=queue - ) - - resp = create_response_for_post_notification( - notification_id=notification.id, - client_reference=notification.client_reference, - template_id=notification.template_id, - template_version=notification.template_version, - notification_type=notification.notification_type, - reply_to=reply_to_text, - service_id=notification.service_id, - template_with_content=template_with_content - ) - return resp - - -def process_precompiled_letter_notifications(*, letter_data, api_key, service, template, reply_to_text): - try: - status = NOTIFICATION_PENDING_VIRUS_CHECK - letter_content = base64.b64decode(letter_data['content']) - except ValueError: - raise BadRequestError(message='Cannot decode letter content (invalid base64 encoding)', status_code=400) - - with transaction(): - notification = create_letter_notification(letter_data=letter_data, - service=service, - template=template, - api_key=api_key, - status=status, - reply_to_text=reply_to_text) - filename = upload_letter_pdf(notification, letter_content, precompiled=True) - - resp = { - 'id': notification.id, - 'reference': notification.client_reference, - 'postage': notification.postage - } - - # call task to add the filename to anti virus queue - if current_app.config['ANTIVIRUS_ENABLED']: - current_app.logger.info('Calling task scan-file for {}'.format(filename)) - notify_celery.send_task( - name=TaskNames.SCAN_FILE, - kwargs={'filename': filename}, - queue=QueueNames.ANTIVIRUS, - ) - else: - # stub out antivirus in dev - sanitise_letter.apply_async( - [filename], - queue=QueueNames.LETTERS - ) - - return resp - - def get_reply_to_text(notification_type, form, template): reply_to = None if notification_type == EMAIL_TYPE: @@ -467,9 +291,6 @@ def get_reply_to_text(notification_type, form, template): else: reply_to = template.reply_to_text - elif notification_type == LETTER_TYPE: - reply_to = template.reply_to_text - return reply_to @@ -494,11 +315,6 @@ def create_response_for_post_notification( subject=template_with_content.subject, email_from='{}@{}'.format(authenticated_service.email_from, current_app.config['NOTIFY_EMAIL_DOMAIN']), ) - elif notification_type == LETTER_TYPE: - create_resp_partial = functools.partial( - create_post_letter_response_from_notification, - subject=template_with_content.subject, - ) resp = create_resp_partial( notification_id, client_reference, template_id, template_version, service_id, url_root=request.url_root, diff --git a/app/v2/template/template_schemas.py b/app/v2/template/template_schemas.py index fbefdfbfc..1b64c1b9c 100644 --- a/app/v2/template/template_schemas.py +++ b/app/v2/template/template_schemas.py @@ -36,7 +36,6 @@ get_template_by_id_response = { "body": {"type": "string"}, "subject": {"type": ["string", "null"]}, "name": {"type": "string"}, - "postage": {"type": "string", "format": "postage"} }, "required": ["id", "type", "created_at", "updated_at", "version", "created_by", "body", "name"], } @@ -64,7 +63,6 @@ post_template_preview_response = { "version": {"type": "integer"}, "body": {"type": "string"}, "subject": {"type": ["string", "null"]}, - "postage": {"type": "string", "format": "postage"}, "html": {"type": ["string", "null"]}, }, "required": ["id", "type", "version", "body"], @@ -80,5 +78,4 @@ def create_post_template_preview_response(template, template_object): "body": template_object.content_with_placeholders_filled_in, "html": getattr(template_object, 'html_body', None), "subject": getattr(template_object, 'subject', None), - "postage": template.postage } diff --git a/app/variables.py b/app/variables.py index c359453bc..f6fc3e3b4 100644 --- a/app/variables.py +++ b/app/variables.py @@ -1,8 +1,3 @@ -# all jobs for letters created via the api must have this filename -LETTER_API_FILENAME = 'letter submitted via api' -LETTER_TEST_API_FILENAME = 'test letter submitted via api' - - # S3 tags class Retention: KEY = 'retention' diff --git a/docs/queues-and-tasks.md b/docs/queues-and-tasks.md index 3e40cde4a..2be48ad95 100644 --- a/docs/queues-and-tasks.md +++ b/docs/queues-and-tasks.md @@ -12,14 +12,11 @@ There are a bunch of queues: - job tasks - retry tasks - notify internal tasks -- process ftp tasks -- create letters pdf tasks - service callbacks - service callbacks retry - letter tasks - sms callbacks - antivirus tasks -- sanitise letter tasks - save api email tasks - save api sms tasks @@ -29,7 +26,6 @@ And these tasks: - check if letters still in created - check if letters still pending virus check - check job status -- collate letter pdfs to be sent - create fake letter response file - create nightly billing - create nightly billing for day @@ -45,11 +41,9 @@ And these tasks: - delete verify codes - deliver email - deliver sms -- get pdf for templated letter - process incomplete jobs - process job - process returned letters list -- process sanitised letter - process ses result - process virus scan error - process virus scan failed @@ -59,9 +53,7 @@ And these tasks: - remove letter jobs - remove sms email jobs - replay created notifications -- resanitise pdf - run scheduled jobs -- sanitise letter - save api email - save api sms - save daily notification processing time diff --git a/migrations/versions/0306_letter_rates_price_rise.py b/migrations/versions/0306_letter_rates_price_rise.py index 892b0b7c3..ae9b77b10 100644 --- a/migrations/versions/0306_letter_rates_price_rise.py +++ b/migrations/versions/0306_letter_rates_price_rise.py @@ -26,8 +26,6 @@ from datetime import datetime from alembic import op from sqlalchemy.sql import text -from app.models import LetterRate - revision = '0306_letter_rates_price_rise' down_revision = '0305_add_gp_org_type' @@ -39,28 +37,28 @@ CHANGEOVER_DATE = datetime(2019, 9, 30, 23, 0) def upgrade(): # all old rates are going in the bin conn = op.get_bind() - conn.execute(text("UPDATE letter_rates SET end_date = :start WHERE end_date IS NULL"), start=CHANGEOVER_DATE) + # conn.execute(text("UPDATE letter_rates SET end_date = :start WHERE end_date IS NULL"), start=CHANGEOVER_DATE) - base_prices = { - 'second': 30, - 'first': 56, - } - op.bulk_insert(LetterRate.__table__, [ - { - 'id': uuid.uuid4(), - 'start_date': CHANGEOVER_DATE, - 'end_date': None, - 'sheet_count': sheet_count, - 'rate': (base_prices[post_class] + (5 * sheet_count)) / 100.0, - 'crown': crown, - 'post_class': post_class, - } - for sheet_count, crown, post_class in itertools.product( - range(1, 6), - [True, False], - ['first', 'second'] - ) - ]) + # base_prices = { + # 'second': 30, + # 'first': 56, + # } + # op.bulk_insert('letter_rates', [ + # { + # 'id': uuid.uuid4(), + # 'start_date': CHANGEOVER_DATE, + # 'end_date': None, + # 'sheet_count': sheet_count, + # 'rate': (base_prices[post_class] + (5 * sheet_count)) / 100.0, + # 'crown': crown, + # 'post_class': post_class, + # } + # for sheet_count, crown, post_class in itertools.product( + # range(1, 6), + # [True, False], + # ['first', 'second'] + # ) + # ]) def downgrade(): diff --git a/migrations/versions/0317_uploads_for_all.py b/migrations/versions/0317_uploads_for_all.py index b6389f9bd..a8fed5f00 100644 --- a/migrations/versions/0317_uploads_for_all.py +++ b/migrations/versions/0317_uploads_for_all.py @@ -6,7 +6,6 @@ Create Date: 2019-05-13 10:44:51.867661 """ from alembic import op -from app.models import UPLOAD_LETTERS revision = '0317_uploads_for_all' @@ -18,7 +17,7 @@ def upgrade(): INSERT INTO service_permissions (service_id, permission, created_at) SELECT - id, '{permission}', now() + id, 'upload_letters', now() FROM services WHERE @@ -28,14 +27,10 @@ def upgrade(): service_permissions WHERE service_id = services.id and - permission = '{permission}' + permission = 'upload_letters' ) - """.format( - permission=UPLOAD_LETTERS - )) + """) def downgrade(): - op.execute("DELETE from service_permissions where permission = '{}'".format( - UPLOAD_LETTERS - )) + op.execute("DELETE from service_permissions where permission = 'upload_letters'") diff --git a/migrations/versions/0324_int_letter_rates.py b/migrations/versions/0324_int_letter_rates.py index 59cf140ba..0c27772dd 100644 --- a/migrations/versions/0324_int_letter_rates.py +++ b/migrations/versions/0324_int_letter_rates.py @@ -12,8 +12,6 @@ from datetime import datetime from alembic import op from sqlalchemy.sql import text -from app.models import LetterRate - revision = '0324_int_letter_rates' down_revision = '0323_broadcast_message' @@ -31,22 +29,23 @@ def upgrade(): 4 sheets - £1.08 5 sheets - £1.16 """ - op.bulk_insert(LetterRate.__table__, [ - { - 'id': uuid.uuid4(), - 'start_date': start_date, - 'end_date': None, - 'sheet_count': sheet_count, - 'rate': (base_rate + (8 * sheet_count)) / 100.0, - 'crown': crown, - 'post_class': post_class, - } - for sheet_count, crown, post_class in itertools.product( - range(1, 6), - [True, False], - ['europe', 'rest-of-world'] - ) - ]) + # op.bulk_insert('letter_rates', [ + # { + # 'id': uuid.uuid4(), + # 'start_date': start_date, + # 'end_date': None, + # 'sheet_count': sheet_count, + # 'rate': (base_rate + (8 * sheet_count)) / 100.0, + # 'crown': crown, + # 'post_class': post_class, + # } + # for sheet_count, crown, post_class in itertools.product( + # range(1, 6), + # [True, False], + # ['europe', 'rest-of-world'] + # ) + # ]) + pass def downgrade(): diff --git a/migrations/versions/0341_new_letter_rates.py b/migrations/versions/0341_new_letter_rates.py index cac72c6fe..1d5e12838 100644 --- a/migrations/versions/0341_new_letter_rates.py +++ b/migrations/versions/0341_new_letter_rates.py @@ -12,8 +12,6 @@ from datetime import datetime from alembic import op from sqlalchemy.sql import text -from app.models import LetterRate - revision = '0341_new_letter_rates' down_revision = '0340_stub_training_broadcasts' @@ -36,24 +34,24 @@ def get_new_rate(sheet_count, post_class): def upgrade(): conn = op.get_bind() - conn.execute(text("UPDATE letter_rates SET end_date = :start WHERE end_date IS NULL"), start=CHANGEOVER_DATE) + # conn.execute(text("UPDATE letter_rates SET end_date = :start WHERE end_date IS NULL"), start=CHANGEOVER_DATE) - op.bulk_insert(LetterRate.__table__, [ - { - 'id': uuid.uuid4(), - 'start_date': CHANGEOVER_DATE, - 'end_date': None, - 'sheet_count': sheet_count, - 'rate': get_new_rate(sheet_count, post_class), - 'crown': crown, - 'post_class': post_class, - } - for sheet_count, crown, post_class in itertools.product( - range(1, 6), - [True, False], - ['first', 'second', 'europe', 'rest-of-world'] - ) - ]) + # op.bulk_insert('letter_rates', [ + # { + # 'id': uuid.uuid4(), + # 'start_date': CHANGEOVER_DATE, + # 'end_date': None, + # 'sheet_count': sheet_count, + # 'rate': get_new_rate(sheet_count, post_class), + # 'crown': crown, + # 'post_class': post_class, + # } + # for sheet_count, crown, post_class in itertools.product( + # range(1, 6), + # [True, False], + # ['first', 'second', 'europe', 'rest-of-world'] + # ) + # ]) def downgrade(): # Make sure you've thought about billing implications etc before downgrading! diff --git a/migrations/versions/0366_letter_rates_2022.py b/migrations/versions/0366_letter_rates_2022.py index 80bb5ef20..542ea0de5 100644 --- a/migrations/versions/0366_letter_rates_2022.py +++ b/migrations/versions/0366_letter_rates_2022.py @@ -12,8 +12,6 @@ from datetime import datetime from alembic import op from sqlalchemy.sql import text -from app.models import LetterRate - revision = '0366_letter_rates_2022' down_revision = '0365_add_nhs_branding' @@ -36,24 +34,24 @@ def get_new_rate(sheet_count, post_class): def upgrade(): conn = op.get_bind() - conn.execute(text("UPDATE letter_rates SET end_date = :start WHERE end_date IS NULL"), start=CHANGEOVER_DATE) + # conn.execute(text("UPDATE letter_rates SET end_date = :start WHERE end_date IS NULL"), start=CHANGEOVER_DATE) - op.bulk_insert(LetterRate.__table__, [ - { - 'id': uuid.uuid4(), - 'start_date': CHANGEOVER_DATE, - 'end_date': None, - 'sheet_count': sheet_count, - 'rate': get_new_rate(sheet_count, post_class), - 'crown': crown, - 'post_class': post_class, - } - for sheet_count, crown, post_class in itertools.product( - range(1, 6), - [True, False], - ['first', 'second', 'europe', 'rest-of-world'] - ) - ]) + # op.bulk_insert('letter_rates', [ + # { + # 'id': uuid.uuid4(), + # 'start_date': CHANGEOVER_DATE, + # 'end_date': None, + # 'sheet_count': sheet_count, + # 'rate': get_new_rate(sheet_count, post_class), + # 'crown': crown, + # 'post_class': post_class, + # } + # for sheet_count, crown, post_class in itertools.product( + # range(1, 6), + # [True, False], + # ['first', 'second', 'europe', 'rest-of-world'] + # ) + # ]) def downgrade(): diff --git a/migrations/versions/0367_add_reach.py b/migrations/versions/0367_add_reach.py index 92eef85fd..b948e2583 100644 --- a/migrations/versions/0367_add_reach.py +++ b/migrations/versions/0367_add_reach.py @@ -12,8 +12,6 @@ from datetime import datetime from alembic import op from sqlalchemy.sql import text -from app.models import LetterRate - revision = '0367_add_reach' down_revision = '0366_letter_rates_2022' diff --git a/migrations/versions/0370_remove_reach.py b/migrations/versions/0370_remove_reach.py index 4572fbddc..3bbb9b3e4 100644 --- a/migrations/versions/0370_remove_reach.py +++ b/migrations/versions/0370_remove_reach.py @@ -12,8 +12,6 @@ from datetime import datetime from alembic import op from sqlalchemy.sql import text -from app.models import LetterRate - revision = '0370_remove_reach' down_revision = '0369_update_sms_rates' diff --git a/migrations/versions/0384_remove_letter_branding_.py b/migrations/versions/0384_remove_letter_branding_.py new file mode 100644 index 000000000..fd390dfc6 --- /dev/null +++ b/migrations/versions/0384_remove_letter_branding_.py @@ -0,0 +1,44 @@ +""" + +Revision ID: 0384_remove_letter_branding_ +Revises: 0383_update_default_templates.py +Create Date: 2023-02-09 22:24:07.187569 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0384_remove_letter_branding_' +down_revision = '0383_update_default_templates.py' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint('fk_organisation_letter_branding_id', 'organisation', type_='foreignkey') + op.drop_column('organisation', 'letter_branding_id') + op.drop_table('service_letter_branding') + op.drop_table('letter_branding') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('organisation', sa.Column('letter_branding_id', postgresql.UUID(), autoincrement=False, nullable=True)) + op.create_foreign_key('fk_organisation_letter_branding_id', 'organisation', 'letter_branding', ['letter_branding_id'], ['id']) + op.create_table('service_letter_branding', + sa.Column('service_id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('letter_branding_id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint(['letter_branding_id'], ['letter_branding.id'], name='service_letter_branding_letter_branding_id_fkey'), + sa.ForeignKeyConstraint(['service_id'], ['services.id'], name='service_letter_branding_service_id_fkey'), + sa.PrimaryKeyConstraint('service_id', name='service_letter_branding_pkey') + ) + op.create_table('letter_branding', + sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=False), + sa.Column('filename', sa.VARCHAR(length=255), autoincrement=False, nullable=False), + sa.PrimaryKeyConstraint('id', name='letter_branding_pkey'), + sa.UniqueConstraint('filename', name='letter_branding_filename_key'), + sa.UniqueConstraint('name', name='letter_branding_name_key') + ) + # ### end Alembic commands ### diff --git a/migrations/versions/0385_remove postage_.py b/migrations/versions/0385_remove postage_.py new file mode 100644 index 000000000..e4ec15642 --- /dev/null +++ b/migrations/versions/0385_remove postage_.py @@ -0,0 +1,171 @@ +""" + +Revision ID: 0385_remove postage_.py +Revises: 0384_remove_letter_branding_ +Create Date: 2023-02-10 12:20:39.411493 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0385_remove postage_.py' +down_revision = '0384_remove_letter_branding_' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint('ft_billing_pkey', 'ft_billing', type_='primary') + op.create_primary_key('ft_billing_pkey', 'ft_billing', ['local_date', + 'template_id', + 'service_id', + 'notification_type', + 'provider', + 'rate_multiplier', + 'international', + 'rate']) + + # we need to replace the entire notifications_all_time_view in order to update it + op.execute("DROP VIEW notifications_all_time_view;") + op.execute(""" + CREATE VIEW notifications_all_time_view AS + ( + SELECT + id, + job_id, + job_row_number, + service_id, + template_id, + template_version, + api_key_id, + key_type, + billable_units, + notification_type, + created_at, + sent_at, + sent_by, + updated_at, + notification_status, + reference, + client_reference, + international, + phone_prefix, + rate_multiplier, + created_by_id, + document_download_count + FROM notifications + ) UNION + ( + SELECT + id, + job_id, + job_row_number, + service_id, + template_id, + template_version, + api_key_id, + key_type, + billable_units, + notification_type, + created_at, + sent_at, + sent_by, + updated_at, + notification_status, + reference, + client_reference, + international, + phone_prefix, + rate_multiplier, + created_by_id, + document_download_count + FROM notification_history + ) + """) + + op.drop_column('notification_history', 'postage') + op.drop_column('notifications', 'postage') + op.drop_column('templates', 'postage') + op.drop_column('templates_history', 'postage') + op.drop_column('ft_billing', 'postage') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('ft_billing', sa.Column('postage', sa.VARCHAR(), autoincrement=False, nullable=True)) + op.add_column('templates_history', sa.Column('postage', sa.VARCHAR(), autoincrement=False, nullable=True)) + op.add_column('templates', sa.Column('postage', sa.VARCHAR(), autoincrement=False, nullable=True)) + op.add_column('notifications', sa.Column('postage', sa.VARCHAR(), autoincrement=False, nullable=True)) + op.add_column('notification_history', sa.Column('postage', sa.VARCHAR(), autoincrement=False, nullable=True)) + + op.drop_constraint('ft_billing_pkey', 'ft_billing', type_='primary') + op.create_primary_key('ft_billing_pkey', 'ft_billing', ['local_date', + 'template_id', + 'service_id', + 'notification_type', + 'provider', + 'rate_multiplier', + 'international', + 'rate', + 'postage']) + + op.execute("DROP VIEW notifications_all_time_view;") + op.execute(""" + CREATE VIEW notifications_all_time_view AS + ( + SELECT + id, + job_id, + job_row_number, + service_id, + template_id, + template_version, + api_key_id, + key_type, + billable_units, + notification_type, + created_at, + sent_at, + sent_by, + updated_at, + notification_status, + reference, + client_reference, + international, + phone_prefix, + rate_multiplier, + postage, + created_by_id, + document_download_count + FROM notifications + ) UNION + ( + SELECT + id, + job_id, + job_row_number, + service_id, + template_id, + template_version, + api_key_id, + key_type, + billable_units, + notification_type, + created_at, + sent_at, + sent_by, + updated_at, + notification_status, + reference, + client_reference, + international, + phone_prefix, + rate_multiplier, + postage, + created_by_id, + document_download_count + FROM notification_history + ) + """) + # ### end Alembic commands ### diff --git a/migrations/versions/0386_remove_letter_rates_.py b/migrations/versions/0386_remove_letter_rates_.py new file mode 100644 index 000000000..81b036250 --- /dev/null +++ b/migrations/versions/0386_remove_letter_rates_.py @@ -0,0 +1,34 @@ +""" + +Revision ID: 0386_remove_letter_rates_.py +Revises: 0385_remove postage_.py +Create Date: 2023-02-15 10:24:55.107467 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0386_remove_letter_rates_.py' +down_revision = '0385_remove postage_.py' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('letter_rates') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('letter_rates', + sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('start_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), + sa.Column('end_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('sheet_count', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('rate', sa.NUMERIC(), autoincrement=False, nullable=False), + sa.Column('crown', sa.BOOLEAN(), autoincrement=False, nullable=False), + sa.Column('post_class', sa.VARCHAR(), autoincrement=False, nullable=False), + sa.PrimaryKeyConstraint('id', name='letter_rates_pkey') + ) + # ### end Alembic commands ### diff --git a/migrations/versions/0387_remove_letter_perms_.py b/migrations/versions/0387_remove_letter_perms_.py new file mode 100644 index 000000000..c54b30a45 --- /dev/null +++ b/migrations/versions/0387_remove_letter_perms_.py @@ -0,0 +1,41 @@ +""" + +Revision ID: 0387_remove_letter_perms_.py +Revises: 0386_remove_letter_rates_.py +Create Date: 2023-02-17 11:56:00.993409 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0387_remove_letter_perms_.py' +down_revision = '0386_remove_letter_rates_.py' + + +def upgrade(): + # this is the inverse of migration 0317 + op.execute("DELETE from service_permissions where permission = 'upload_letters'") + # ### end Alembic commands ### + + +def downgrade(): + # this is the inverse of migration 0317 + op.execute(""" + INSERT INTO + service_permissions (service_id, permission, created_at) + SELECT + id, 'upload_letters', now() + FROM + services + WHERE + NOT EXISTS ( + SELECT + FROM + service_permissions + WHERE + service_id = services.id and + permission = 'upload_letters' + ) + """) + # ### end Alembic commands ### diff --git a/migrations/versions/0388_no_serv_letter_contact.py b/migrations/versions/0388_no_serv_letter_contact.py new file mode 100644 index 000000000..bbbafa6c5 --- /dev/null +++ b/migrations/versions/0388_no_serv_letter_contact.py @@ -0,0 +1,45 @@ +""" + +Revision ID: 0388_no_serv_letter_contact.py +Revises: 0387_remove_letter_perms_.py +Create Date: 2023-02-17 14:42:52.679425 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0388_no_serv_letter_contact.py' +down_revision = '0387_remove_letter_perms_.py' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('ix_service_letter_contacts_service_id', table_name='service_letter_contacts') + op.drop_constraint('templates_service_letter_contact_id_fkey', 'templates', type_='foreignkey') + op.drop_column('templates', 'service_letter_contact_id') + op.drop_constraint('templates_history_service_letter_contact_id_fkey', 'templates_history', type_='foreignkey') + op.drop_column('templates_history', 'service_letter_contact_id') + op.drop_table('service_letter_contacts') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('templates_history', sa.Column('service_letter_contact_id', postgresql.UUID(), autoincrement=False, nullable=True)) + op.create_foreign_key('templates_history_service_letter_contact_id_fkey', 'templates_history', 'service_letter_contacts', ['service_letter_contact_id'], ['id']) + op.add_column('templates', sa.Column('service_letter_contact_id', postgresql.UUID(), autoincrement=False, nullable=True)) + op.create_foreign_key('templates_service_letter_contact_id_fkey', 'templates', 'service_letter_contacts', ['service_letter_contact_id'], ['id']) + op.create_table('service_letter_contacts', + sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('service_id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('contact_block', sa.TEXT(), autoincrement=False, nullable=False), + sa.Column('is_default', sa.BOOLEAN(), autoincrement=False, nullable=False), + sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), + sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('archived', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint(['service_id'], ['services.id'], name='service_letter_contacts_service_id_fkey'), + sa.PrimaryKeyConstraint('id', name='service_letter_contacts_pkey') + ) + op.create_index('ix_service_letter_contacts_service_id', 'service_letter_contacts', ['service_id'], unique=False) + # ### end Alembic commands ### diff --git a/migrations/versions/0389_no_more_letters.py b/migrations/versions/0389_no_more_letters.py new file mode 100644 index 000000000..e69c4615b --- /dev/null +++ b/migrations/versions/0389_no_more_letters.py @@ -0,0 +1,56 @@ +""" + +Revision ID: 0389_no_more_letters.py +Revises: 0388_no_serv_letter_contact.py +Create Date: 2023-02-28 08:58:38.310095 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0389_no_more_letters.py' +down_revision = '0388_no_serv_letter_contact.py' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('ix_returned_letters_service_id', table_name='returned_letters') + op.drop_table('returned_letters') + op.drop_index('ix_daily_sorted_letter_billing_day', table_name='daily_sorted_letter') + op.drop_index('ix_daily_sorted_letter_file_name', table_name='daily_sorted_letter') + op.drop_table('daily_sorted_letter') + op.drop_column('services', 'volume_letter') + op.drop_column('services_history', 'volume_letter') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('services_history', sa.Column('volume_letter', sa.INTEGER(), autoincrement=False, nullable=True)) + op.add_column('services', sa.Column('volume_letter', sa.INTEGER(), autoincrement=False, nullable=True)) + op.create_table('daily_sorted_letter', + sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('billing_day', sa.DATE(), autoincrement=False, nullable=False), + sa.Column('unsorted_count', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('sorted_count', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column('file_name', sa.VARCHAR(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint('id', name='daily_sorted_letter_pkey'), + sa.UniqueConstraint('file_name', 'billing_day', name='uix_file_name_billing_day') + ) + op.create_index('ix_daily_sorted_letter_file_name', 'daily_sorted_letter', ['file_name'], unique=False) + op.create_index('ix_daily_sorted_letter_billing_day', 'daily_sorted_letter', ['billing_day'], unique=False) + op.create_table('returned_letters', + sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('reported_at', sa.DATE(), autoincrement=False, nullable=False), + sa.Column('service_id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('notification_id', postgresql.UUID(), autoincrement=False, nullable=False), + sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False), + sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint(['service_id'], ['services.id'], name='returned_letters_service_id_fkey'), + sa.PrimaryKeyConstraint('id', name='returned_letters_pkey'), + sa.UniqueConstraint('notification_id', name='returned_letters_notification_id_key') + ) + op.create_index('ix_returned_letters_service_id', 'returned_letters', ['service_id'], unique=False) + # ### end Alembic commands ### diff --git a/migrations/versions/0390_drop_dvla_provider.py b/migrations/versions/0390_drop_dvla_provider.py new file mode 100644 index 000000000..53d7d74ed --- /dev/null +++ b/migrations/versions/0390_drop_dvla_provider.py @@ -0,0 +1,32 @@ +""" + +Revision ID: 0390_drop_dvla_provider.py +Revises: 0389_no_more_letters.py +Create Date: 2023-02-28 14:25:50.751952 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = '0390_drop_dvla_provider.py' +down_revision = '0389_no_more_letters.py' + + +def upgrade(): + # based on migration 0066, but without provider_rates + op.execute("DELETE FROM provider_details_history where display_name = 'DVLA'") + op.execute("DELETE FROM provider_details where display_name = 'DVLA'") + # ### end Alembic commands ### + + +def downgrade(): + # migration 0066 in reverse + provider_id = str(uuid.uuid4()) + op.execute( + "INSERT INTO provider_details (id, display_name, identifier, priority, notification_type, active, version) values ('{}', 'DVLA', 'dvla', 50, 'letter', true, 1)".format(provider_id) + ) + op.execute( + "INSERT INTO provider_details_history (id, display_name, identifier, priority, notification_type, active, version) values ('{}', 'DVLA', 'dvla', 50, 'letter', true, 1)".format(provider_id) + ) + # ### end Alembic commands ### diff --git a/tests/app/billing/test_rest.py b/tests/app/billing/test_rest.py index b299ff8df..1befafd8b 100644 --- a/tests/app/billing/test_rest.py +++ b/tests/app/billing/test_rest.py @@ -127,12 +127,10 @@ def test_get_yearly_usage_by_monthly_from_ft_billing(admin_request, notify_db_se sms_template = create_template(service=service, template_type="sms") email_template = create_template(service=service, template_type="email") - letter_template = create_template(service=service, template_type="letter") for dt in (date(2016, 4, 28), date(2016, 11, 10), date(2017, 2, 26)): create_ft_billing(local_date=dt, template=sms_template, rate=0.0162) create_ft_billing(local_date=dt, template=email_template, billable_unit=0, rate=0) - create_ft_billing(local_date=dt, template=letter_template, rate=0.33, postage='second') json_response = admin_request.get( 'billing.get_yearly_usage_by_monthly_from_ft_billing', @@ -140,30 +138,18 @@ def test_get_yearly_usage_by_monthly_from_ft_billing(admin_request, notify_db_se year=2016 ) - assert len(json_response) == 6 # 3 billed months for SMS and letters + assert len(json_response) == 3 # 3 billed months for SMS email_rows = [row for row in json_response if row['notification_type'] == 'email'] assert len(email_rows) == 0 - letter_row = next(x for x in json_response if x['notification_type'] == 'letter') sms_row = next(x for x in json_response if x['notification_type'] == 'sms') - assert letter_row["month"] == "April" - assert letter_row["notification_type"] == "letter" - assert letter_row["chargeable_units"] == 1 - assert letter_row["notifications_sent"] == 1 - assert letter_row["rate"] == 0.33 - assert letter_row["postage"] == "second" - assert letter_row["cost"] == 0.33 - assert letter_row["free_allowance_used"] == 0 - assert letter_row["charged_units"] == 1 - assert sms_row["month"] == "April" assert sms_row["notification_type"] == "sms" assert sms_row["chargeable_units"] == 1 assert sms_row["notifications_sent"] == 1 assert sms_row["rate"] == 0.0162 - assert sms_row["postage"] == "none" # free allowance is 1 assert sms_row["cost"] == 0 assert sms_row["free_allowance_used"] == 1 @@ -198,12 +184,10 @@ def test_get_yearly_billing_usage_summary_from_ft_billing(admin_request, notify_ sms_template = create_template(service=service, template_type="sms") email_template = create_template(service=service, template_type="email") - letter_template = create_template(service=service, template_type="letter") for dt in (date(2016, 4, 28), date(2016, 11, 10), date(2017, 2, 26)): create_ft_billing(local_date=dt, template=sms_template, rate=0.0162) create_ft_billing(local_date=dt, template=email_template, billable_unit=0, rate=0) - create_ft_billing(local_date=dt, template=letter_template, rate=0.33, postage='second') json_response = admin_request.get( 'billing.get_yearly_billing_usage_summary_from_ft_billing', @@ -211,7 +195,7 @@ def test_get_yearly_billing_usage_summary_from_ft_billing(admin_request, notify_ year=2016 ) - assert len(json_response) == 3 + assert len(json_response) == 2 assert json_response[0]['notification_type'] == 'email' assert json_response[0]['chargeable_units'] == 0 @@ -221,18 +205,10 @@ def test_get_yearly_billing_usage_summary_from_ft_billing(admin_request, notify_ assert json_response[0]['free_allowance_used'] == 0 assert json_response[0]['charged_units'] == 0 - assert json_response[1]['notification_type'] == 'letter' + assert json_response[1]['notification_type'] == 'sms' assert json_response[1]['chargeable_units'] == 3 assert json_response[1]['notifications_sent'] == 3 - assert json_response[1]['rate'] == 0.33 - assert json_response[1]['cost'] == 0.99 - assert json_response[1]['free_allowance_used'] == 0 - assert json_response[1]['charged_units'] == 3 - - assert json_response[2]['notification_type'] == 'sms' - assert json_response[2]['chargeable_units'] == 3 - assert json_response[2]['notifications_sent'] == 3 - assert json_response[2]['rate'] == 0.0162 - assert json_response[2]['cost'] == 0.0324 - assert json_response[2]['free_allowance_used'] == 1 - assert json_response[2]['charged_units'] == 2 + assert json_response[1]['rate'] == 0.0162 + assert json_response[1]['cost'] == 0.0324 + assert json_response[1]['free_allowance_used'] == 1 + assert json_response[1]['charged_units'] == 2 diff --git a/tests/app/celery/test_ftp_update_tasks.py b/tests/app/celery/test_ftp_update_tasks.py deleted file mode 100644 index d973f7a93..000000000 --- a/tests/app/celery/test_ftp_update_tasks.py +++ /dev/null @@ -1,328 +0,0 @@ -import os -from collections import defaultdict, namedtuple -from datetime import date, datetime - -import pytest -from flask import current_app -from freezegun import freeze_time - -from app.celery.tasks import ( - check_billable_units, - get_local_billing_date_from_filename, - persist_daily_sorted_letter_counts, - process_updates_from_file, - record_daily_sorted_counts, - update_letter_notifications_statuses, - update_letter_notifications_to_error, - update_letter_notifications_to_sent_to_dvla, -) -from app.dao.daily_sorted_letter_dao import ( - dao_get_daily_sorted_letter_by_billing_day, -) -from app.exceptions import DVLAException, NotificationTechnicalFailureException -from app.models import ( - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_SENDING, - NOTIFICATION_TECHNICAL_FAILURE, - NOTIFICATION_TEMPORARY_FAILURE, - DailySortedLetter, - NotificationHistory, -) -from tests.app.db import ( - create_notification, - create_notification_history, - create_service_callback_api, -) -from tests.conftest import set_config - - -@pytest.fixture -def notification_update(): - """ - Returns a namedtuple to use as the argument for the check_billable_units function - """ - NotificationUpdate = namedtuple('NotificationUpdate', ['reference', 'status', 'page_count', 'cost_threshold']) - return NotificationUpdate('REFERENCE_ABC', 'sent', '1', 'cost') - - -def test_update_letter_notifications_statuses_raises_for_invalid_format(notify_api, mocker): - invalid_file = 'ref-foo|Sent|1|Unsorted\nref-bar|Sent|2' - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=invalid_file) - - with pytest.raises(DVLAException) as e: - update_letter_notifications_statuses(filename='NOTIFY-20170823160812-RSP.TXT') - assert 'DVLA response file: {} has an invalid format'.format('NOTIFY-20170823160812-RSP.TXT') in str(e.value) - - -def test_update_letter_notification_statuses_when_notification_does_not_exist_updates_notification_history( - sample_letter_template, - mocker -): - valid_file = 'ref-foo|Sent|1|Unsorted' - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=valid_file) - notification = create_notification_history(sample_letter_template, reference='ref-foo', status=NOTIFICATION_SENDING, - billable_units=1) - - update_letter_notifications_statuses(filename="NOTIFY-20170823160812-RSP.TXT") - - updated_history = NotificationHistory.query.filter_by(id=notification.id).one() - assert updated_history.status == NOTIFICATION_DELIVERED - - -def test_update_letter_notifications_statuses_raises_dvla_exception(notify_api, mocker, sample_letter_template): - valid_file = 'ref-foo|Failed|1|Unsorted' - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=valid_file) - create_notification(sample_letter_template, reference='ref-foo', status=NOTIFICATION_SENDING, - billable_units=0) - - with pytest.raises(DVLAException) as e: - update_letter_notifications_statuses(filename="failed.txt") - failed = ["ref-foo"] - assert "DVLA response file: {filename} has failed letters with notification.reference {failures}".format( - filename="failed.txt", failures=failed - ) in str(e.value) - - -def test_update_letter_notifications_statuses_calls_with_correct_bucket_location(notify_api, mocker): - s3_mock = mocker.patch('app.celery.tasks.s3.get_s3_object') - - with set_config(notify_api, 'NOTIFY_EMAIL_DOMAIN', 'foo.bar'): - update_letter_notifications_statuses(filename='NOTIFY-20170823160812-RSP.TXT') - s3_mock.assert_called_with('{}-ftp'.format( - current_app.config['NOTIFY_EMAIL_DOMAIN']), - 'NOTIFY-20170823160812-RSP.TXT', - os.environ.get('AWS_ACCESS_KEY_ID'), - os.environ.get('AWS_SECRET_ACCESS_KEY'), - os.environ.get('AWS_REGION'), - ) - - -def test_update_letter_notifications_statuses_builds_updates_from_content(notify_api, mocker): - valid_file = 'ref-foo|Sent|1|Unsorted\nref-bar|Sent|2|Sorted' - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=valid_file) - update_mock = mocker.patch('app.celery.tasks.process_updates_from_file') - - update_letter_notifications_statuses(filename='NOTIFY-20170823160812-RSP.TXT') - - update_mock.assert_called_with('ref-foo|Sent|1|Unsorted\nref-bar|Sent|2|Sorted') - - -def test_update_letter_notifications_statuses_builds_updates_list(notify_api, mocker): - valid_file = 'ref-foo|Sent|1|Unsorted\nref-bar|Sent|2|Sorted' - updates = process_updates_from_file(valid_file) - - assert len(updates) == 2 - - assert updates[0].reference == 'ref-foo' - assert updates[0].status == 'Sent' - assert updates[0].page_count == '1' - assert updates[0].cost_threshold == 'Unsorted' - - assert updates[1].reference == 'ref-bar' - assert updates[1].status == 'Sent' - assert updates[1].page_count == '2' - assert updates[1].cost_threshold == 'Sorted' - - -def test_update_letter_notifications_statuses_persisted(notify_api, mocker, sample_letter_template): - sent_letter = create_notification(sample_letter_template, reference='ref-foo', status=NOTIFICATION_SENDING, - billable_units=1) - failed_letter = create_notification(sample_letter_template, reference='ref-bar', status=NOTIFICATION_SENDING, - billable_units=2) - create_service_callback_api(service=sample_letter_template.service, url="https://original_url.com") - valid_file = '{}|Sent|1|Unsorted\n{}|Failed|2|Sorted'.format( - sent_letter.reference, failed_letter.reference) - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=valid_file) - - with pytest.raises(expected_exception=DVLAException) as e: - update_letter_notifications_statuses(filename='NOTIFY-20170823160812-RSP.TXT') - - assert sent_letter.status == NOTIFICATION_DELIVERED - assert sent_letter.billable_units == 1 - assert sent_letter.updated_at - assert failed_letter.status == NOTIFICATION_TEMPORARY_FAILURE - assert failed_letter.billable_units == 2 - assert failed_letter.updated_at - assert "DVLA response file: {filename} has failed letters with notification.reference {failures}".format( - filename="NOTIFY-20170823160812-RSP.TXT", failures=[format(failed_letter.reference)]) in str(e.value) - - -def test_update_letter_notifications_does_not_call_send_callback_if_no_db_entry(notify_api, mocker, - sample_letter_template): - sent_letter = create_notification(sample_letter_template, reference='ref-foo', status=NOTIFICATION_SENDING, - billable_units=0) - valid_file = '{}|Sent|1|Unsorted\n'.format(sent_letter.reference) - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=valid_file) - - send_mock = mocker.patch( - 'app.celery.service_callback_tasks.send_delivery_status_to_service.apply_async' - ) - - update_letter_notifications_statuses(filename='NOTIFY-20170823160812-RSP.TXT') - send_mock.assert_not_called() - - -def test_update_letter_notifications_to_sent_to_dvla_updates_based_on_notification_references( - client, - sample_letter_template -): - first = create_notification(sample_letter_template, reference='first ref') - second = create_notification(sample_letter_template, reference='second ref') - - dt = datetime.utcnow() - with freeze_time(dt): - update_letter_notifications_to_sent_to_dvla([first.reference]) - - assert first.status == NOTIFICATION_SENDING - assert first.sent_by == 'dvla' - assert first.sent_at == dt - assert first.updated_at == dt - assert second.status == NOTIFICATION_CREATED - - -def test_update_letter_notifications_to_error_updates_based_on_notification_references( - sample_letter_template -): - first = create_notification(sample_letter_template, reference='first ref') - second = create_notification(sample_letter_template, reference='second ref') - create_service_callback_api(service=sample_letter_template.service, url="https://original_url.com") - dt = datetime.utcnow() - with freeze_time(dt): - with pytest.raises(NotificationTechnicalFailureException) as e: - update_letter_notifications_to_error([first.reference]) - assert first.reference in str(e.value) - - assert first.status == NOTIFICATION_TECHNICAL_FAILURE - assert first.sent_by is None - assert first.sent_at is None - assert first.updated_at == dt - assert second.status == NOTIFICATION_CREATED - - -def test_check_billable_units_when_billable_units_matches_page_count( - client, - sample_letter_template, - mocker, - notification_update -): - mock_logger = mocker.patch('app.celery.tasks.current_app.logger.error') - - create_notification(sample_letter_template, reference='REFERENCE_ABC', billable_units=1) - - check_billable_units(notification_update) - - mock_logger.assert_not_called() - - -def test_check_billable_units_when_billable_units_does_not_match_page_count( - client, - sample_letter_template, - mocker, - notification_update -): - mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception') - - notification = create_notification(sample_letter_template, reference='REFERENCE_ABC', billable_units=3) - - check_billable_units(notification_update) - - mock_logger.assert_called_once_with( - 'Notification with id {} has 3 billable_units but DVLA says page count is 1'.format(notification.id) - ) - - -@pytest.mark.parametrize('filename_date, billing_date', [ - ('20170820000000', date(2017, 8, 19)), - ('20170120230000', date(2017, 1, 20)) -]) -def test_get_local_billing_date_from_filename(filename_date, billing_date): - filename = 'NOTIFY-{}-RSP.TXT'.format(filename_date) - result = get_local_billing_date_from_filename(filename) - - assert result == billing_date - - -@freeze_time("2018-01-11 09:00:00") -def test_persist_daily_sorted_letter_counts_saves_sorted_and_unsorted_values(client, notify_db_session): - letter_counts = defaultdict(int, **{'unsorted': 5, 'sorted': 1}) - persist_daily_sorted_letter_counts(date.today(), "test.txt", letter_counts) - day = dao_get_daily_sorted_letter_by_billing_day(date.today()) - - assert day.unsorted_count == 5 - assert day.sorted_count == 1 - - -def test_record_daily_sorted_counts_persists_daily_sorted_letter_count( - notify_api, - notify_db_session, - mocker, -): - valid_file = 'Letter1|Sent|1|uNsOrTeD\nLetter2|Sent|2|SORTED\nLetter3|Sent|2|Sorted' - - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=valid_file) - - assert DailySortedLetter.query.count() == 0 - - record_daily_sorted_counts(filename='NOTIFY-20170823160812-RSP.TXT') - - daily_sorted_counts = DailySortedLetter.query.all() - assert len(daily_sorted_counts) == 1 - assert daily_sorted_counts[0].sorted_count == 2 - assert daily_sorted_counts[0].unsorted_count == 1 - - -def test_record_daily_sorted_counts_raises_dvla_exception_with_unknown_sorted_status( - notify_api, - mocker, -): - file_contents = 'ref-foo|Failed|1|invalid\nrow_2|Failed|1|MM' - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=file_contents) - filename = "failed.txt" - with pytest.raises(DVLAException) as e: - record_daily_sorted_counts(filename=filename) - - assert "DVLA response file: {} contains unknown Sorted status".format(filename) in e.value.message - assert "'mm'" in e.value.message - assert "'invalid'" in e.value.message - - -def test_record_daily_sorted_counts_persists_daily_sorted_letter_count_with_no_sorted_values( - notify_api, - mocker, - notify_db_session -): - valid_file = 'Letter1|Sent|1|Unsorted\nLetter2|Sent|2|Unsorted' - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=valid_file) - - record_daily_sorted_counts(filename='NOTIFY-20170823160812-RSP.TXT') - - daily_sorted_letter = dao_get_daily_sorted_letter_by_billing_day(date(2017, 8, 23)) - - assert daily_sorted_letter.unsorted_count == 2 - assert daily_sorted_letter.sorted_count == 0 - - -def test_record_daily_sorted_counts_can_run_twice_for_same_file( - notify_api, - mocker, - notify_db_session -): - valid_file = 'Letter1|Sent|1|sorted\nLetter2|Sent|2|Unsorted' - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=valid_file) - - record_daily_sorted_counts(filename='NOTIFY-20170823160812-RSP.TXT') - - daily_sorted_letter = dao_get_daily_sorted_letter_by_billing_day(date(2017, 8, 23)) - - assert daily_sorted_letter.unsorted_count == 1 - assert daily_sorted_letter.sorted_count == 1 - - updated_file = 'Letter1|Sent|1|sorted\nLetter2|Sent|2|Unsorted\nLetter3|Sent|2|Unsorted' - mocker.patch('app.celery.tasks.s3.get_s3_file', return_value=updated_file) - - record_daily_sorted_counts(filename='NOTIFY-20170823160812-RSP.TXT') - daily_sorted_letter = dao_get_daily_sorted_letter_by_billing_day(date(2017, 8, 23)) - - assert daily_sorted_letter.unsorted_count == 2 - assert daily_sorted_letter.sorted_count == 1 diff --git a/tests/app/celery/test_nightly_tasks.py b/tests/app/celery/test_nightly_tasks.py index 5849038cd..0599799e4 100644 --- a/tests/app/celery/test_nightly_tasks.py +++ b/tests/app/celery/test_nightly_tasks.py @@ -2,30 +2,20 @@ from datetime import date, datetime, timedelta from unittest.mock import ANY, call import pytest -import pytz -from flask import current_app from freezegun import freeze_time -from notifications_utils.clients.zendesk.zendesk_client import ( - NotifySupportTicket, -) from app.celery import nightly_tasks from app.celery.nightly_tasks import ( _delete_notifications_older_than_retention_by_type, delete_email_notifications_older_than_retention, delete_inbound_sms, - delete_letter_notifications_older_than_retention, delete_sms_notifications_older_than_retention, - get_letter_notifications_still_sending_when_they_shouldnt_be, - letter_raise_alert_if_no_ack_file_for_zip, - raise_alert_if_letter_notifications_still_sending, - remove_letter_csv_files, remove_sms_email_csv_files, s3, save_daily_notification_processing_time, timeout_notifications, ) -from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE, FactProcessingTime +from app.models import EMAIL_TYPE, SMS_TYPE, FactProcessingTime from tests.app.db import ( create_job, create_notification, @@ -122,27 +112,6 @@ def test_will_remove_csv_files_for_jobs_older_than_retention_period( ], any_order=True) -@freeze_time('2017-01-01 10:00:00') -def test_remove_csv_files_filters_by_type(mocker, sample_service): - mocker.patch('app.celery.nightly_tasks.s3.remove_job_from_s3') - """ - Jobs older than seven days are deleted, but only two day's worth (two-day window) - """ - letter_template = create_template(service=sample_service, template_type=LETTER_TYPE) - sms_template = create_template(service=sample_service, template_type=SMS_TYPE) - - eight_days_ago = datetime.utcnow() - timedelta(days=8) - - job_to_delete = create_job(template=letter_template, created_at=eight_days_ago) - create_job(template=sms_template, created_at=eight_days_ago) - - remove_letter_csv_files() - - assert s3.remove_job_from_s3.call_args_list == [ - call(job_to_delete.service_id, job_to_delete.id), - ] - - def test_delete_sms_notifications_older_than_retention_calls_child_task(notify_api, mocker): mocked = mocker.patch('app.celery.nightly_tasks._delete_notifications_older_than_retention_by_type') delete_sms_notifications_older_than_retention() @@ -156,23 +125,6 @@ def test_delete_email_notifications_older_than_retentions_calls_child_task(notif mocked_notifications.assert_called_once_with('email') -def test_delete_letter_notifications_older_than_retention_calls_child_task(notify_api, mocker): - mocked = mocker.patch('app.celery.nightly_tasks._delete_notifications_older_than_retention_by_type') - delete_letter_notifications_older_than_retention() - mocked.assert_called_once_with('letter') - - -def test_should_not_update_status_of_letter_notifications(client, sample_letter_template): - created_at = datetime.utcnow() - timedelta(days=5) - not1 = create_notification(template=sample_letter_template, status='sending', created_at=created_at) - not2 = create_notification(template=sample_letter_template, status='created', created_at=created_at) - - timeout_notifications() - - assert not1.status == 'sending' - assert not2.status == 'created' - - @freeze_time("2021-12-13T10:00") def test_timeout_notifications(mocker, sample_notification): mock_update = mocker.patch('app.celery.nightly_tasks.check_and_queue_callback_task') @@ -195,181 +147,6 @@ def test_delete_inbound_sms_calls_child_task(notify_api, mocker): assert nightly_tasks.delete_inbound_sms_older_than_retention.call_count == 1 -def test_create_ticket_if_letter_notifications_still_sending(notify_api, mocker): - mock_get_letters = mocker.patch( - "app.celery.nightly_tasks.get_letter_notifications_still_sending_when_they_shouldnt_be" - ) - - mock_get_letters.return_value = 1, date(2018, 1, 15) - mock_create_ticket = mocker.spy(NotifySupportTicket, '__init__') - mock_send_ticket_to_zendesk = mocker.patch( - 'app.celery.nightly_tasks.zendesk_client.send_ticket_to_zendesk', - autospec=True, - ) - - raise_alert_if_letter_notifications_still_sending() - mock_create_ticket.assert_called_once_with( - ANY, - subject='[test] Letters still sending', - email_ccs=current_app.config['DVLA_EMAIL_ADDRESSES'], - message=( - "There are 1 letters in the 'sending' state from Monday 15 January. Resolve using " - "https://github.com/alphagov/notifications-manuals/wiki/Support-Runbook#deal-with-letters-still-in-sending" - ), - ticket_type='incident', - technical_ticket=True, - ticket_categories=['notify_letters'] - ) - mock_send_ticket_to_zendesk.assert_called_once() - - -def test_dont_create_ticket_if_letter_notifications_not_still_sending(notify_api, mocker): - mock_get_letters = mocker.patch( - "app.celery.nightly_tasks.get_letter_notifications_still_sending_when_they_shouldnt_be" - ) - - mock_get_letters.return_value = 0, None - mock_send_ticket_to_zendesk = mocker.patch( - "app.celery.nightly_tasks.zendesk_client.send_ticket_to_zendesk", - autospec=True - ) - - raise_alert_if_letter_notifications_still_sending() - - mock_send_ticket_to_zendesk.assert_not_called() - - -@freeze_time("Thursday 17th January 2018 17:00") -def test_get_letter_notifications_still_sending_when_they_shouldnt_finds_no_letters_if_sent_a_day_ago( - sample_letter_template -): - today = datetime.utcnow() - one_day_ago = today - timedelta(days=1) - create_notification(template=sample_letter_template, status='sending', sent_at=one_day_ago) - - count, expected_sent_date = get_letter_notifications_still_sending_when_they_shouldnt_be() - assert count == 0 - - -@freeze_time("Thursday 17th January 2018 17:00") -def test_get_letter_notifications_still_sending_when_they_shouldnt_only_finds_letters_still_in_sending_status( - sample_letter_template -): - two_days_ago = datetime(2018, 1, 15, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=two_days_ago) - create_notification(template=sample_letter_template, status='delivered', sent_at=two_days_ago) - create_notification(template=sample_letter_template, status='failed', sent_at=two_days_ago) - - count, expected_sent_date = get_letter_notifications_still_sending_when_they_shouldnt_be() - assert count == 1 - assert expected_sent_date == date(2018, 1, 15) - - -@freeze_time("Thursday 17th January 2018 17:00") -def test_get_letter_notifications_still_sending_when_they_shouldnt_finds_letters_older_than_offset( - sample_letter_template -): - three_days_ago = datetime(2018, 1, 14, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=three_days_ago) - - count, expected_sent_date = get_letter_notifications_still_sending_when_they_shouldnt_be() - assert count == 1 - assert expected_sent_date == date(2018, 1, 15) - - -@freeze_time("Sunday 14th January 2018 17:00") -def test_get_letter_notifications_still_sending_when_they_shouldnt_be_finds_no_letters_on_weekend( - sample_letter_template -): - yesterday = datetime(2018, 1, 13, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=yesterday) - - count, expected_sent_date = get_letter_notifications_still_sending_when_they_shouldnt_be() - assert count == 0 - - -@freeze_time("Monday 15th January 2018 17:00") -def test_get_letter_notifications_still_sending_when_they_shouldnt_finds_thursday_letters_when_run_on_monday( - sample_letter_template -): - thursday = datetime(2018, 1, 11, 13, 30) - yesterday = datetime(2018, 1, 14, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=thursday, postage='first') - create_notification(template=sample_letter_template, status='sending', sent_at=thursday, postage='second') - create_notification(template=sample_letter_template, status='sending', sent_at=yesterday, postage='second') - - count, expected_sent_date = get_letter_notifications_still_sending_when_they_shouldnt_be() - assert count == 2 - assert expected_sent_date == date(2018, 1, 11) - - -@freeze_time("Tuesday 16th January 2018 17:00") -def test_get_letter_notifications_still_sending_when_they_shouldnt_finds_friday_letters_when_run_on_tuesday( - sample_letter_template -): - friday = datetime(2018, 1, 12, 13, 30) - yesterday = datetime(2018, 1, 14, 13, 30) - create_notification(template=sample_letter_template, status='sending', sent_at=friday, postage='first') - create_notification(template=sample_letter_template, status='sending', sent_at=friday, postage='second') - create_notification(template=sample_letter_template, status='sending', sent_at=yesterday, postage='first') - - count, expected_sent_date = get_letter_notifications_still_sending_when_they_shouldnt_be() - assert count == 2 - assert expected_sent_date == date(2018, 1, 12) - - -@freeze_time('2018-01-11T23:00:00') -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_letter_raise_alert_if_no_ack_file_for_zip_does_not_raise_when_files_match_zip_list(mocker, notify_db_session): - mock_file_list = mocker.patch("app.aws.s3.get_list_of_files_by_suffix", side_effect=mock_s3_get_list_match) - letter_raise_alert_if_no_ack_file_for_zip() - - yesterday = datetime.now(tz=pytz.utc) - timedelta(days=1) # Datatime format on AWS - subfoldername = datetime.utcnow().strftime('%Y-%m-%d') + '/zips_sent' - assert mock_file_list.call_count == 2 - assert mock_file_list.call_args_list == [ - call(bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'], subfolder=subfoldername, suffix='.TXT'), - call(bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'], subfolder='root/dispatch', - suffix='.ACK.txt', last_modified=yesterday), - ] - - -@freeze_time('2018-01-11T23:00:00') -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_letter_raise_alert_if_ack_files_not_match_zip_list(mocker, notify_db_session): - mock_file_list = mocker.patch("app.aws.s3.get_list_of_files_by_suffix", side_effect=mock_s3_get_list_diff) - mock_create_ticket = mocker.spy(NotifySupportTicket, '__init__') - mock_send_ticket_to_zendesk = mocker.patch( - 'app.celery.nightly_tasks.zendesk_client.send_ticket_to_zendesk', - autospec=True, - ) - - letter_raise_alert_if_no_ack_file_for_zip() - - assert mock_file_list.call_count == 2 - - mock_create_ticket.assert_called_once_with( - ANY, - subject="Letter acknowledge error", - message=ANY, - ticket_type='incident', - technical_ticket=True, - ticket_categories=['notify_letters'] - ) - mock_send_ticket_to_zendesk.assert_called_once() - assert "['NOTIFY.2018-01-11175009', 'NOTIFY.2018-01-11175010']" in mock_create_ticket.call_args[1]['message'] - assert '2018-01-11/zips_sent' in mock_create_ticket.call_args[1]['message'] - - -@freeze_time('2018-01-11T23:00:00') -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_letter_not_raise_alert_if_no_files_do_not_cause_error(mocker, notify_db_session): - mock_file_list = mocker.patch("app.aws.s3.get_list_of_files_by_suffix", side_effect=None) - letter_raise_alert_if_no_ack_file_for_zip() - - assert mock_file_list.call_count == 2 - - @freeze_time('2021-01-18T02:00') @pytest.mark.parametrize('date_provided', [None, '2021-1-17']) def test_save_daily_notification_processing_time(mocker, sample_template, date_provided): diff --git a/tests/app/celery/test_reporting_tasks.py b/tests/app/celery/test_reporting_tasks.py index 5cdc55733..926c3d415 100644 --- a/tests/app/celery/test_reporting_tasks.py +++ b/tests/app/celery/test_reporting_tasks.py @@ -18,7 +18,6 @@ from app.models import ( KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, - LETTER_TYPE, NOTIFICATION_TYPES, SMS_TYPE, FactBilling, @@ -26,7 +25,6 @@ from app.models import ( Notification, ) from tests.app.db import ( - create_letter_rate, create_notification, create_notification_history, create_rate, @@ -36,11 +34,9 @@ from tests.app.db import ( def mocker_get_rate( - non_letter_rates, letter_rates, notification_type, local_date, crown=None, rate_multiplier=None, post_class="second" + non_letter_rates, notification_type, local_date, crown=None, rate_multiplier=None ): - if notification_type == LETTER_TYPE: - return Decimal(2.1) - elif notification_type == SMS_TYPE: + if notification_type == SMS_TYPE: return Decimal(1.33) elif notification_type == EMAIL_TYPE: return Decimal(0) @@ -87,10 +83,8 @@ def test_create_nightly_notification_status_triggers_tasks( @freeze_time('2019-08-01T00:30') @pytest.mark.parametrize('notification_date, expected_types_aggregated', [ ('2019-08-01', set()), - ('2019-07-31', {EMAIL_TYPE, SMS_TYPE, LETTER_TYPE}), - ('2019-07-28', {EMAIL_TYPE, SMS_TYPE, LETTER_TYPE}), - ('2019-07-27', {LETTER_TYPE}), - ('2019-07-22', {LETTER_TYPE}), + ('2019-07-31', {EMAIL_TYPE, SMS_TYPE}), + ('2019-07-28', {EMAIL_TYPE, SMS_TYPE}), ('2019-07-21', set()), ]) def test_create_nightly_notification_status_triggers_relevant_tasks( @@ -117,7 +111,7 @@ def test_create_nightly_notification_status_triggers_relevant_tasks( @pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_create_nightly_billing_for_day_checks_history( sample_service, - sample_letter_template, + sample_sms_template, mocker ): yesterday = datetime.now() - timedelta(days=1) @@ -125,13 +119,13 @@ def test_create_nightly_billing_for_day_checks_history( create_notification( created_at=yesterday, - template=sample_letter_template, + template=sample_sms_template, status='sending', ) create_notification_history( created_at=yesterday, - template=sample_letter_template, + template=sample_sms_template, status='delivered', ) @@ -143,7 +137,7 @@ def test_create_nightly_billing_for_day_checks_history( assert len(records) == 1 record = records[0] - assert record.notification_type == LETTER_TYPE + assert record.notification_type == SMS_TYPE assert record.notifications_sent == 2 @@ -291,116 +285,6 @@ def test_create_nightly_billing_for_day_different_sent_by( assert record.rate_multiplier == 1.0 -@pytest.mark.skip(reason="Needs updating for TTS: Remove mail") -def test_create_nightly_billing_for_day_different_letter_postage( - notify_db_session, - sample_letter_template, - mocker -): - yesterday = datetime.now() - timedelta(days=1) - mocker.patch('app.dao.fact_billing_dao.get_rate', side_effect=mocker_get_rate) - - for _ in range(2): - create_notification( - created_at=yesterday, - template=sample_letter_template, - status='delivered', - sent_by='dvla', - billable_units=2, - postage='first' - ) - create_notification( - created_at=yesterday, - template=sample_letter_template, - status='delivered', - sent_by='dvla', - billable_units=2, - postage='second' - ) - create_notification( - created_at=yesterday, - template=sample_letter_template, - status='delivered', - sent_by='dvla', - billable_units=1, - postage='europe' - ) - create_notification( - created_at=yesterday, - template=sample_letter_template, - status='delivered', - sent_by='dvla', - billable_units=3, - postage='rest-of-world' - ) - - records = FactBilling.query.all() - assert len(records) == 0 - create_nightly_billing_for_day(str(yesterday.date())) - - records = FactBilling.query.order_by('postage').all() - assert len(records) == 4 - - assert records[0].notification_type == LETTER_TYPE - assert records[0].local_date == datetime.date(yesterday) - assert records[0].postage == 'europe' - assert records[0].notifications_sent == 1 - assert records[0].billable_units == 1 - - assert records[1].notification_type == LETTER_TYPE - assert records[1].local_date == datetime.date(yesterday) - assert records[1].postage == 'first' - assert records[1].notifications_sent == 2 - assert records[1].billable_units == 4 - - assert records[2].notification_type == LETTER_TYPE - assert records[2].local_date == datetime.date(yesterday) - assert records[2].postage == 'rest-of-world' - assert records[2].notifications_sent == 1 - assert records[2].billable_units == 3 - - assert records[3].notification_type == LETTER_TYPE - assert records[3].local_date == datetime.date(yesterday) - assert records[3].postage == 'second' - assert records[3].notifications_sent == 1 - assert records[3].billable_units == 2 - - -@pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") -def test_create_nightly_billing_for_day_letter( - sample_service, - sample_letter_template, - mocker -): - yesterday = datetime.now() - timedelta(days=1) - - mocker.patch('app.dao.fact_billing_dao.get_rate', side_effect=mocker_get_rate) - - create_notification( - created_at=yesterday, - template=sample_letter_template, - status='delivered', - sent_by='dvla', - international=False, - rate_multiplier=2.0, - billable_units=2, - ) - - records = FactBilling.query.all() - assert len(records) == 0 - create_nightly_billing_for_day(str(yesterday.date())) - - records = FactBilling.query.order_by('rate_multiplier').all() - assert len(records) == 1 - - record = records[0] - assert record.notification_type == LETTER_TYPE - assert record.local_date == datetime.date(yesterday) - assert record.rate == Decimal(2.1) - assert record.billable_units == 2 - assert record.rate_multiplier == 2.0 - - @pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_create_nightly_billing_for_day_null_sent_by_sms( sample_service, @@ -436,38 +320,16 @@ def test_create_nightly_billing_for_day_null_sent_by_sms( assert record.provider == 'unknown' -def test_get_rate_for_letter_latest(notify_db_session): - # letter rates should be passed into the get_rate function as a tuple of start_date, crown, sheet_count, - # rate and post_class - new = create_letter_rate(datetime(2017, 12, 1), crown=True, sheet_count=1, rate=0.33, post_class='second') - old = create_letter_rate(datetime(2016, 12, 1), crown=True, sheet_count=1, rate=0.30, post_class='second') - letter_rates = [new, old] - - rate = get_rate([], letter_rates, LETTER_TYPE, date(2018, 1, 1), True, 1) - assert rate == Decimal('0.33') - - -def test_get_rate_for_letter_latest_if_crown_is_none(notify_db_session): - # letter rates should be passed into the get_rate function as a tuple of start_date, crown, sheet_count, - # rate and post_class - crown = create_letter_rate(datetime(2017, 12, 1), crown=True, sheet_count=1, rate=0.33, post_class='second') - non_crown = create_letter_rate(datetime(2017, 12, 1), crown=False, sheet_count=1, rate=0.35, post_class='second') - letter_rates = [crown, non_crown] - - rate = get_rate([], letter_rates, LETTER_TYPE, date(2018, 1, 1), crown=None, letter_page_count=1) - assert rate == Decimal('0.33') - - def test_get_rate_for_sms_and_email(notify_db_session): non_letter_rates = [ create_rate(datetime(2017, 12, 1), 0.15, SMS_TYPE), create_rate(datetime(2017, 12, 1), 0, EMAIL_TYPE) ] - rate = get_rate(non_letter_rates, [], SMS_TYPE, date(2018, 1, 1)) + rate = get_rate(non_letter_rates, SMS_TYPE, date(2018, 1, 1)) assert rate == Decimal(0.15) - rate = get_rate(non_letter_rates, [], EMAIL_TYPE, date(2018, 1, 1)) + rate = get_rate(non_letter_rates, EMAIL_TYPE, date(2018, 1, 1)) assert rate == Decimal(0) @@ -568,7 +430,6 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio first_template = create_template(service=first_service) second_service = create_service(service_name='second Service') second_template = create_template(service=second_service, template_type='email') - third_template = create_template(service=second_service, template_type='letter') process_day = date.today() - timedelta(days=5) with freeze_time(datetime.combine(process_day, time.max)): @@ -576,25 +437,23 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio create_notification(template=second_template, status='temporary-failure') # team API key notifications are included - create_notification(template=third_template, status='sending', key_type=KEY_TYPE_TEAM) + create_notification(template=second_template, status='sending', key_type=KEY_TYPE_TEAM) # test notifications are ignored - create_notification(template=third_template, status='sending', key_type=KEY_TYPE_TEST) + create_notification(template=second_template, status='sending', key_type=KEY_TYPE_TEST) # historical notifications are included - create_notification_history(template=third_template, status='delivered') + create_notification_history(template=second_template, status='delivered') # these created notifications from a different day get ignored with freeze_time(datetime.combine(date.today() - timedelta(days=4), time.max)): create_notification(template=first_template) create_notification_history(template=second_template) - create_notification(template=third_template) assert len(FactNotificationStatus.query.all()) == 0 create_nightly_notification_status_for_service_and_day(str(process_day), first_service.id, 'sms') create_nightly_notification_status_for_service_and_day(str(process_day), second_service.id, 'email') - create_nightly_notification_status_for_service_and_day(str(process_day), second_service.id, 'letter') new_fact_data = FactNotificationStatus.query.order_by( FactNotificationStatus.notification_type, @@ -603,7 +462,23 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio assert len(new_fact_data) == 4 - email_failure_row = new_fact_data[0] + email_delivered_row = new_fact_data[0] + assert email_delivered_row.template_id == second_template.id + assert email_delivered_row.service_id == second_service.id + assert email_delivered_row.notification_type == 'email' + assert email_delivered_row.notification_status == 'delivered' + assert email_delivered_row.notification_count == 1 + assert email_delivered_row.key_type == KEY_TYPE_NORMAL + + email_sending_row = new_fact_data[1] + assert email_sending_row.template_id == second_template.id + assert email_sending_row.service_id == second_service.id + assert email_sending_row.notification_type == 'email' + assert email_sending_row.notification_status == 'sending' + assert email_sending_row.notification_count == 1 + assert email_sending_row.key_type == KEY_TYPE_TEAM + + email_failure_row = new_fact_data[2] assert email_failure_row.local_date == process_day assert email_failure_row.template_id == second_template.id assert email_failure_row.service_id == second_service.id @@ -613,22 +488,6 @@ def test_create_nightly_notification_status_for_service_and_day(notify_db_sessio assert email_failure_row.notification_count == 1 assert email_failure_row.key_type == KEY_TYPE_NORMAL - letter_delivered_row = new_fact_data[1] - assert letter_delivered_row.template_id == third_template.id - assert letter_delivered_row.service_id == second_service.id - assert letter_delivered_row.notification_type == 'letter' - assert letter_delivered_row.notification_status == 'delivered' - assert letter_delivered_row.notification_count == 1 - assert letter_delivered_row.key_type == KEY_TYPE_NORMAL - - letter_sending_row = new_fact_data[2] - assert letter_sending_row.template_id == third_template.id - assert letter_sending_row.service_id == second_service.id - assert letter_sending_row.notification_type == 'letter' - assert letter_sending_row.notification_status == 'sending' - assert letter_sending_row.notification_count == 1 - assert letter_sending_row.key_type == KEY_TYPE_TEAM - sms_delivered_row = new_fact_data[3] assert sms_delivered_row.template_id == first_template.id assert sms_delivered_row.service_id == first_service.id diff --git a/tests/app/celery/test_research_mode_tasks.py b/tests/app/celery/test_research_mode_tasks.py index 996b779e2..5bc401d54 100644 --- a/tests/app/celery/test_research_mode_tasks.py +++ b/tests/app/celery/test_research_mode_tasks.py @@ -1,21 +1,18 @@ import uuid -from unittest.mock import ANY, call +from unittest.mock import ANY import pytest -import requests_mock -from flask import current_app, json -from freezegun import freeze_time +from flask import json from app.celery.research_mode_tasks import ( HTTPError, - create_fake_letter_response_file, send_email_response, send_sms_response, ses_notification_callback, sns_callback, ) from app.config import QueueNames -from tests.conftest import Matcher, set_config_values +from tests.conftest import Matcher dvla_response_file_matcher = Matcher( 'dvla_response_file', @@ -96,114 +93,3 @@ def test_temp_failure_sns_callback(): assert data['status'] == "4" assert data['reference'] == "sns_reference" assert data['CID'] == "1234" - - -@freeze_time("2018-01-25 14:00:30") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_create_fake_letter_response_file_uploads_response_file_s3( - notify_api, mocker): - mocker.patch('app.celery.research_mode_tasks.file_exists', return_value=False) - mock_s3upload = mocker.patch('app.celery.research_mode_tasks.s3upload') - - with requests_mock.Mocker() as request_mock: - request_mock.post( - 'http://localhost:6011/notifications/letter/dvla', - content=b'{}', - status_code=200 - ) - - create_fake_letter_response_file('random-ref') - - mock_s3upload.assert_called_once_with( - filedata='random-ref|Sent|0|Sorted', - region=current_app.config['AWS_REGION'], - bucket_name=current_app.config['DVLA_RESPONSE_BUCKET_NAME'], - file_location=dvla_response_file_matcher - ) - - -@freeze_time("2018-01-25 14:00:30") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_create_fake_letter_response_file_calls_dvla_callback_on_development( - notify_api, mocker): - mocker.patch('app.celery.research_mode_tasks.file_exists', return_value=False) - mocker.patch('app.celery.research_mode_tasks.s3upload') - - with set_config_values(notify_api, { - 'NOTIFY_ENVIRONMENT': 'development' - }): - with requests_mock.Mocker() as request_mock: - request_mock.post( - 'http://localhost:6011/notifications/letter/dvla', - content=b'{}', - status_code=200 - ) - - create_fake_letter_response_file('random-ref') - - assert request_mock.last_request.json() == { - "Type": "Notification", - "MessageId": "some-message-id", - "Message": ANY - } - assert json.loads(request_mock.last_request.json()['Message']) == { - "Records": [ - { - "s3": { - "object": { - "key": dvla_response_file_matcher - } - } - } - ] - } - - -@freeze_time("2018-01-25 14:00:30") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_create_fake_letter_response_file_does_not_call_dvla_callback_on_preview( - notify_api, mocker): - mocker.patch('app.celery.research_mode_tasks.file_exists', return_value=False) - mocker.patch('app.celery.research_mode_tasks.s3upload') - - with set_config_values(notify_api, { - 'NOTIFY_ENVIRONMENT': 'preview' - }): - with requests_mock.Mocker() as request_mock: - create_fake_letter_response_file('random-ref') - - assert request_mock.last_request is None - - -@freeze_time("2018-01-25 14:00:30") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_create_fake_letter_response_file_tries_to_create_files_with_other_filenames(notify_api, mocker): - mock_file_exists = mocker.patch('app.celery.research_mode_tasks.file_exists', side_effect=[True, True, False]) - mock_s3upload = mocker.patch('app.celery.research_mode_tasks.s3upload') - - create_fake_letter_response_file('random-ref') - - assert mock_file_exists.mock_calls == [ - call('test.notify.com-ftp', dvla_response_file_matcher), - call('test.notify.com-ftp', dvla_response_file_matcher), - call('test.notify.com-ftp', dvla_response_file_matcher), - ] - mock_s3upload.assert_called_once_with( - filedata=ANY, - region=ANY, - bucket_name=ANY, - file_location=dvla_response_file_matcher - ) - - -@freeze_time("2018-01-25 14:00:30") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_create_fake_letter_response_file_gives_up_after_thirty_times(notify_api, mocker): - mock_file_exists = mocker.patch('app.celery.research_mode_tasks.file_exists', return_value=True) - mock_s3upload = mocker.patch('app.celery.research_mode_tasks.s3upload') - - with pytest.raises(ValueError): - create_fake_letter_response_file('random-ref') - - assert len(mock_file_exists.mock_calls) == 30 - assert not mock_s3upload.called diff --git a/tests/app/celery/test_scheduled_tasks.py b/tests/app/celery/test_scheduled_tasks.py index 55298af2c..c9b66a3d3 100644 --- a/tests/app/celery/test_scheduled_tasks.py +++ b/tests/app/celery/test_scheduled_tasks.py @@ -4,7 +4,6 @@ from unittest import mock from unittest.mock import ANY, call import pytest -from freezegun import freeze_time from notifications_utils.clients.zendesk.zendesk_client import ( NotifySupportTicket, ) @@ -13,23 +12,19 @@ from app.celery import scheduled_tasks from app.celery.scheduled_tasks import ( check_for_missing_rows_in_completed_jobs, check_for_services_with_high_failure_rates_or_sending_to_tv_numbers, - check_if_letters_still_in_created, - check_if_letters_still_pending_virus_check, check_job_status, delete_invitations, delete_verify_codes, replay_created_notifications, run_scheduled_jobs, ) -from app.config import QueueNames, TaskNames, Test +from app.config import QueueNames, Test from app.dao.jobs_dao import dao_get_job_by_id from app.models import ( JOB_STATUS_ERROR, JOB_STATUS_FINISHED, JOB_STATUS_IN_PROGRESS, JOB_STATUS_PENDING, - NOTIFICATION_DELIVERED, - NOTIFICATION_PENDING_VIRUS_CHECK, ) from tests.app import load_example_csv from tests.app.db import create_job, create_notification, create_template @@ -259,28 +254,6 @@ def test_replay_created_notifications(notify_db_session, sample_service, mocker) queue="send-sms-tasks") -def test_replay_created_notifications_get_pdf_for_templated_letter_tasks_for_letters_not_ready_to_send( - sample_letter_template, mocker -): - mock_task = mocker.patch('app.celery.scheduled_tasks.get_pdf_for_templated_letter.apply_async') - create_notification(template=sample_letter_template, billable_units=0, - created_at=datetime.utcnow() - timedelta(hours=4)) - - create_notification(template=sample_letter_template, billable_units=0, - created_at=datetime.utcnow() - timedelta(minutes=20)) - notification_1 = create_notification(template=sample_letter_template, billable_units=0, - created_at=datetime.utcnow() - timedelta(hours=1, minutes=20)) - notification_2 = create_notification(template=sample_letter_template, billable_units=0, - created_at=datetime.utcnow() - timedelta(hours=5)) - - replay_created_notifications() - - calls = [call([str(notification_1.id)], queue=QueueNames.CREATE_LETTERS_PDF), - call([str(notification_2.id)], queue=QueueNames.CREATE_LETTERS_PDF), - ] - mock_task.assert_has_calls(calls, any_order=True) - - def test_check_job_status_task_does_not_raise_error(sample_template): create_job( template=sample_template, @@ -299,159 +272,6 @@ def test_check_job_status_task_does_not_raise_error(sample_template): check_job_status() -@freeze_time("2019-05-30 14:00:00") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_check_if_letters_still_pending_virus_check_restarts_scan_for_stuck_letters( - mocker, - sample_letter_template -): - mock_file_exists = mocker.patch('app.aws.s3.file_exists', return_value=True) - mock_create_ticket = mocker.spy(NotifySupportTicket, '__init__') - mock_celery = mocker.patch('app.celery.scheduled_tasks.notify_celery.send_task') - - create_notification( - template=sample_letter_template, - status=NOTIFICATION_PENDING_VIRUS_CHECK, - created_at=datetime.utcnow() - timedelta(seconds=5401), - reference='one' - ) - expected_filename = 'NOTIFY.ONE.D.2.C.20190530122959.PDF' - - check_if_letters_still_pending_virus_check() - - mock_file_exists.assert_called_once_with('test-letters-scan', expected_filename) - - mock_celery.assert_called_once_with( - name=TaskNames.SCAN_FILE, - kwargs={'filename': expected_filename}, - queue=QueueNames.ANTIVIRUS - ) - - assert mock_create_ticket.called is False - - -@freeze_time("2019-05-30 14:00:00") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_check_if_letters_still_pending_virus_check_raises_zendesk_if_files_cant_be_found( - mocker, - sample_letter_template -): - mock_file_exists = mocker.patch('app.aws.s3.file_exists', return_value=False) - mock_create_ticket = mocker.spy(NotifySupportTicket, '__init__') - mock_celery = mocker.patch('app.celery.scheduled_tasks.notify_celery.send_task') - mock_send_ticket_to_zendesk = mocker.patch( - 'app.celery.scheduled_tasks.zendesk_client.send_ticket_to_zendesk', - autospec=True, - ) - - create_notification(template=sample_letter_template, - status=NOTIFICATION_PENDING_VIRUS_CHECK, - created_at=datetime.utcnow() - timedelta(seconds=5400)) - create_notification(template=sample_letter_template, - status=NOTIFICATION_DELIVERED, - created_at=datetime.utcnow() - timedelta(seconds=6000)) - notification_1 = create_notification(template=sample_letter_template, - status=NOTIFICATION_PENDING_VIRUS_CHECK, - created_at=datetime.utcnow() - timedelta(seconds=5401), - reference='one') - notification_2 = create_notification(template=sample_letter_template, - status=NOTIFICATION_PENDING_VIRUS_CHECK, - created_at=datetime.utcnow() - timedelta(seconds=70000), - reference='two') - - check_if_letters_still_pending_virus_check() - - assert mock_file_exists.call_count == 2 - mock_file_exists.assert_has_calls([ - call('test-letters-scan', 'NOTIFY.ONE.D.2.C.20190530122959.PDF'), - call('test-letters-scan', 'NOTIFY.TWO.D.2.C.20190529183320.PDF'), - ], any_order=True) - assert mock_celery.called is False - - mock_create_ticket.assert_called_once_with( - ANY, - subject='[test] Letters still pending virus check', - message=ANY, - ticket_type='incident', - technical_ticket=True, - ticket_categories=['notify_letters'] - ) - assert '2 precompiled letters have been pending-virus-check' in mock_create_ticket.call_args.kwargs['message'] - assert f'{(str(notification_1.id), notification_1.reference)}' in mock_create_ticket.call_args.kwargs['message'] - assert f'{(str(notification_2.id), notification_2.reference)}' in mock_create_ticket.call_args.kwargs['message'] - mock_send_ticket_to_zendesk.assert_called_once() - - -@freeze_time("2019-05-30 14:00:00") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_check_if_letters_still_in_created_during_bst(mocker, sample_letter_template): - mock_logger = mocker.patch('app.celery.tasks.current_app.logger.error') - mock_create_ticket = mocker.spy(NotifySupportTicket, '__init__') - mock_send_ticket_to_zendesk = mocker.patch( - 'app.celery.scheduled_tasks.zendesk_client.send_ticket_to_zendesk', - autospec=True, - ) - - create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 1, 12, 0)) - create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 29, 16, 29)) - create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 29, 16, 30)) - create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 29, 17, 29)) - create_notification(template=sample_letter_template, status='delivered', created_at=datetime(2019, 5, 28, 10, 0)) - create_notification(template=sample_letter_template, created_at=datetime(2019, 5, 30, 10, 0)) - - check_if_letters_still_in_created() - - message = "2 letters were created before 17.30 yesterday and still have 'created' status. " \ - "Follow runbook to resolve: " \ - "https://github.com/alphagov/notifications-manuals/wiki/Support-Runbook#deal-with-Letters-still-in-created." - - mock_logger.assert_called_once_with(message) - mock_create_ticket.assert_called_with( - ANY, - message=message, - subject="[test] Letters still in 'created' status", - ticket_type='incident', - technical_ticket=True, - ticket_categories=['notify_letters'] - ) - mock_send_ticket_to_zendesk.assert_called_once() - - -@freeze_time("2019-01-30 14:00:00") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_check_if_letters_still_in_created_during_utc(mocker, sample_letter_template): - mock_logger = mocker.patch('app.celery.tasks.current_app.logger.error') - mock_create_ticket = mocker.spy(NotifySupportTicket, '__init__') - mock_send_ticket_to_zendesk = mocker.patch( - 'app.celery.scheduled_tasks.zendesk_client.send_ticket_to_zendesk', - autospec=True, - ) - - create_notification(template=sample_letter_template, created_at=datetime(2018, 12, 1, 12, 0)) - create_notification(template=sample_letter_template, created_at=datetime(2019, 1, 29, 17, 29)) - create_notification(template=sample_letter_template, created_at=datetime(2019, 1, 29, 17, 30)) - create_notification(template=sample_letter_template, created_at=datetime(2019, 1, 29, 18, 29)) - create_notification(template=sample_letter_template, status='delivered', created_at=datetime(2019, 1, 29, 10, 0)) - create_notification(template=sample_letter_template, created_at=datetime(2019, 1, 30, 10, 0)) - - check_if_letters_still_in_created() - - message = "2 letters were created before 17.30 yesterday and still have 'created' status. " \ - "Follow runbook to resolve: " \ - "https://github.com/alphagov/notifications-manuals/wiki/Support-Runbook#deal-with-Letters-still-in-created." - - mock_logger.assert_called_once_with(message) - mock_create_ticket.assert_called_once_with( - ANY, - message=message, - subject="[test] Letters still in 'created' status", - ticket_type='incident', - technical_ticket=True, - ticket_categories=['notify_letters'] - ) - mock_send_ticket_to_zendesk.assert_called_once() - - @pytest.mark.parametrize('offset', ( timedelta(days=1), pytest.param(timedelta(hours=23, minutes=59), marks=pytest.mark.xfail), diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index ee8d7ccf7..65b9c2335 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -9,7 +9,6 @@ from celery.exceptions import Retry from freezegun import freeze_time from notifications_utils.recipients import Row from notifications_utils.template import ( - LetterPrintTemplate, PlainTextEmailTemplate, SMSMessageTemplate, ) @@ -23,13 +22,11 @@ from app.celery.tasks import ( process_incomplete_job, process_incomplete_jobs, process_job, - process_returned_letters_list, process_row, s3, save_api_email, save_api_sms, save_email, - save_letter, save_sms, send_inbound_sms_to_service, ) @@ -41,13 +38,10 @@ from app.models import ( JOB_STATUS_FINISHED, JOB_STATUS_IN_PROGRESS, KEY_TYPE_NORMAL, - LETTER_TYPE, NOTIFICATION_CREATED, SMS_TYPE, Job, Notification, - NotificationHistory, - ReturnedLetter, ) from app.serialised_models import SerialisedService, SerialisedTemplate from app.utils import DATETIME_FORMAT @@ -57,9 +51,7 @@ from tests.app.db import ( create_api_key, create_inbound_sms, create_job, - create_letter_contact, create_notification, - create_notification_history, create_reply_to_email, create_service, create_service_inbound_api, @@ -67,7 +59,6 @@ from tests.app.db import ( create_template, create_user, ) -from tests.conftest import set_config_values class AnyStringWith(str): @@ -91,7 +82,6 @@ def test_should_have_decorated_tasks_functions(): assert process_job.__wrapped__.__name__ == 'process_job' assert save_sms.__wrapped__.__name__ == 'save_sms' assert save_email.__wrapped__.__name__ == 'save_email' - assert save_letter.__wrapped__.__name__ == 'save_letter' @pytest.fixture @@ -298,41 +288,6 @@ def test_should_process_email_job_with_sender_id(email_job_with_placeholders, mo ) -@freeze_time("2016-01-01 11:09:00.061258") -def test_should_process_letter_job(sample_letter_job, mocker): - csv = """address_line_1,address_line_2,address_line_3,address_line_4,postcode,name - A1,A2,A3,A4,A_POST,Alice - """ - s3_mock = mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', - return_value=(csv, {"sender_id": None})) - process_row_mock = mocker.patch('app.celery.tasks.process_row') - mocker.patch('app.celery.tasks.create_uuid', return_value="uuid") - - process_job(sample_letter_job.id) - - s3_mock.assert_called_once_with( - service_id=str(sample_letter_job.service.id), - job_id=str(sample_letter_job.id) - ) - - row_call = process_row_mock.mock_calls[0][1] - assert row_call[0].index == 0 - assert row_call[0].recipient == ['A1', 'A2', 'A3', 'A4', None, None, 'A_POST', None] - assert row_call[0].personalisation == { - 'addressline1': 'A1', - 'addressline2': 'A2', - 'addressline3': 'A3', - 'addressline4': 'A4', - 'postcode': 'A_POST' - } - assert row_call[2] == sample_letter_job - assert row_call[3] == sample_letter_job.service - - assert process_row_mock.call_count == 1 - - assert sample_letter_job.job_status == 'finished' - - def test_should_process_all_sms_job(sample_job_with_placeholdered_template, mocker): mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', @@ -365,8 +320,6 @@ def test_should_process_all_sms_job(sample_job_with_placeholdered_template, (SMS_TYPE, True, 'save_sms', 'research-mode-tasks'), (EMAIL_TYPE, False, 'save_email', 'database-tasks'), (EMAIL_TYPE, True, 'save_email', 'research-mode-tasks'), - (LETTER_TYPE, False, 'save_letter', 'database-tasks'), - (LETTER_TYPE, True, 'save_letter', 'research-mode-tasks'), ]) def test_process_row_sends_letter_task(template_type, research_mode, expected_function, expected_queue, mocker): mocker.patch('app.celery.tasks.create_uuid', return_value='noti_uuid') @@ -930,279 +883,6 @@ def test_save_sms_does_not_send_duplicate_and_does_not_put_in_retry_queue(sample assert not retry.called -@pytest.mark.parametrize('personalisation, expected_to, expected_normalised', ( - ({ - 'addressline1': 'Foo', - 'addressline2': 'Bar', - 'addressline3': 'Baz', - 'addressline4': 'Wibble', - 'addressline5': 'Wobble', - 'addressline6': 'Wubble', - 'postcode': 'SE1 2SA', - }, ( - 'Foo\n' - 'Bar\n' - 'Baz\n' - 'Wibble\n' - 'Wobble\n' - 'Wubble\n' - 'SE1 2SA' - ), ( - 'foobarbazwibblewobblewubblese12sa' - )), - ({ - # The address isn’t normalised when we store it in the - # `personalisation` column, but is normalised for storing in the - # `to` column - 'addressline2': ' Foo ', - 'addressline4': 'Bar', - 'addressline6': 'se12sa', - }, ( - 'Foo\n' - 'Bar\n' - 'SE1 2SA' - ), ( - 'foobarse12sa' - )), -)) -def test_save_letter_saves_letter_to_database( - mocker, - notify_db_session, - personalisation, - expected_to, - expected_normalised, -): - service = create_service() - contact_block = create_letter_contact(service=service, contact_block="Address contact", is_default=True) - template = create_template(service=service, template_type=LETTER_TYPE, reply_to=contact_block.id) - job = create_job(template=template) - - mocker.patch('app.celery.tasks.create_random_identifier', return_value="this-is-random-in-real-life") - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - - notification_json = _notification_json( - template=job.template, - to='This is ignored for letters', - personalisation=personalisation, - job_id=job.id, - row_number=1 - ) - notification_id = uuid.uuid4() - created_at = datetime.utcnow() - - save_letter( - job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) - - notification_db = Notification.query.one() - assert notification_db.id == notification_id - assert notification_db.to == expected_to - assert notification_db.normalised_to == expected_normalised - assert notification_db.job_id == job.id - assert notification_db.template_id == job.template.id - assert notification_db.template_version == job.template.version - assert notification_db.status == 'created' - assert notification_db.created_at >= created_at - assert notification_db.notification_type == 'letter' - assert notification_db.sent_at is None - assert notification_db.sent_by is None - assert notification_db.personalisation == personalisation - assert notification_db.reference == "this-is-random-in-real-life" - assert notification_db.reply_to_text == contact_block.contact_block - - -@pytest.mark.parametrize('last_line_of_address, postage, expected_postage, expected_international', - [('SW1 1AA', 'first', 'first', False), - ('SW1 1AA', 'second', 'second', False), - ('New Zealand', 'second', 'rest-of-world', True), - ('France', 'first', 'europe', True)]) -def test_save_letter_saves_letter_to_database_with_correct_postage( - mocker, notify_db_session, last_line_of_address, postage, expected_postage, expected_international -): - service = create_service(service_permissions=[LETTER_TYPE]) - template = create_template(service=service, template_type=LETTER_TYPE, postage=postage) - letter_job = create_job(template=template) - - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - notification_json = _notification_json( - template=letter_job.template, - to='Foo', - personalisation={'addressline1': 'Foo', 'addressline2': 'Bar', 'postcode': last_line_of_address}, - job_id=letter_job.id, - row_number=1 - ) - notification_id = uuid.uuid4() - save_letter( - letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) - - notification_db = Notification.query.one() - assert notification_db.id == notification_id - assert notification_db.postage == expected_postage - assert notification_db.international == expected_international - - -@pytest.mark.parametrize('reference_paceholder,', [None, 'ref2']) -def test_save_letter_saves_letter_to_database_with_correct_client_reference( - mocker, notify_db_session, reference_paceholder -): - service = create_service(service_permissions=[LETTER_TYPE]) - template = create_template(service=service, template_type=LETTER_TYPE) - letter_job = create_job(template=template) - - personalisation = {'addressline1': 'Foo', 'addressline2': 'Bar', 'postcode': 'SW1A 1AA'} - if reference_paceholder: - personalisation['reference'] = reference_paceholder - - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - notification_json = _notification_json( - template=letter_job.template, - to='Foo', - personalisation=personalisation, - job_id=letter_job.id, - row_number=1 - ) - notification_id = uuid.uuid4() - save_letter( - letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) - - notification_db = Notification.query.one() - assert notification_db.id == notification_id - assert notification_db.client_reference == reference_paceholder - - -def test_save_letter_saves_letter_to_database_with_formatted_postcode(mocker, notify_db_session): - service = create_service(service_permissions=[LETTER_TYPE]) - template = create_template(service=service, template_type=LETTER_TYPE) - letter_job = create_job(template=template) - - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - notification_json = _notification_json( - template=letter_job.template, - to='Foo', - personalisation={'addressline1': 'Foo', 'addressline2': 'Bar', 'postcode': 'se1 64sa'}, - job_id=letter_job.id, - row_number=1 - ) - notification_id = uuid.uuid4() - save_letter( - letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) - - notification_db = Notification.query.one() - assert notification_db.id == notification_id - assert notification_db.personalisation["postcode"] == "se1 64sa" - - -def test_save_letter_saves_letter_to_database_right_reply_to(mocker, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Address contact", is_default=True) - template = create_template(service=service, template_type=LETTER_TYPE, reply_to=None) - job = create_job(template=template) - - mocker.patch('app.celery.tasks.create_random_identifier', return_value="this-is-random-in-real-life") - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - - personalisation = { - 'addressline1': 'Foo', - 'addressline2': 'Bar', - 'addressline3': 'Baz', - 'addressline4': 'Wibble', - 'addressline5': 'Wobble', - 'addressline6': 'Wubble', - 'postcode': 'SE1 3WS', - } - notification_json = _notification_json( - template=job.template, - to='Foo', - personalisation=personalisation, - job_id=job.id, - row_number=1 - ) - notification_id = uuid.uuid4() - created_at = datetime.utcnow() - - save_letter( - job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) - - notification_db = Notification.query.one() - assert notification_db.id == notification_id - assert notification_db.to == ( - 'Foo\n' - 'Bar\n' - 'Baz\n' - 'Wibble\n' - 'Wobble\n' - 'Wubble\n' - 'SE1 3WS' - ) - assert notification_db.job_id == job.id - assert notification_db.template_id == job.template.id - assert notification_db.template_version == job.template.version - assert notification_db.status == 'created' - assert notification_db.created_at >= created_at - assert notification_db.notification_type == 'letter' - assert notification_db.sent_at is None - assert notification_db.sent_by is None - assert notification_db.personalisation == personalisation - assert notification_db.reference == "this-is-random-in-real-life" - assert not notification_db.reply_to_text - - -def test_save_letter_uses_template_reply_to_text(mocker, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block="Address contact", is_default=True) - template_contact = create_letter_contact( - service=service, - contact_block="Template address contact", - is_default=False - ) - template = create_template( - service=service, - template_type=LETTER_TYPE, - reply_to=template_contact.id - ) - - job = create_job(template=template) - - mocker.patch('app.celery.tasks.create_random_identifier', return_value="this-is-random-in-real-life") - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - - personalisation = { - 'addressline1': 'Foo', - 'addressline2': 'Bar', - 'postcode': 'Flob', - } - notification_json = _notification_json( - template=job.template, - to='Foo', - personalisation=personalisation, - job_id=job.id, - row_number=1 - ) - - save_letter( - job.service_id, - uuid.uuid4(), - encryption.encrypt(notification_json), - ) - - notification_db = Notification.query.one() - assert notification_db.reply_to_text == "Template address contact" - - def test_save_sms_uses_sms_sender_reply_to_text(mocker, notify_db_session): service = create_service_with_defined_sms_sender(sms_sender_value='2028675309') template = create_template(service=service) @@ -1241,115 +921,6 @@ def test_save_sms_uses_non_default_sms_sender_reply_to_text_if_provided(mocker, assert persisted_notification.reply_to_text == 'new-sender' -@pytest.mark.skip(reason="Needs updating for TTS: Remove mail") -@pytest.mark.parametrize('env', ['staging', 'live']) -def test_save_letter_sets_delivered_letters_as_pdf_permission_in_research_mode_in_staging_live( - notify_api, mocker, notify_db_session, sample_letter_job, env): - sample_letter_job.service.research_mode = True - sample_reference = "this-is-random-in-real-life" - mock_create_fake_letter_response_file = mocker.patch( - 'app.celery.research_mode_tasks.create_fake_letter_response_file.apply_async') - mocker.patch('app.celery.tasks.create_random_identifier', return_value=sample_reference) - - personalisation = { - 'addressline1': 'Foo', - 'addressline2': 'Bar', - 'postcode': 'Flob', - } - notification_json = _notification_json( - template=sample_letter_job.template, - to='Foo', - personalisation=personalisation, - job_id=sample_letter_job.id, - row_number=1 - ) - notification_id = uuid.uuid4() - - with set_config_values(notify_api, { - 'NOTIFY_ENVIRONMENT': env - }): - save_letter( - sample_letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) - - notification = Notification.query.filter(Notification.id == notification_id).one() - assert notification.status == 'delivered' - assert not mock_create_fake_letter_response_file.called - - -@pytest.mark.skip(reason="Needs updating for TTS: Remove mail") -@pytest.mark.parametrize('env', ['development', 'preview']) -def test_save_letter_calls_create_fake_response_for_letters_in_research_mode_on_development_preview( - notify_api, mocker, notify_db_session, sample_letter_job, env): - sample_letter_job.service.research_mode = True - sample_reference = "this-is-random-in-real-life" - mock_create_fake_letter_response_file = mocker.patch( - 'app.celery.research_mode_tasks.create_fake_letter_response_file.apply_async') - mocker.patch('app.celery.tasks.create_random_identifier', return_value=sample_reference) - - personalisation = { - 'addressline1': 'Foo', - 'addressline2': 'Bar', - 'postcode': 'Flob', - } - notification_json = _notification_json( - template=sample_letter_job.template, - to='Foo', - personalisation=personalisation, - job_id=sample_letter_job.id, - row_number=1 - ) - notification_id = uuid.uuid4() - - with set_config_values(notify_api, { - 'NOTIFY_ENVIRONMENT': env - }): - save_letter( - sample_letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) - - mock_create_fake_letter_response_file.assert_called_once_with( - (sample_reference,), - queue=QueueNames.RESEARCH_MODE - ) - - -@pytest.mark.skip(reason="Needs updating for TTS: Remove mail") -def test_save_letter_calls_get_pdf_for_templated_letter_task_not_in_research( - mocker, notify_db_session, sample_letter_job): - mock_create_letters_pdf = mocker.patch('app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - - personalisation = { - 'addressline1': 'Foo', - 'addressline2': 'Bar', - 'postcode': 'Flob', - } - notification_json = _notification_json( - template=sample_letter_job.template, - to='Foo', - personalisation=personalisation, - job_id=sample_letter_job.id, - row_number=1 - ) - notification_id = uuid.uuid4() - - save_letter( - sample_letter_job.service_id, - notification_id, - encryption.encrypt(notification_json), - ) - - assert mock_create_letters_pdf.called - mock_create_letters_pdf.assert_called_once_with( - [str(notification_id)], - queue=QueueNames.CREATE_LETTERS_PDF - ) - - def test_should_cancel_job_if_service_is_inactive(sample_service, sample_job, mocker): @@ -1402,49 +973,6 @@ def test_get_sms_template_instance(mocker, sample_template, sample_job): ] -@pytest.mark.skip(reason="Needs updating for TTS: Remove mail") -def test_get_letter_template_instance(mocker, sample_job): - mocker.patch( - 'app.celery.tasks.s3.get_job_and_metadata_from_s3', - return_value=('', {}), - ) - sample_contact_block = create_letter_contact( - service=sample_job.service, - contact_block='((reference number))' - ) - sample_template = create_template( - service=sample_job.service, - template_type=LETTER_TYPE, - reply_to=sample_contact_block.id, - ) - sample_job.template_id = sample_template.id - - ( - recipient_csv, - template, - _sender_id, - ) = get_recipient_csv_and_template_and_sender_id(sample_job) - - assert isinstance(template, LetterPrintTemplate) - assert template.contact_block == ( - '((reference number))' - ) - assert template.placeholders == { - 'reference number' - } - assert recipient_csv.placeholders == [ - 'reference number', - 'address line 1', - 'address line 2', - 'address line 3', - 'address line 4', - 'address line 5', - 'address line 6', - 'postcode', - 'address line 7', - ] - - def test_send_inbound_sms_to_service_post_https_request_to_service(notify_api, sample_service): inbound_api = create_service_inbound_api(service=sample_service, url="https://some.service.gov.uk/", bearer_token="something_unique") @@ -1719,28 +1247,6 @@ def test_process_incomplete_job_email(mocker, sample_email_template): assert mock_email_saver.call_count == 8 # There are 10 in the file and we've added two already -@pytest.mark.skip(reason="Needs updating for TTS: Remove mail") -def test_process_incomplete_job_letter(mocker, sample_letter_template): - mocker.patch('app.celery.tasks.s3.get_job_and_metadata_from_s3', - return_value=(load_example_csv('multiple_letter'), {'sender_id': None})) - mock_letter_saver = mocker.patch('app.celery.tasks.save_letter.apply_async') - - job = create_job(template=sample_letter_template, notification_count=10, - created_at=datetime.utcnow() - timedelta(hours=2), - scheduled_for=datetime.utcnow() - timedelta(minutes=31), - processing_started=datetime.utcnow() - timedelta(minutes=31), - job_status=JOB_STATUS_ERROR) - - create_notification(sample_letter_template, job, 0) - create_notification(sample_letter_template, job, 1) - - assert Notification.query.filter(Notification.job_id == job.id).count() == 2 - - process_incomplete_job(str(job.id)) - - assert mock_letter_saver.call_count == 8 - - @freeze_time('2017-01-01') def test_process_incomplete_jobs_sets_status_to_in_progress_and_resets_processing_started_time(mocker, sample_template): mock_process_incomplete_job = mocker.patch('app.celery.tasks.process_incomplete_job') @@ -1767,47 +1273,6 @@ def test_process_incomplete_jobs_sets_status_to_in_progress_and_resets_processin assert mock_process_incomplete_job.mock_calls == [call(str(job1.id)), call(str(job2.id))] -@pytest.mark.skip(reason="Needs updating for TTS: Remove mail") -def test_process_returned_letters_list(sample_letter_template): - create_notification(sample_letter_template, reference='ref1') - create_notification(sample_letter_template, reference='ref2') - - process_returned_letters_list(['ref1', 'ref2', 'unknown-ref']) - - notifications = Notification.query.all() - - assert [n.status for n in notifications] == ['returned-letter', 'returned-letter'] - assert all(n.updated_at for n in notifications) - - -@pytest.mark.skip(reason="Needs updating for TTS: Remove mail") -def test_process_returned_letters_list_updates_history_if_notification_is_already_purged( - sample_letter_template -): - create_notification_history(sample_letter_template, reference='ref1') - create_notification_history(sample_letter_template, reference='ref2') - - process_returned_letters_list(['ref1', 'ref2', 'unknown-ref']) - - notifications = NotificationHistory.query.all() - - assert [n.status for n in notifications] == ['returned-letter', 'returned-letter'] - assert all(n.updated_at for n in notifications) - - -@pytest.mark.skip(reason="Needs updating for TTS: Remove mail") -def test_process_returned_letters_populates_returned_letters_table( - sample_letter_template -): - create_notification_history(sample_letter_template, reference='ref1') - create_notification_history(sample_letter_template, reference='ref2') - - process_returned_letters_list(['ref1', 'ref2', 'unknown-ref']) - - returned_letters = ReturnedLetter.query.all() - assert len(returned_letters) == 2 - - @freeze_time('2020-03-25 14:30') @pytest.mark.parametrize('notification_type', ['sms', 'email']) def test_save_api_email_or_sms(mocker, sample_service, notification_type): diff --git a/tests/app/conftest.py b/tests/app/conftest.py index ba4a6419b..b3a2ff1c1 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -23,7 +23,6 @@ from app.models import ( KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, - LETTER_TYPE, NOTIFICATION_STATUS_TYPES_COMPLETED, SERVICE_PERMISSION_TYPES, SMS_TYPE, @@ -49,7 +48,6 @@ from tests.app.db import ( create_inbound_number, create_invited_org_user, create_job, - create_letter_contact, create_notification, create_service, create_template, @@ -86,7 +84,6 @@ def create_sample_notification( rate_multiplier=1.0, scheduled_for=None, normalised_to=None, - postage=None, ): if created_at is None: created_at = datetime.utcnow() @@ -133,7 +130,6 @@ def create_sample_notification( "client_reference": client_reference, "rate_multiplier": rate_multiplier, "normalised_to": normalised_to, - "postage": postage, } if job_row_number is not None: data["job_row_number"] = job_row_number @@ -240,12 +236,6 @@ def _sample_service_full_permissions(notify_db_session): return service -@pytest.fixture(scope='function', name='sample_service_custom_letter_contact_block') -def _sample_service_custom_letter_contact_block(sample_service): - create_letter_contact(sample_service, contact_block='((contact block))') - return sample_service - - @pytest.fixture(scope='function') def sample_template(sample_user): service = create_service(service_permissions=[EMAIL_TYPE, SMS_TYPE], check_if_service_exists=True) @@ -306,17 +296,6 @@ def sample_template_without_email_permission(notify_db_session): return create_template(service, template_type=EMAIL_TYPE) -@pytest.fixture -def sample_letter_template(sample_service_full_permissions): - return create_template(sample_service_full_permissions, template_type=LETTER_TYPE, postage='second') - - -@pytest.fixture -def sample_trial_letter_template(sample_service_full_permissions): - sample_service_full_permissions.restricted = True - return create_template(sample_service_full_permissions, template_type=LETTER_TYPE) - - @pytest.fixture(scope='function') def sample_email_template_with_placeholders(sample_service): return create_template( @@ -409,25 +388,6 @@ def sample_scheduled_job(sample_template_with_placeholders): ) -@pytest.fixture -def sample_letter_job(sample_letter_template): - service = sample_letter_template.service - data = { - 'id': uuid.uuid4(), - 'service_id': service.id, - 'service': service, - 'template_id': sample_letter_template.id, - 'template_version': sample_letter_template.version, - 'original_file_name': 'some.csv', - 'notification_count': 1, - 'created_at': datetime.utcnow(), - 'created_by': service.created_by, - } - job = Job(**data) - dao_create_job(job) - return job - - @pytest.fixture(scope='function') def sample_notification_with_job(notify_db_session): service = create_service(check_if_service_exists=True) @@ -486,7 +446,6 @@ def sample_notification(notify_db_session): 'client_reference': None, 'rate_multiplier': 1.0, 'normalised_to': None, - 'postage': None, } notification = Notification(**data) @@ -495,20 +454,6 @@ def sample_notification(notify_db_session): return notification -@pytest.fixture -def sample_letter_notification(sample_letter_template): - address = { - 'address_line_1': 'A1', - 'address_line_2': 'A2', - 'address_line_3': 'A3', - 'address_line_4': 'A4', - 'address_line_5': 'A5', - 'address_line_6': 'A6', - 'postcode': 'A_POST' - } - return create_notification(sample_letter_template, reference='foo', personalisation=address) - - @pytest.fixture(scope='function') def sample_email_notification(notify_db_session): created_at = datetime.utcnow() @@ -818,31 +763,6 @@ def create_custom_template(service, user, template_config_name, template_type, c return template -@pytest.fixture(scope='function') -def letter_volumes_email_template(notify_service): - email_template_content = '\n'.join([ - "((total_volume)) letters (((total_sheets)) sheets) sent via Notify are coming in today''s batch. These include: ", # noqa - "", - "((first_class_volume)) first class letters (((first_class_sheets)) sheets).", - "((second_class_volume)) second class letters (((second_class_sheets)) sheets).", - "((international_volume)) international letters (((international_sheets)) sheets).", - "", - "Thanks", - "", - "GOV.​UK Notify team", - "https://www.gov.uk/notify" - ]) - - return create_custom_template( - service=notify_service, - user=notify_service.users[0], - template_config_name='LETTERS_VOLUME_EMAIL_TEMPLATE_ID', - content=email_template_content, - subject="Notify letter volume for ((date)): ((total_volume)) letters, ((total_sheets)) sheets", - template_type='email' - ) - - @pytest.fixture def notify_service(notify_db_session, sample_user): service = Service.query.get(current_app.config['NOTIFY_SERVICE_ID']) diff --git a/tests/app/dao/notification_dao/test_notification_dao.py b/tests/app/dao/notification_dao/test_notification_dao.py index 8ae304ebf..34aac0466 100644 --- a/tests/app/dao/notification_dao/test_notification_dao.py +++ b/tests/app/dao/notification_dao/test_notification_dao.py @@ -11,8 +11,6 @@ from app.dao.notifications_dao import ( dao_create_notification, dao_delete_notifications_by_id, dao_get_last_notification_added_for_job_id, - dao_get_letters_and_sheets_volume_by_postage, - dao_get_letters_to_be_printed, dao_get_notification_by_reference, dao_get_notification_count_for_job_id, dao_get_notification_history_by_reference, @@ -112,25 +110,6 @@ def test_should_update_status_by_id_if_created(sample_template, sample_notificat assert updated.status == 'failed' -def test_should_update_status_by_id_if_pending_virus_check(sample_letter_template): - notification = create_notification(template=sample_letter_template, status='pending-virus-check') - assert Notification.query.get(notification.id).status == 'pending-virus-check' - updated = update_notification_status_by_id(notification.id, 'cancelled') - assert Notification.query.get(notification.id).status == 'cancelled' - assert updated.status == 'cancelled' - - -def test_should_update_status_of_international_letter_to_cancelled(sample_letter_template): - notification = create_notification( - template=sample_letter_template, - international=True, - postage='europe', - ) - assert Notification.query.get(notification.id).international is True - update_notification_status_by_id(notification.id, 'cancelled') - assert Notification.query.get(notification.id).status == 'cancelled' - - def test_should_update_status_by_id_and_set_sent_by(sample_template): notification = create_notification(template=sample_template, status='sending') @@ -682,18 +661,6 @@ def test_dao_timeout_notifications_only_updates_for_older_notifications(sample_t assert Notification.query.get(pending.id).status == 'pending' -def test_dao_timeout_notifications_doesnt_affect_letters(sample_letter_template): - with freeze_time(datetime.utcnow() - timedelta(minutes=2)): - sending = create_notification(sample_letter_template, status='sending') - pending = create_notification(sample_letter_template, status='pending') - - temporary_failure_notifications = dao_timeout_notifications(datetime.utcnow()) - - assert len(temporary_failure_notifications) == 0 - assert Notification.query.get(sending.id).status == 'sending' - assert Notification.query.get(pending.id).status == 'pending' - - def test_should_return_notifications_excluding_jobs_by_default(sample_template, sample_job, sample_api_key): create_notification(sample_template, job=sample_job) without_job = create_notification(sample_template, api_key=sample_api_key) @@ -1196,7 +1163,6 @@ def test_dao_get_notifications_by_reference( service = create_service() sms_template = create_template(service=service) email_template = create_template(service=service, template_type='email') - letter_template = create_template(service=service, template_type='letter') sms = create_notification( template=sms_template, to_field='07711111111', @@ -1209,18 +1175,11 @@ def test_dao_get_notifications_by_reference( normalised_to='077@example.com', client_reference='77bB', ) - letter = create_notification( - template=letter_template, - to_field='123 Example Street\nXX1X 1XX', - normalised_to='123examplestreetxx1x1xx', - client_reference='77bB', - ) results = dao_get_notifications_by_recipient_or_reference(service.id, '77') - assert len(results.items) == 3 - assert results.items[0].id == letter.id - assert results.items[1].id == email.id - assert results.items[2].id == sms.id + assert len(results.items) == 2 + assert results.items[0].id == email.id + assert results.items[1].id == sms.id # If notification_type isn’t specified then we can’t normalise the # phone number to 4477… so this query will only find the email sent @@ -1259,21 +1218,6 @@ def test_dao_get_notifications_by_reference( results = dao_get_notifications_by_recipient_or_reference(service.id, 'aA', notification_type='email') assert len(results.items) == 0 - results = dao_get_notifications_by_recipient_or_reference(service.id, 'aA', notification_type='letter') - assert len(results.items) == 0 - - results = dao_get_notifications_by_recipient_or_reference(service.id, '123') - assert len(results.items) == 1 - assert results.items[0].id == letter.id - - results = dao_get_notifications_by_recipient_or_reference(service.id, 'xX 1x1 Xx') - assert len(results.items) == 1 - assert results.items[0].id == letter.id - - results = dao_get_notifications_by_recipient_or_reference(service.id, '77', notification_type='letter') - assert len(results.items) == 1 - assert results.items[0].id == letter.id - def test_dao_get_notifications_by_to_field_filters_status(sample_template): notification = create_notification( @@ -1450,48 +1394,33 @@ def test_dao_update_notifications_by_reference_returns_zero_when_no_notification assert updated_history_count == 0 -def test_dao_update_notifications_by_reference_set_returned_letter_status(sample_letter_template): - notification = create_notification(template=sample_letter_template, reference='ref') - - updated_count, updated_history_count = dao_update_notifications_by_reference( - references=['ref'], - update_dict={"status": "returned-letter"} - ) - - assert updated_count == 1 - assert updated_history_count == 0 - updated_notification = Notification.query.get(notification.id) - assert updated_notification.status == 'returned-letter' - assert updated_notification.updated_at <= datetime.utcnow() - - def test_dao_update_notifications_by_reference_updates_history_when_one_of_two_notifications_exists( - sample_letter_template + sample_template ): - notification1 = create_notification_history(template=sample_letter_template, reference='ref1') - notification2 = create_notification(template=sample_letter_template, reference='ref2') + notification1 = create_notification_history(template=sample_template, reference='ref1') + notification2 = create_notification(template=sample_template, reference='ref2') updated_count, updated_history_count = dao_update_notifications_by_reference( references=['ref1', 'ref2'], - update_dict={"status": "returned-letter"} + update_dict={"status": "delivered"} ) assert updated_count == 1 assert updated_history_count == 1 - assert Notification.query.get(notification2.id).status == 'returned-letter' - assert NotificationHistory.query.get(notification1.id).status == 'returned-letter' + assert Notification.query.get(notification2.id).status == 'delivered' + assert NotificationHistory.query.get(notification1.id).status == 'delivered' -def test_dao_get_notification_by_reference_with_one_match_returns_notification(sample_letter_template): - create_notification(template=sample_letter_template, reference='REF1') +def test_dao_get_notification_by_reference_with_one_match_returns_notification(sample_template): + create_notification(template=sample_template, reference='REF1') notification = dao_get_notification_by_reference('REF1') assert notification.reference == 'REF1' -def test_dao_get_notification_by_reference_with_multiple_matches_raises_error(sample_letter_template): - create_notification(template=sample_letter_template, reference='REF1') - create_notification(template=sample_letter_template, reference='REF1') +def test_dao_get_notification_by_reference_with_multiple_matches_raises_error(sample_template): + create_notification(template=sample_template, reference='REF1') + create_notification(template=sample_template, reference='REF1') with pytest.raises(SQLAlchemyError): dao_get_notification_by_reference('REF1') @@ -1503,19 +1432,19 @@ def test_dao_get_notification_by_reference_with_no_matches_raises_error(notify_d def test_dao_get_notification_history_by_reference_with_one_match_returns_notification( - sample_letter_template + sample_template ): - create_notification(template=sample_letter_template, reference='REF1') + create_notification(template=sample_template, reference='REF1') notification = dao_get_notification_history_by_reference('REF1') assert notification.reference == 'REF1' def test_dao_get_notification_history_by_reference_with_multiple_matches_raises_error( - sample_letter_template + sample_template ): - create_notification(template=sample_letter_template, reference='REF1') - create_notification(template=sample_letter_template, reference='REF1') + create_notification(template=sample_template, reference='REF1') + create_notification(template=sample_template, reference='REF1') with pytest.raises(SQLAlchemyError): dao_get_notification_history_by_reference('REF1') @@ -1527,7 +1456,7 @@ def test_dao_get_notification_history_by_reference_with_no_matches_raises_error( @pytest.mark.parametrize("notification_type", - ["letter", "email", "sms"] + ["email", "sms"] ) def test_notifications_not_yet_sent(sample_service, notification_type): older_than = 4 # number of seconds the notification can not be older than @@ -1546,7 +1475,7 @@ def test_notifications_not_yet_sent(sample_service, notification_type): @pytest.mark.parametrize("notification_type", - ["letter", "email", "sms"] + ["email", "sms"] ) def test_notifications_not_yet_sent_return_no_rows(sample_service, notification_type): older_than = 5 # number of seconds the notification can not be older than @@ -1563,76 +1492,6 @@ def test_notifications_not_yet_sent_return_no_rows(sample_service, notification_ assert len(results) == 0 -def test_letters_to_be_printed_sort_by_service(notify_db_session): - first_service = create_service(service_name='first service', service_id='3a5cea08-29fd-4bb9-b582-8dedd928b149') - second_service = create_service(service_name='second service', service_id='642bf33b-54b5-45f2-8c13-942a46616704') - first_template = create_template(service=first_service, template_type='letter', postage='second') - second_template = create_template(service=second_service, template_type='letter', postage='second') - letters_ordered_by_service_then_time = [ - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 9, 30)), - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 12, 30)), - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 13, 30)), - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 14, 30)), - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 15, 30)), - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 30)), - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 31)), - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 32)), - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 33)), - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 34)) - ] - - results = list( - dao_get_letters_to_be_printed(print_run_deadline=datetime(2020, 12, 1, 17, 30), postage='second', query_limit=4) - ) - assert [x.id for x in results] == [x.id for x in letters_ordered_by_service_then_time] - - -def test_letters_to_be_printed_does_not_include_letters_without_billable_units_set( - notify_db_session, sample_letter_template): - included_letter = create_notification( - template=sample_letter_template, created_at=datetime(2020, 12, 1, 9, 30), billable_units=3) - create_notification( - template=sample_letter_template, created_at=datetime(2020, 12, 1, 9, 31), billable_units=0) - - results = list( - dao_get_letters_to_be_printed(print_run_deadline=datetime(2020, 12, 1, 17, 30), postage='second', query_limit=4) - ) - assert len(results) == 1 - assert results[0].id == included_letter.id - - -def test_dao_get_letters_and_sheets_volume_by_postage(notify_db_session): - first_service = create_service(service_name='first service', service_id='3a5cea08-29fd-4bb9-b582-8dedd928b149') - second_service = create_service(service_name='second service', service_id='642bf33b-54b5-45f2-8c13-942a46616704') - first_template = create_template(service=first_service, template_type='letter', postage='second') - second_template = create_template(service=second_service, template_type='letter', postage='second') - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 9, 30), postage='first') - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 12, 30), postage='europe') - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 13, 30), postage='rest-of-world') - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 14, 30), billable_units=3) - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 14, 30), billable_units=0) - create_notification(template=first_template, created_at=datetime(2020, 12, 1, 15, 30)) - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 30), postage='first') - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 31), postage='first') - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 32)) - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 33)) - create_notification(template=second_template, created_at=datetime(2020, 12, 1, 8, 34)) - - results = dao_get_letters_and_sheets_volume_by_postage(print_run_deadline=datetime(2020, 12, 1, 17, 30)) - - assert len(results) == 4 - - expected_results = [ - {'letters_count': 1, 'sheets_count': 1, 'postage': 'europe'}, - {'letters_count': 3, 'sheets_count': 3, 'postage': 'first'}, - {'letters_count': 1, 'sheets_count': 1, 'postage': 'rest-of-world'}, - {'letters_count': 5, 'sheets_count': 7, 'postage': 'second'} - ] - - for result in results: - assert result._asdict() in expected_results - - @pytest.mark.parametrize('created_at_utc,date_to_check,expected_count', [ # Clocks change on the 27th of March 2022, so the query needs to look at the # time range 00:00 - 23:00 (UTC) thereafter. diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py index ca141c445..6e940ef0a 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py +++ b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py @@ -1,11 +1,7 @@ import uuid from datetime import datetime, timedelta -import boto3 -import pytest -from flask import current_app from freezegun import freeze_time -from moto import mock_s3 from app.dao.notifications_dao import ( insert_notification_history_delete_notifications, @@ -26,50 +22,6 @@ from tests.app.db import ( ) -@mock_s3 -@freeze_time('2019-09-01 04:30') -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_move_notifications_deletes_letters_from_s3(sample_letter_template, mocker): - s3 = boto3.client('s3', region_name='eu-west-1') - bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME'] - s3.create_bucket( - Bucket=bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - - eight_days_ago = datetime.utcnow() - timedelta(days=8) - create_notification(template=sample_letter_template, status='delivered', - reference='LETTER_REF', created_at=eight_days_ago, sent_at=eight_days_ago) - filename = "{}/NOTIFY.LETTER_REF.D.2.C.{}.PDF".format( - str(eight_days_ago.date()), - eight_days_ago.strftime('%Y%m%d%H%M%S') - ) - s3.put_object(Bucket=bucket_name, Key=filename, Body=b'foo') - - move_notifications_to_notification_history('letter', sample_letter_template.service_id, datetime(2020, 1, 2)) - - with pytest.raises(s3.exceptions.NoSuchKey): - s3.get_object(Bucket=bucket_name, Key=filename) - - -@mock_s3 -@freeze_time('2019-09-01 04:30') -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_move_notifications_copes_if_letter_not_in_s3(sample_letter_template, mocker): - s3 = boto3.client('s3', region_name='eu-west-1') - s3.create_bucket( - Bucket=current_app.config['LETTERS_PDF_BUCKET_NAME'], - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - - eight_days_ago = datetime.utcnow() - timedelta(days=8) - create_notification(template=sample_letter_template, status='delivered', sent_at=eight_days_ago) - - move_notifications_to_notification_history('letter', sample_letter_template.service_id, datetime(2020, 1, 2)) - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 1 - - def test_move_notifications_does_nothing_if_notification_history_row_already_exists( sample_email_template, mocker ): @@ -90,95 +42,6 @@ def test_move_notifications_does_nothing_if_notification_history_row_already_exi assert history[0].status == 'delivered' -@pytest.mark.parametrize( - 'notification_status', ['validation-failed', 'virus-scan-failed'] -) -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_move_notifications_deletes_letters_not_sent_and_in_final_state_from_table_but_not_s3( - sample_service, mocker, notification_status -): - mock_s3_object = mocker.patch("app.dao.notifications_dao.find_letter_pdf_in_s3").return_value - letter_template = create_template(service=sample_service, template_type='letter') - create_notification( - template=letter_template, - status=notification_status, - reference='LETTER_REF', - created_at=datetime.utcnow() - timedelta(days=14) - ) - assert Notification.query.count() == 1 - assert NotificationHistory.query.count() == 0 - - move_notifications_to_notification_history('letter', sample_service.id, datetime.utcnow()) - - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 1 - mock_s3_object.assert_not_called() - - -@mock_s3 -@freeze_time('2020-12-24 04:30') -@pytest.mark.parametrize('notification_status', ['delivered', 'returned-letter', 'technical-failure']) -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_move_notifications_deletes_letters_sent_and_in_final_state_from_table_and_s3( - sample_service, mocker, notification_status -): - bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME'] - s3 = boto3.client('s3', region_name='eu-west-1') - s3.create_bucket( - Bucket=bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - - letter_template = create_template(service=sample_service, template_type='letter') - eight_days_ago = datetime.utcnow() - timedelta(days=8) - create_notification( - template=letter_template, - status=notification_status, - reference='LETTER_REF', - created_at=eight_days_ago, - sent_at=eight_days_ago - ) - assert Notification.query.count() == 1 - assert NotificationHistory.query.count() == 0 - - filename = "{}/NOTIFY.LETTER_REF.D.2.C.{}.PDF".format( - str(eight_days_ago.date()), - eight_days_ago.strftime('%Y%m%d%H%M%S') - ) - s3.put_object(Bucket=bucket_name, Key=filename, Body=b'foo') - - move_notifications_to_notification_history('letter', sample_service.id, datetime.utcnow()) - - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 1 - - with pytest.raises(s3.exceptions.NoSuchKey): - s3.get_object(Bucket=bucket_name, Key=filename) - - -@pytest.mark.parametrize('notification_status', ['pending-virus-check', 'created', 'sending']) -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_move_notifications_does_not_delete_letters_not_yet_in_final_state( - sample_service, mocker, notification_status -): - mock_s3_object = mocker.patch("app.dao.notifications_dao.find_letter_pdf_in_s3").return_value - letter_template = create_template(service=sample_service, template_type='letter') - create_notification( - template=letter_template, - status=notification_status, - reference='LETTER_REF', - created_at=datetime.utcnow() - timedelta(days=8), - ) - assert Notification.query.count() == 1 - assert NotificationHistory.query.count() == 0 - - move_notifications_to_notification_history('letter', sample_service.id, datetime.utcnow()) - - assert Notification.query.count() == 1 - assert NotificationHistory.query.count() == 0 - mock_s3_object.assert_not_called() - - def test_move_notifications_only_moves_notifications_older_than_provided_timestamp(sample_template): delete_time = datetime(2020, 6, 1, 12) one_second_before = delete_time - timedelta(seconds=1) @@ -224,14 +87,12 @@ def test_move_notifications_only_moves_for_given_notification_type(sample_servic sms_template = create_template(sample_service, 'sms') email_template = create_template(sample_service, 'email') - letter_template = create_template(sample_service, 'letter') create_notification(sms_template, created_at=one_second_before) create_notification(email_template, created_at=one_second_before) - create_notification(letter_template, created_at=one_second_before) result = move_notifications_to_notification_history('sms', sample_service.id, delete_time) assert result == 1 - assert {x.notification_type for x in Notification.query} == {'email', 'letter'} + assert {x.notification_type for x in Notification.query} == {'email'} assert NotificationHistory.query.one().notification_type == 'sms' diff --git a/tests/app/dao/test_daily_sorted_letter_dao.py b/tests/app/dao/test_daily_sorted_letter_dao.py deleted file mode 100644 index a762fc8bc..000000000 --- a/tests/app/dao/test_daily_sorted_letter_dao.py +++ /dev/null @@ -1,55 +0,0 @@ -from datetime import date - -from app.dao.daily_sorted_letter_dao import ( - dao_create_or_update_daily_sorted_letter, - dao_get_daily_sorted_letter_by_billing_day, -) -from app.models import DailySortedLetter -from tests.app.db import create_daily_sorted_letter - - -def test_dao_get_daily_sorted_letter_by_billing_day(notify_db_session): - billing_day = date(2018, 2, 1) - other_day = date(2017, 9, 8) - - daily_sorted_letters = create_daily_sorted_letter(billing_day=billing_day) - - assert dao_get_daily_sorted_letter_by_billing_day(billing_day) == daily_sorted_letters - assert not dao_get_daily_sorted_letter_by_billing_day(other_day) - - -def test_dao_create_or_update_daily_sorted_letter_creates_a_new_entry(notify_db_session): - billing_day = date(2018, 2, 1) - dsl = DailySortedLetter(billing_day=billing_day, - file_name="Notify-201802011234.rs.txt", - unsorted_count=2, - sorted_count=0) - dao_create_or_update_daily_sorted_letter(dsl) - - daily_sorted_letter = dao_get_daily_sorted_letter_by_billing_day(billing_day) - - assert daily_sorted_letter.billing_day == billing_day - assert daily_sorted_letter.unsorted_count == 2 - assert daily_sorted_letter.sorted_count == 0 - assert not daily_sorted_letter.updated_at - - -def test_dao_create_or_update_daily_sorted_letter_updates_an_existing_entry( - notify_db_session -): - create_daily_sorted_letter(billing_day=date(2018, 1, 18), - file_name="Notify-20180118123.rs.txt", - unsorted_count=2, - sorted_count=3) - - dsl = DailySortedLetter(billing_day=date(2018, 1, 18), - file_name="Notify-20180118123.rs.txt", - unsorted_count=5, - sorted_count=17) - dao_create_or_update_daily_sorted_letter(dsl) - - daily_sorted_letter = dao_get_daily_sorted_letter_by_billing_day(dsl.billing_day) - - assert daily_sorted_letter.unsorted_count == 5 - assert daily_sorted_letter.sorted_count == 17 - assert daily_sorted_letter.updated_at diff --git a/tests/app/dao/test_fact_billing_dao.py b/tests/app/dao/test_fact_billing_dao.py index f9383cd39..3ba91f2d4 100644 --- a/tests/app/dao/test_fact_billing_dao.py +++ b/tests/app/dao/test_fact_billing_dao.py @@ -12,8 +12,6 @@ from app.dao.fact_billing_dao import ( fetch_billing_totals_for_year, fetch_daily_sms_provider_volumes_for_platform, fetch_daily_volumes_for_platform, - fetch_letter_costs_and_totals_for_all_services, - fetch_letter_line_items_for_all_services, fetch_monthly_billing_for_year, fetch_sms_billing_for_all_services, fetch_sms_free_allowance_remainder_until_date, @@ -28,7 +26,6 @@ from app.models import NOTIFICATION_STATUS_TYPES, FactBilling from tests.app.db import ( create_annual_billing, create_ft_billing, - create_letter_rate, create_notification, create_notification_history, create_organisation, @@ -44,21 +41,18 @@ def set_up_yearly_data(): service = create_service() sms_template = create_template(service=service, template_type="sms") email_template = create_template(service=service, template_type="email") - letter_template = create_template(service=service, template_type="letter") # use different rates for adjacent financial years to make sure the query # doesn't accidentally bleed over into them for dt in (date(2016, 3, 31), date(2017, 4, 1)): create_ft_billing(local_date=dt, template=sms_template, rate=0.163) create_ft_billing(local_date=dt, template=email_template, rate=0, billable_unit=0) - create_ft_billing(local_date=dt, template=letter_template, rate=0.31, postage='second') # a selection of dates that represent the extreme ends of the financial year # and some arbitrary dates in between for dt in (date(2016, 4, 1), date(2016, 4, 29), date(2017, 2, 6), date(2017, 3, 31)): create_ft_billing(local_date=dt, template=sms_template, rate=0.162) create_ft_billing(local_date=dt, template=email_template, rate=0, billable_unit=0) - create_ft_billing(local_date=dt, template=letter_template, rate=0.30, postage='second') return service @@ -66,21 +60,10 @@ def set_up_yearly_data(): def set_up_yearly_data_variable_rates(): service = create_service() sms_template = create_template(service=service, template_type="sms") - letter_template = create_template(service=service, template_type="letter") create_ft_billing(local_date='2018-05-16', template=sms_template, rate=0.162) create_ft_billing(local_date='2018-05-17', template=sms_template, rate_multiplier=2, rate=0.0150, billable_unit=2) create_ft_billing(local_date='2018-05-16', template=sms_template, rate_multiplier=2, rate=0.162, billable_unit=2) - create_ft_billing(local_date='2018-05-16', template=letter_template, rate=0.33, postage='second') - - create_ft_billing( - local_date='2018-05-17', - template=letter_template, - rate=0.36, - notifications_sent=2, - billable_unit=4, # 2 pages each - postage='second' - ) return service @@ -97,32 +80,28 @@ def test_fetch_billing_data_for_today_includes_data_with_the_right_key_type(noti assert results[0].notifications_sent == 2 -@pytest.mark.parametrize("notification_type", ["email", "sms", "letter"]) +@pytest.mark.parametrize("notification_type", ["email", "sms"]) def test_fetch_billing_data_for_day_only_calls_query_for_permission_type(notify_db_session, notification_type): service = create_service(service_permissions=[notification_type]) email_template = create_template(service=service, template_type="email") sms_template = create_template(service=service, template_type="sms") - letter_template = create_template(service=service, template_type="letter") create_notification(template=email_template, status='delivered') create_notification(template=sms_template, status='delivered') - create_notification(template=letter_template, status='delivered') today = convert_utc_to_local_timezone(datetime.utcnow()) results = fetch_billing_data_for_day(process_day=today.date(), check_permissions=True) assert len(results) == 1 -@pytest.mark.parametrize("notification_type", ["email", "sms", "letter"]) +@pytest.mark.parametrize("notification_type", ["email", "sms"]) def test_fetch_billing_data_for_day_only_calls_query_for_all_channels(notify_db_session, notification_type): service = create_service(service_permissions=[notification_type]) email_template = create_template(service=service, template_type="email") sms_template = create_template(service=service, template_type="sms") - letter_template = create_template(service=service, template_type="letter") create_notification(template=email_template, status='delivered') create_notification(template=sms_template, status='delivered') - create_notification(template=letter_template, status='delivered') today = convert_utc_to_local_timezone(datetime.utcnow()) results = fetch_billing_data_for_day(process_day=today.date(), check_permissions=False) - assert len(results) == 3 + assert len(results) == 2 @freeze_time('2018-04-02 01:20:00') @@ -200,15 +179,12 @@ def test_fetch_billing_data_for_day_is_grouped_by_rate_mulitplier(notify_db_sess def test_fetch_billing_data_for_day_is_grouped_by_international(notify_db_session): service = create_service() sms_template = create_template(service=service) - letter_template = create_template(template_type='letter', service=service) create_notification(template=sms_template, status='delivered', international=True) create_notification(template=sms_template, status='delivered', international=False) - create_notification(template=letter_template, status='delivered', international=True) - create_notification(template=letter_template, status='delivered', international=False) today = convert_utc_to_local_timezone(datetime.utcnow()) results = fetch_billing_data_for_day(today.date()) - assert len(results) == 4 + assert len(results) == 2 assert all(result.notifications_sent == 1 for result in results) @@ -216,77 +192,17 @@ def test_fetch_billing_data_for_day_is_grouped_by_notification_type(notify_db_se service = create_service() sms_template = create_template(service=service, template_type='sms') email_template = create_template(service=service, template_type='email') - letter_template = create_template(service=service, template_type='letter') create_notification(template=sms_template, status='delivered') create_notification(template=sms_template, status='delivered') create_notification(template=sms_template, status='delivered') create_notification(template=email_template, status='delivered') create_notification(template=email_template, status='delivered') - create_notification(template=letter_template, status='delivered') today = convert_utc_to_local_timezone(datetime.utcnow()) results = fetch_billing_data_for_day(today.date()) - assert len(results) == 3 + assert len(results) == 2 notification_types = [x.notification_type for x in results] - assert len(notification_types) == 3 - - -def test_fetch_billing_data_for_day_groups_by_postage(notify_db_session): - service = create_service() - letter_template = create_template(service=service, template_type='letter') - email_template = create_template(service=service, template_type='email') - create_notification(template=letter_template, status='delivered', postage='first') - create_notification(template=letter_template, status='delivered', postage='first') - create_notification(template=letter_template, status='delivered', postage='second') - create_notification(template=letter_template, status='delivered', postage='europe') - create_notification(template=letter_template, status='delivered', postage='rest-of-world') - create_notification(template=email_template, status='delivered') - - today = convert_utc_to_local_timezone(datetime.utcnow()) - results = fetch_billing_data_for_day(today.date()) - assert len(results) == 5 - - -def test_fetch_billing_data_for_day_groups_by_sent_by(notify_db_session): - service = create_service() - letter_template = create_template(service=service, template_type='letter') - email_template = create_template(service=service, template_type='email') - create_notification(template=letter_template, status='delivered', postage='second', sent_by='dvla') - create_notification(template=letter_template, status='delivered', postage='second', sent_by='dvla') - create_notification(template=letter_template, status='delivered', postage='second', sent_by=None) - create_notification(template=email_template, status='delivered') - - today = convert_utc_to_local_timezone(datetime.utcnow()) - results = fetch_billing_data_for_day(today.date()) - assert len(results) == 2 - - -def test_fetch_billing_data_for_day_groups_by_page_count(notify_db_session): - service = create_service() - letter_template = create_template(service=service, template_type='letter') - email_template = create_template(service=service, template_type='email') - create_notification(template=letter_template, status='delivered', postage='second', billable_units=1) - create_notification(template=letter_template, status='delivered', postage='second', billable_units=1) - create_notification(template=letter_template, status='delivered', postage='second', billable_units=2) - create_notification(template=email_template, status='delivered') - - today = convert_utc_to_local_timezone(datetime.utcnow()) - results = fetch_billing_data_for_day(today.date()) - assert len(results) == 3 - - -def test_fetch_billing_data_for_day_sets_postage_for_emails_and_sms_to_none(notify_db_session): - service = create_service() - sms_template = create_template(service=service, template_type='sms') - email_template = create_template(service=service, template_type='email') - create_notification(template=sms_template, status='delivered') - create_notification(template=email_template, status='delivered') - - today = convert_utc_to_local_timezone(datetime.utcnow()) - results = fetch_billing_data_for_day(today.date()) - assert len(results) == 2 - assert results[0].postage == 'none' - assert results[1].postage == 'none' + assert len(notification_types) == 2 def test_fetch_billing_data_for_day_returns_empty_list(notify_db_session): @@ -295,6 +211,7 @@ def test_fetch_billing_data_for_day_returns_empty_list(notify_db_session): assert results == [] +# TODO: ready for reactivation? @pytest.mark.skip(reason="Needs updating for TTS: Timezone handling") def test_fetch_billing_data_for_day_uses_correct_table(notify_db_session): service = create_service() @@ -332,35 +249,26 @@ def test_fetch_billing_data_for_day_bills_correctly_for_status(notify_db_session service = create_service() sms_template = create_template(service=service, template_type='sms') email_template = create_template(service=service, template_type='email') - letter_template = create_template(service=service, template_type='letter') for status in NOTIFICATION_STATUS_TYPES: create_notification(template=sms_template, status=status) create_notification(template=email_template, status=status) - create_notification(template=letter_template, status=status) today = convert_utc_to_local_timezone(datetime.utcnow()) results = fetch_billing_data_for_day(process_day=today.date(), service_id=service.id) sms_results = [x for x in results if x.notification_type == 'sms'] email_results = [x for x in results if x.notification_type == 'email'] - letter_results = [x for x in results if x.notification_type == 'letter'] # we expect as many rows as we check for notification types assert 6 == sms_results[0].notifications_sent assert 4 == email_results[0].notifications_sent - assert 3 == letter_results[0].notifications_sent def test_get_rates_for_billing(notify_db_session): create_rate(start_date=datetime.utcnow(), value=12, notification_type='email') create_rate(start_date=datetime.utcnow(), value=22, notification_type='sms') create_rate(start_date=datetime.utcnow(), value=33, notification_type='email') - create_letter_rate(start_date=datetime.utcnow(), rate=0.66, post_class='first') - create_letter_rate(start_date=datetime.utcnow(), rate=0.33, post_class='second') - create_letter_rate(start_date=datetime.utcnow(), rate=0.84, post_class='europe') - create_letter_rate(start_date=datetime.utcnow(), rate=0.84, post_class='rest-of-world') - non_letter_rates, letter_rates = get_rates_for_billing() + rates = get_rates_for_billing() - assert len(non_letter_rates) == 3 - assert len(letter_rates) == 4 + assert len(rates) == 3 @freeze_time('2017-06-01 12:00') @@ -368,57 +276,21 @@ def test_get_rate(notify_db_session): create_rate(start_date=datetime(2017, 5, 30, 23, 0), value=1.2, notification_type='email') create_rate(start_date=datetime(2017, 5, 30, 23, 0), value=2.2, notification_type='sms') create_rate(start_date=datetime(2017, 5, 30, 23, 0), value=3.3, notification_type='email') - create_letter_rate(start_date=datetime(2017, 5, 30, 23, 0), rate=0.66, post_class='first') - create_letter_rate(start_date=datetime(2017, 5, 30, 23, 0), rate=0.3, post_class='second') - non_letter_rates, letter_rates = get_rates_for_billing() - rate = get_rate(non_letter_rates=non_letter_rates, letter_rates=letter_rates, notification_type='sms', - date=date(2017, 6, 1)) - letter_rate = get_rate(non_letter_rates=non_letter_rates, letter_rates=letter_rates, - notification_type='letter', - crown=True, - letter_page_count=1, - date=date(2017, 6, 1)) + rates = get_rates_for_billing() + rate = get_rate(rates, notification_type='sms', date=date(2017, 6, 1)) assert rate == 2.2 - assert letter_rate == Decimal('0.3') -@pytest.mark.parametrize("letter_post_class,expected_rate", [ - ("first", "0.61"), - ("second", "0.35"), - ("europe", "0.92"), - ("rest-of-world", "1.05"), -]) -def test_get_rate_filters_letters_by_post_class(notify_db_session, letter_post_class, expected_rate): - create_letter_rate(start_date=datetime(2017, 5, 30, 23, 0), sheet_count=2, rate=0.61, post_class='first') - create_letter_rate(start_date=datetime(2017, 5, 30, 23, 0), sheet_count=2, rate=0.35, post_class='second') - create_letter_rate(start_date=datetime(2017, 5, 30, 23, 0), sheet_count=2, rate=0.92, post_class='europe') - create_letter_rate(start_date=datetime(2017, 5, 30, 23, 0), sheet_count=2, rate=1.05, post_class='rest-of-world') - - non_letter_rates, letter_rates = get_rates_for_billing() - rate = get_rate(non_letter_rates, letter_rates, "letter", datetime(2018, 10, 1), True, 2, letter_post_class) - assert rate == Decimal(expected_rate) - - -@pytest.mark.parametrize("date,expected_rate", [(datetime(2018, 9, 30), '0.33'), (datetime(2018, 10, 1), '0.35')]) +@pytest.mark.parametrize("date,expected_rate", [(datetime(2018, 9, 30), 1.2), (datetime(2018, 10, 1), 2.2)]) def test_get_rate_chooses_right_rate_depending_on_date(notify_db_session, date, expected_rate): - create_letter_rate(start_date=datetime(2016, 1, 1, 0, 0), sheet_count=2, rate=0.33, post_class='second') - create_letter_rate(start_date=datetime(2018, 9, 30, 23, 0), sheet_count=2, rate=0.35, post_class='second') + create_rate(start_date=datetime(2016, 1, 1, 0, 0), value=1.2, notification_type='sms') + create_rate(start_date=datetime(2018, 9, 30, 23, 0), value=2.2, notification_type='sms') - non_letter_rates, letter_rates = get_rates_for_billing() - rate = get_rate(non_letter_rates, letter_rates, "letter", date, True, 2, "second") - assert rate == Decimal(expected_rate) - - -def test_get_rate_for_letters_when_page_count_is_zero(notify_db_session): - non_letter_rates, letter_rates = get_rates_for_billing() - letter_rate = get_rate(non_letter_rates=non_letter_rates, letter_rates=letter_rates, - notification_type='letter', - crown=True, - letter_page_count=0, - date=datetime.utcnow()) - assert letter_rate == 0 + rates = get_rates_for_billing() + rate = get_rate(rates, "sms", date, True) + assert rate == expected_rate def test_fetch_monthly_billing_for_year(notify_db_session): @@ -426,7 +298,7 @@ def test_fetch_monthly_billing_for_year(notify_db_session): create_annual_billing(service_id=service.id, free_sms_fragment_limit=1, financial_year_start=2016) results = fetch_monthly_billing_for_year(service.id, 2016) - assert len(results) == 9 # 3 billed months for each type + assert len(results) == 6 # 3 billed months for each type assert str(results[0].month) == "2016-04-01" assert results[0].notification_type == 'email' @@ -438,26 +310,17 @@ def test_fetch_monthly_billing_for_year(notify_db_session): assert results[0].charged_units == 0 assert str(results[1].month) == "2016-04-01" - assert results[1].notification_type == 'letter' + assert results[1].notification_type == 'sms' assert results[1].notifications_sent == 2 assert results[1].chargeable_units == 2 - assert results[1].rate == Decimal('0.30') - assert results[1].cost == Decimal('0.60') - assert results[1].free_allowance_used == 0 - assert results[1].charged_units == 2 - - assert str(results[2].month) == "2016-04-01" - assert results[2].notification_type == 'sms' - assert results[2].notifications_sent == 2 - assert results[2].chargeable_units == 2 - assert results[2].rate == Decimal('0.162') + assert results[1].rate == Decimal('0.162') # free allowance is 1 - assert results[2].cost == Decimal('0.162') - assert results[2].free_allowance_used == 1 - assert results[2].charged_units == 1 + assert results[1].cost == Decimal('0.162') + assert results[1].free_allowance_used == 1 + assert results[1].charged_units == 1 - assert str(results[3].month) == "2017-02-01" - assert str(results[8].month) == "2017-03-01" + assert str(results[2].month) == "2017-02-01" + assert str(results[5].month) == "2017-03-01" def test_fetch_monthly_billing_for_year_variable_rates(notify_db_session): @@ -466,45 +329,27 @@ def test_fetch_monthly_billing_for_year_variable_rates(notify_db_session): results = fetch_monthly_billing_for_year(service.id, 2018) # Test data is only for the month of May - assert len(results) == 4 + assert len(results) == 2 assert str(results[0].month) == "2018-05-01" - assert results[0].notification_type == 'letter' + assert results[0].notification_type == 'sms' assert results[0].notifications_sent == 1 - assert results[0].chargeable_units == 1 - assert results[0].rate == Decimal('0.33') - assert results[0].cost == Decimal('0.33') - assert results[0].free_allowance_used == 0 - assert results[0].charged_units == 1 + assert results[0].chargeable_units == 4 + assert results[0].rate == Decimal('0.015') + # 1 free units on the 17th + assert results[0].cost == Decimal('0.045') + assert results[0].free_allowance_used == 1 + assert results[0].charged_units == 3 assert str(results[1].month) == "2018-05-01" - assert results[1].notification_type == 'letter' + assert results[1].notification_type == 'sms' assert results[1].notifications_sent == 2 - assert results[1].chargeable_units == 2 - assert results[1].rate == Decimal('0.36') - assert results[1].cost == Decimal('0.72') - assert results[1].free_allowance_used == 0 - assert results[1].charged_units == 2 - - assert str(results[2].month) == "2018-05-01" - assert results[2].notification_type == 'sms' - assert results[2].notifications_sent == 1 - assert results[2].chargeable_units == 4 - assert results[2].rate == Decimal('0.015') - # 1 free units on the 17th - assert results[2].cost == Decimal('0.045') - assert results[2].free_allowance_used == 1 - assert results[2].charged_units == 3 - - assert str(results[3].month) == "2018-05-01" - assert results[3].notification_type == 'sms' - assert results[3].notifications_sent == 2 - assert results[3].chargeable_units == 5 - assert results[3].rate == Decimal('0.162') + assert results[1].chargeable_units == 5 + assert results[1].rate == Decimal('0.162') # 5 free units on the 16th - assert results[3].cost == Decimal('0') - assert results[3].free_allowance_used == 5 - assert results[3].charged_units == 0 + assert results[1].cost == Decimal('0') + assert results[1].free_allowance_used == 5 + assert results[1].charged_units == 0 @freeze_time('2018-08-01 13:30:00') @@ -532,7 +377,7 @@ def test_fetch_billing_totals_for_year(notify_db_session): create_annual_billing(service_id=service.id, free_sms_fragment_limit=1000, financial_year_start=2016) results = fetch_billing_totals_for_year(service_id=service.id, year=2016) - assert len(results) == 3 + assert len(results) == 2 assert results[0].notification_type == 'email' assert results[0].notifications_sent == 4 assert results[0].chargeable_units == 0 @@ -541,21 +386,13 @@ def test_fetch_billing_totals_for_year(notify_db_session): assert results[0].free_allowance_used == 0 assert results[0].charged_units == 0 - assert results[1].notification_type == 'letter' + assert results[1].notification_type == 'sms' assert results[1].notifications_sent == 4 assert results[1].chargeable_units == 4 - assert results[1].rate == Decimal('0.3') - assert results[1].cost == Decimal('1.2') - assert results[1].free_allowance_used == 0 - assert results[1].charged_units == 4 - - assert results[2].notification_type == 'sms' - assert results[2].notifications_sent == 4 - assert results[2].chargeable_units == 4 - assert results[2].rate == Decimal('0.162') - assert results[2].cost == Decimal('0') - assert results[2].free_allowance_used == 4 - assert results[2].charged_units == 0 + assert results[1].rate == Decimal('0.162') + assert results[1].cost == Decimal('0') + assert results[1].free_allowance_used == 4 + assert results[1].charged_units == 0 def test_fetch_billing_totals_for_year_uses_current_annual_billing(notify_db_session): @@ -578,40 +415,25 @@ def test_fetch_billing_totals_for_year_variable_rates(notify_db_session): create_annual_billing(service_id=service.id, free_sms_fragment_limit=6, financial_year_start=2018) results = fetch_billing_totals_for_year(service_id=service.id, year=2018) - assert len(results) == 4 - assert results[0].notification_type == 'letter' + assert len(results) == 2 + + assert results[0].notification_type == 'sms' assert results[0].notifications_sent == 1 - assert results[0].chargeable_units == 1 - assert results[0].rate == Decimal('0.33') - assert results[0].cost == Decimal('0.33') - assert results[0].free_allowance_used == 0 - assert results[0].charged_units == 1 - - assert results[1].notification_type == 'letter' - assert results[1].notifications_sent == 2 - assert results[1].chargeable_units == 2 - assert results[1].rate == Decimal('0.36') - assert results[1].cost == Decimal('0.72') - assert results[1].free_allowance_used == 0 - assert results[1].charged_units == 2 - - assert results[2].notification_type == 'sms' - assert results[2].notifications_sent == 1 - assert results[2].chargeable_units == 4 - assert results[2].rate == Decimal('0.015') + assert results[0].chargeable_units == 4 + assert results[0].rate == Decimal('0.015') # 1 free unit on the 17th - assert results[2].cost == Decimal('0.045') - assert results[2].free_allowance_used == 1 - assert results[2].charged_units == 3 + assert results[0].cost == Decimal('0.045') + assert results[0].free_allowance_used == 1 + assert results[0].charged_units == 3 - assert results[3].notification_type == 'sms' - assert results[3].notifications_sent == 2 - assert results[3].chargeable_units == 5 - assert results[3].rate == Decimal('0.162') + assert results[1].notification_type == 'sms' + assert results[1].notifications_sent == 2 + assert results[1].chargeable_units == 5 + assert results[1].rate == Decimal('0.162') # 5 free units on the 16th - assert results[3].cost == Decimal('0') - assert results[3].free_allowance_used == 5 - assert results[3].charged_units == 0 + assert results[1].cost == Decimal('0') + assert results[1].free_allowance_used == 5 + assert results[1].charged_units == 0 def test_delete_billing_data(notify_db_session): @@ -776,72 +598,6 @@ def test_fetch_sms_billing_for_all_services_without_an_organisation_appears(noti assert [dict(result) for result in results] == expected_results -def test_fetch_letter_costs_and_totals_for_all_services(notify_db_session): - fixtures = set_up_usage_data(datetime(2019, 6, 1)) - - results = fetch_letter_costs_and_totals_for_all_services(datetime(2019, 6, 1), datetime(2019, 9, 30)) - - assert len(results) == 3 - assert results[0] == ( - fixtures["org_1"].name, fixtures["org_1"].id, - fixtures["service_1_sms_and_letter"].name, fixtures["service_1_sms_and_letter"].id, - 8, Decimal('3.40') - ) - assert results[1] == ( - fixtures["org_for_service_with_letters"].name, fixtures["org_for_service_with_letters"].id, - fixtures["service_with_letters"].name, fixtures["service_with_letters"].id, - 22, Decimal('14.00') - ) - assert results[2] == ( - None, None, - fixtures["service_with_letters_without_org"].name, fixtures["service_with_letters_without_org"].id, - 18, Decimal('24.45') - ) - - -def test_fetch_letter_line_items_for_all_service(notify_db_session): - fixtures = set_up_usage_data(datetime(2019, 6, 1)) - - results = fetch_letter_line_items_for_all_services(datetime(2019, 6, 1), datetime(2019, 9, 30)) - - assert len(results) == 7 - assert results[0] == ( - fixtures["org_1"].name, fixtures["org_1"].id, - fixtures["service_1_sms_and_letter"].name, fixtures["service_1_sms_and_letter"].id, - Decimal('0.45'), 'second', 6 - ) - assert results[1] == ( - fixtures["org_1"].name, fixtures["org_1"].id, - fixtures["service_1_sms_and_letter"].name, fixtures["service_1_sms_and_letter"].id, - Decimal("0.35"), 'first', 2 - ) - assert results[2] == ( - fixtures["org_for_service_with_letters"].name, fixtures["org_for_service_with_letters"].id, - fixtures["service_with_letters"].name, fixtures["service_with_letters"].id, - Decimal("0.65"), 'second', 20 - ) - assert results[3] == ( - fixtures["org_for_service_with_letters"].name, fixtures["org_for_service_with_letters"].id, - fixtures["service_with_letters"].name, fixtures["service_with_letters"].id, - Decimal("0.50"), 'first', 2 - ) - assert results[4] == ( - None, None, - fixtures["service_with_letters_without_org"].name, fixtures["service_with_letters_without_org"].id, - Decimal("0.35"), 'second', 2 - ) - assert results[5] == ( - None, None, - fixtures["service_with_letters_without_org"].name, fixtures["service_with_letters_without_org"].id, - Decimal("0.50"), 'first', 1 - ) - assert results[6] == ( - None, None, - fixtures["service_with_letters_without_org"].name, fixtures["service_with_letters_without_org"].id, - Decimal("1.55"), 'international', 15 - ) - - @freeze_time('2019-06-01 13:30') def test_fetch_usage_year_for_organisation(notify_db_session): fixtures = set_up_usage_data(datetime(2019, 5, 1)) @@ -865,7 +621,6 @@ def test_fetch_usage_year_for_organisation(notify_db_session): assert first_row['sms_remainder'] == 5 # because there are 5 billable units assert first_row['chargeable_billable_sms'] == 0 assert first_row['sms_cost'] == 0.0 - assert first_row['letter_cost'] == 3.4 assert first_row['emails_sent'] == 0 second_row = results[str(service_with_emails_for_org.id)] @@ -875,7 +630,6 @@ def test_fetch_usage_year_for_organisation(notify_db_session): assert second_row['sms_remainder'] == 0 assert second_row['chargeable_billable_sms'] == 0 assert second_row['sms_cost'] == 0 - assert second_row['letter_cost'] == 0 assert second_row['emails_sent'] == 1100 third_row = results[str(fixtures["service_with_out_ft_billing_this_year"].id)] @@ -885,12 +639,10 @@ def test_fetch_usage_year_for_organisation(notify_db_session): assert third_row['sms_remainder'] == 10 assert third_row['chargeable_billable_sms'] == 0 assert third_row['sms_cost'] == 0 - assert third_row['letter_cost'] == 0 assert third_row['emails_sent'] == 0 def test_fetch_usage_year_for_organisation_populates_ft_billing_for_today(notify_db_session): - create_letter_rate(start_date=datetime.utcnow() - timedelta(days=1)) create_rate(start_date=datetime.utcnow() - timedelta(days=1), value=0.65, notification_type='sms') new_org = create_organisation(name='New organisation') service = create_service() @@ -1005,7 +757,6 @@ def test_fetch_usage_year_for_organisation_only_returns_data_for_live_services(n trial_service = create_service(restricted=True, service_name='trial_service') email_template = create_template(service=trial_service, template_type='email') trial_sms_template = create_template(service=trial_service, template_type='sms') - trial_letter_template = create_template(service=trial_service, template_type='letter') dao_add_service_to_organisation(service=live_service, organisation_id=org.id) dao_add_service_to_organisation(service=trial_service, organisation_id=org.id) create_ft_billing(local_date=datetime.utcnow().date(), template=sms_template, rate=0.0158, @@ -1014,8 +765,6 @@ def test_fetch_usage_year_for_organisation_only_returns_data_for_live_services(n notifications_sent=100) create_ft_billing(local_date=datetime.utcnow().date(), template=trial_sms_template, billable_unit=200, rate=0.0158, notifications_sent=100) - create_ft_billing(local_date=datetime.utcnow().date(), template=trial_letter_template, billable_unit=40, rate=0.30, - notifications_sent=20) create_annual_billing(service_id=live_service.id, free_sms_fragment_limit=0, financial_year_start=2019) create_annual_billing(service_id=trial_service.id, free_sms_fragment_limit=0, financial_year_start=2019) @@ -1129,24 +878,19 @@ def test_query_organisation_sms_usage_for_year_handles_multiple_rates(notify_db_ def test_fetch_daily_volumes_for_platform( - notify_db_session, sample_template, sample_email_template, sample_letter_template + notify_db_session, sample_template, sample_email_template ): create_ft_billing(local_date='2022-02-03', template=sample_template, notifications_sent=10, billable_unit=10) create_ft_billing(local_date='2022-02-03', template=sample_template, notifications_sent=10, billable_unit=30, international=True) create_ft_billing(local_date='2022-02-03', template=sample_email_template, notifications_sent=10) - create_ft_billing(local_date='2022-02-03', template=sample_letter_template, notifications_sent=5, - billable_unit=5, rate=0.39) - create_ft_billing(local_date='2022-02-03', template=sample_letter_template, notifications_sent=5, - billable_unit=10, rate=0.44) create_ft_billing(local_date='2022-02-04', template=sample_template, notifications_sent=20, billable_unit=40) create_ft_billing(local_date='2022-02-04', template=sample_template, notifications_sent=10, billable_unit=20, rate_multiplier=3) create_ft_billing(local_date='2022-02-04', template=sample_email_template, notifications_sent=50) - create_ft_billing(local_date='2022-02-04', template=sample_letter_template, notifications_sent=20, billable_unit=40) results = fetch_daily_volumes_for_platform(start_date='2022-02-03', end_date='2022-02-04') @@ -1156,16 +900,12 @@ def test_fetch_daily_volumes_for_platform( assert results[0].sms_fragment_totals == 40 assert results[0].sms_chargeable_units == 40 assert results[0].email_totals == 10 - assert results[0].letter_totals == 10 - assert results[0].letter_sheet_totals == 15 assert results[1].local_date == '2022-02-04' assert results[1].sms_totals == 30 assert results[1].sms_fragment_totals == 60 assert results[1].sms_chargeable_units == 100 assert results[1].email_totals == 50 - assert results[1].letter_totals == 20 - assert results[1].letter_sheet_totals == 40 def test_fetch_daily_sms_provider_volumes_for_platform_groups_values_by_provider( @@ -1234,11 +974,9 @@ def test_fetch_daily_sms_provider_volumes_for_platform_for_platform_searches_dat def test_fetch_daily_sms_provider_volumes_for_platform_for_platform_only_returns_sms( sample_template, sample_email_template, - sample_letter_template ): create_ft_billing('2022-02-01', sample_template, notifications_sent=1) create_ft_billing('2022-02-01', sample_email_template, notifications_sent=2) - create_ft_billing('2022-02-01', sample_letter_template, notifications_sent=4) results = fetch_daily_sms_provider_volumes_for_platform(start_date='2022-02-01', end_date='2022-02-01') @@ -1252,16 +990,13 @@ def test_fetch_volumes_by_service(notify_db_session): results = fetch_volumes_by_service(start_date=datetime(2022, 2, 1), end_date=datetime(2022, 2, 28)) # since we are using a pre-set up fixture, we only care about some of the results - assert len(results) == 7 + assert len(results) == 5 assert results[0].service_name == 'a - with sms and letter' assert results[0].organisation_name == 'Org for a - with sms and letter' assert results[0].free_allowance == 10 assert results[0].sms_notifications == 2 assert results[0].sms_chargeable_units == 3 assert results[0].email_totals == 0 - assert results[0].letter_totals == 4 - assert results[0].letter_sheet_totals == 6 - assert float(results[0].letter_cost) == 1.6 assert results[1].service_name == 'f - without ft_billing' assert results[1].organisation_name == 'Org for a - with sms and letter' @@ -1269,26 +1004,17 @@ def test_fetch_volumes_by_service(notify_db_session): assert results[1].sms_notifications == 0 assert results[1].sms_chargeable_units == 0 assert results[1].email_totals == 0 - assert results[1].letter_totals == 0 - assert results[1].letter_sheet_totals == 0 - assert float(results[1].letter_cost) == 0 - assert results[4].service_name == 'b - chargeable sms' + assert results[3].service_name == 'b - chargeable sms' + assert not results[3].organisation_name + assert results[3].free_allowance == 10 + assert results[3].sms_notifications == 2 + assert results[3].sms_chargeable_units == 3 + assert results[3].email_totals == 0 + + assert results[4].service_name == 'e - sms within allowance' assert not results[4].organisation_name assert results[4].free_allowance == 10 - assert results[4].sms_notifications == 2 - assert results[4].sms_chargeable_units == 3 + assert results[4].sms_notifications == 1 + assert results[4].sms_chargeable_units == 2 assert results[4].email_totals == 0 - assert results[4].letter_totals == 0 - assert results[4].letter_sheet_totals == 0 - assert float(results[4].letter_cost) == 0 - - assert results[6].service_name == 'e - sms within allowance' - assert not results[6].organisation_name - assert results[6].free_allowance == 10 - assert results[6].sms_notifications == 1 - assert results[6].sms_chargeable_units == 2 - assert results[6].email_totals == 0 - assert results[6].letter_totals == 0 - assert results[6].letter_sheet_totals == 0 - assert float(results[6].letter_cost) == 0 diff --git a/tests/app/dao/test_fact_notification_status_dao.py b/tests/app/dao/test_fact_notification_status_dao.py index 3b5205648..162bb2723 100644 --- a/tests/app/dao/test_fact_notification_status_dao.py +++ b/tests/app/dao/test_fact_notification_status_dao.py @@ -21,7 +21,6 @@ from app.models import ( EMAIL_TYPE, KEY_TYPE_TEAM, KEY_TYPE_TEST, - LETTER_TYPE, NOTIFICATION_CREATED, NOTIFICATION_DELIVERED, NOTIFICATION_FAILED, @@ -144,7 +143,6 @@ def test_fetch_notification_status_for_service_for_today_and_7_previous_days(not create_ft_notification_status(date(2018, 10, 25), 'sms', service_1, count=8) create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, notification_status='created') create_ft_notification_status(date(2018, 10, 29), 'email', service_1, count=3) - create_ft_notification_status(date(2018, 10, 26), 'letter', service_1, count=5) create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0)) create_notification(sms_template_2, created_at=datetime(2018, 10, 31, 11, 0, 0)) @@ -159,23 +157,19 @@ def test_fetch_notification_status_for_service_for_today_and_7_previous_days(not key=lambda x: (x.notification_type, x.status) ) - assert len(results) == 4 + assert len(results) == 3 assert results[0].notification_type == 'email' assert results[0].status == 'delivered' assert results[0].count == 4 - assert results[1].notification_type == 'letter' - assert results[1].status == 'delivered' - assert results[1].count == 5 + assert results[1].notification_type == 'sms' + assert results[1].status == 'created' + assert results[1].count == 3 assert results[2].notification_type == 'sms' - assert results[2].status == 'created' - assert results[2].count == 3 - - assert results[3].notification_type == 'sms' - assert results[3].status == 'delivered' - assert results[3].count == 19 + assert results[2].status == 'delivered' + assert results[2].count == 19 @freeze_time('2018-10-31T18:00:00') @@ -193,7 +187,6 @@ def test_fetch_notification_status_by_template_for_service_for_today_and_7_previ create_ft_notification_status(date(2018, 10, 25), 'sms', service_1, count=8) create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, notification_status='created') create_ft_notification_status(date(2018, 10, 29), 'email', service_1, count=3) - create_ft_notification_status(date(2018, 10, 26), 'letter', service_1, count=5) create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0)) create_notification(sms_template, created_at=datetime(2018, 10, 31, 12, 0, 0), status='delivered') @@ -208,7 +201,6 @@ def test_fetch_notification_status_by_template_for_service_for_today_and_7_previ assert [ ('email Template Name', False, mock.ANY, 'email', 'delivered', 1), ('email Template Name', False, mock.ANY, 'email', 'delivered', 3), - ('letter Template Name', False, mock.ANY, 'letter', 'delivered', 5), ('sms Template 1', False, mock.ANY, 'sms', 'created', 1), ('sms Template Name', False, mock.ANY, 'sms', 'created', 1), ('sms Template 1', False, mock.ANY, 'sms', 'delivered', 1), @@ -220,11 +212,11 @@ def test_fetch_notification_status_by_template_for_service_for_today_and_7_previ @pytest.mark.parametrize( - "start_date, end_date, expected_email, expected_letters, expected_sms, expected_created_sms", + "start_date, end_date, expected_email, expected_sms, expected_created_sms", [ - (29, 30, 3, 10, 10, 1), # not including today - (29, 31, 4, 10, 11, 2), # today included - (26, 31, 4, 15, 11, 2), + (29, 30, 3, 10, 1), # not including today + (29, 31, 4, 11, 2), # today included + (26, 31, 4, 11, 2), ] ) @@ -234,7 +226,6 @@ def test_fetch_notification_status_totals_for_all_services( start_date, end_date, expected_email, - expected_letters, expected_sms, expected_created_sms ): @@ -246,23 +237,19 @@ def test_fetch_notification_status_totals_for_all_services( key=lambda x: (x.notification_type, x.status) ) - assert len(results) == 4 + assert len(results) == 3 assert results[0].notification_type == 'email' assert results[0].status == 'delivered' assert results[0].count == expected_email - assert results[1].notification_type == 'letter' - assert results[1].status == 'delivered' - assert results[1].count == expected_letters + assert results[1].notification_type == 'sms' + assert results[1].status == 'created' + assert results[1].count == expected_created_sms assert results[2].notification_type == 'sms' - assert results[2].status == 'created' - assert results[2].count == expected_created_sms - - assert results[3].notification_type == 'sms' - assert results[3].status == 'delivered' - assert results[3].count == expected_sms + assert results[2].status == 'delivered' + assert results[2].count == expected_sms @freeze_time('2018-04-21 14:00') @@ -302,16 +289,13 @@ def test_fetch_notification_status_totals_for_all_services_works_in_est( def set_up_data(): service_2 = create_service(service_name='service_2') - create_template(service=service_2, template_type=LETTER_TYPE) service_1 = create_service(service_name='service_1') sms_template = create_template(service=service_1, template_type=SMS_TYPE) email_template = create_template(service=service_1, template_type=EMAIL_TYPE) create_ft_notification_status(date(2018, 10, 24), 'sms', service_1, count=8) - create_ft_notification_status(date(2018, 10, 26), 'letter', service_1, count=5) create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, count=10) create_ft_notification_status(date(2018, 10, 29), 'sms', service_1, notification_status='created') create_ft_notification_status(date(2018, 10, 29), 'email', service_1, count=3) - create_ft_notification_status(date(2018, 10, 29), 'letter', service_2, count=10) create_notification(service_1.templates[0], created_at=datetime(2018, 10, 30, 12, 0, 0), status='delivered') create_notification(sms_template, created_at=datetime(2018, 10, 31, 11, 0, 0)) @@ -340,7 +324,7 @@ def test_fetch_stats_for_all_services_by_date_range(notify_db_session): service_1, service_2 = set_up_data() results = fetch_stats_for_all_services_by_date_range(start_date=date(2018, 10, 29), end_date=date(2018, 10, 31)) - assert len(results) == 5 + assert len(results) == 4 assert results[0].service_id == service_1.id assert results[0].notification_type == 'email' @@ -358,21 +342,15 @@ def test_fetch_stats_for_all_services_by_date_range(notify_db_session): assert results[2].count == 11 assert results[3].service_id == service_2.id - assert results[3].notification_type == 'letter' - assert results[3].status == 'delivered' - assert results[3].count == 10 - - assert results[4].service_id == service_2.id - assert not results[4].notification_type - assert not results[4].status - assert not results[4].count + assert not results[3].notification_type + assert not results[3].status + assert not results[3].count @freeze_time('2018-03-30 14:00') def test_fetch_monthly_template_usage_for_service(sample_service): template_one = create_template(service=sample_service, template_type='sms', template_name='a') template_two = create_template(service=sample_service, template_type='email', template_name='b') - template_three = create_template(service=sample_service, template_type='letter', template_name='c') create_ft_notification_status(local_date=date(2017, 12, 10), service=sample_service, @@ -390,10 +368,10 @@ def test_fetch_monthly_template_usage_for_service(sample_service): create_ft_notification_status(local_date=date(2018, 3, 1), service=sample_service, - template=template_three, + template=template_two, count=5) - create_notification(template=template_three, created_at=datetime.utcnow() - timedelta(days=1)) - create_notification(template=template_three, created_at=datetime.utcnow()) + create_notification(template=template_two, created_at=datetime.utcnow() - timedelta(days=1)) + create_notification(template=template_two, created_at=datetime.utcnow()) results = fetch_monthly_template_usage_for_service( datetime(2017, 4, 1), datetime(2018, 3, 31), sample_service.id ) @@ -402,14 +380,12 @@ def test_fetch_monthly_template_usage_for_service(sample_service): assert results[0].template_id == template_one.id assert results[0].name == template_one.name - assert results[0].is_precompiled_letter is False assert results[0].template_type == template_one.template_type assert results[0].month == 12 assert results[0].year == 2017 assert results[0].count == 6 assert results[1].template_id == template_two.id assert results[1].name == template_two.name - assert results[1].is_precompiled_letter is False assert results[1].template_type == template_two.template_type assert results[1].month == 12 assert results[1].year == 2017 @@ -417,16 +393,14 @@ def test_fetch_monthly_template_usage_for_service(sample_service): assert results[2].template_id == template_one.id assert results[2].name == template_one.name - assert results[2].is_precompiled_letter is False assert results[2].template_type == template_one.template_type assert results[2].month == 1 assert results[2].year == 2018 assert results[2].count == 4 - assert results[3].template_id == template_three.id - assert results[3].name == template_three.name - assert results[3].is_precompiled_letter is False - assert results[3].template_type == template_three.template_type + assert results[3].template_id == template_two.id + assert results[3].name == template_two.name + assert results[3].template_type == template_two.template_type assert results[3].month == 3 assert results[3].year == 2018 assert results[3].count == 6 @@ -459,14 +433,12 @@ def test_fetch_monthly_template_usage_for_service_does_join_to_notifications_if_ assert results[0].template_id == template_one.id assert results[0].name == template_one.name - assert results[0].is_precompiled_letter == template_one.is_precompiled_letter assert results[0].template_type == template_one.template_type assert results[0].month == 2 assert results[0].year == 2018 assert results[0].count == 20 assert results[1].template_id == template_two.id assert results[1].name == template_two.name - assert results[1].is_precompiled_letter == template_two.is_precompiled_letter assert results[1].template_type == template_two.template_type assert results[1].month == 2 assert results[1].year == 2018 @@ -516,7 +488,7 @@ def test_fetch_monthly_notification_statuses_per_service(notify_db_session): service_one = create_service(service_name='service one', service_id=UUID('e4e34c4e-73c1-4802-811c-3dd273f21da4')) service_two = create_service(service_name='service two', service_id=UUID('b19d7aad-6f09-4198-8b62-f6cf126b87e5')) - create_ft_notification_status(date(2019, 4, 30), notification_type='letter', service=service_one, + create_ft_notification_status(date(2019, 4, 30), notification_type='sms', service=service_one, notification_status=NOTIFICATION_DELIVERED) create_ft_notification_status(date(2019, 3, 1), notification_type='email', service=service_one, notification_status=NOTIFICATION_SENDING, count=4) @@ -526,28 +498,27 @@ def test_fetch_monthly_notification_statuses_per_service(notify_db_session): notification_status=NOTIFICATION_TECHNICAL_FAILURE, count=2) create_ft_notification_status(date(2019, 3, 7), notification_type='email', service=service_one, notification_status=NOTIFICATION_FAILED, count=1) - create_ft_notification_status(date(2019, 3, 10), notification_type='letter', service=service_two, + create_ft_notification_status(date(2019, 3, 10), notification_type='sms', service=service_two, notification_status=NOTIFICATION_PERMANENT_FAILURE, count=1) - create_ft_notification_status(date(2019, 3, 10), notification_type='letter', service=service_two, + create_ft_notification_status(date(2019, 3, 10), notification_type='sms', service=service_two, notification_status=NOTIFICATION_PERMANENT_FAILURE, count=1) create_ft_notification_status(date(2019, 3, 13), notification_type='sms', service=service_one, notification_status=NOTIFICATION_SENT, count=1) - create_ft_notification_status(date(2019, 4, 1), notification_type='letter', service=service_two, + create_ft_notification_status(date(2019, 4, 1), notification_type='sms', service=service_two, notification_status=NOTIFICATION_TEMPORARY_FAILURE, count=10) - create_ft_notification_status(date(2019, 3, 31), notification_type='letter', service=service_one, + create_ft_notification_status(date(2019, 3, 31), notification_type='sms', service=service_one, notification_status=NOTIFICATION_DELIVERED) results = fetch_monthly_notification_statuses_per_service(date(2019, 3, 1), date(2019, 4, 30)) - assert len(results) == 6 + assert len(results) == 5 # column order: date, service_id, service_name, notifaction_type, count_sending, count_delivered, # count_technical_failure, count_temporary_failure, count_permanent_failure, count_sent - assert [x for x in results[0]] == [date(2019, 3, 1), service_two.id, 'service two', 'letter', 0, 0, 0, 0, 2, 0] + assert [x for x in results[0]] == [date(2019, 3, 1), service_two.id, 'service two', 'sms', 0, 0, 0, 0, 2, 0] assert [x for x in results[1]] == [date(2019, 3, 1), service_one.id, 'service one', 'email', 5, 0, 3, 0, 0, 0] - assert [x for x in results[2]] == [date(2019, 3, 1), service_one.id, 'service one', 'letter', 0, 1, 0, 0, 0, 0] - assert [x for x in results[3]] == [date(2019, 3, 1), service_one.id, 'service one', 'sms', 0, 0, 0, 0, 0, 1] - assert [x for x in results[4]] == [date(2019, 4, 1), service_two.id, 'service two', 'letter', 0, 0, 0, 10, 0, 0] - assert [x for x in results[5]] == [date(2019, 4, 1), service_one.id, 'service one', 'letter', 0, 1, 0, 0, 0, 0] + assert [x for x in results[2]] == [date(2019, 3, 1), service_one.id, 'service one', 'sms', 0, 1, 0, 0, 0, 1] + assert [x for x in results[3]] == [date(2019, 4, 1), service_two.id, 'service two', 'sms', 0, 0, 0, 10, 0, 0] + assert [x for x in results[4]] == [date(2019, 4, 1), service_one.id, 'service one', 'sms', 0, 1, 0, 0, 0, 0] @freeze_time('2019-04-10 14:00') @@ -578,7 +549,6 @@ def test_fetch_monthly_notification_statuses_per_service_for_rows_that_should_be def test_get_total_notifications_for_date_range(sample_service): template_sms = create_template(service=sample_service, template_type='sms', template_name='a') template_email = create_template(service=sample_service, template_type='email', template_name='b') - template_letter = create_template(service=sample_service, template_type='letter', template_name='c') create_ft_notification_status(local_date=date(2021, 2, 28), service=template_email.service, template=template_email, @@ -587,10 +557,6 @@ def test_get_total_notifications_for_date_range(sample_service): service=template_sms.service, template=template_sms, count=20) - create_ft_notification_status(local_date=date(2021, 2, 28), - service=template_letter.service, - template=template_letter, - count=3) create_ft_notification_status(local_date=date(2021, 3, 1), service=template_email.service, @@ -600,15 +566,11 @@ def test_get_total_notifications_for_date_range(sample_service): service=template_sms.service, template=template_sms, count=20) - create_ft_notification_status(local_date=date(2021, 3, 1), - service=template_letter.service, - template=template_letter, - count=3) results = get_total_notifications_for_date_range(start_date=datetime(2021, 3, 1), end_date=datetime(2021, 3, 1)) assert len(results) == 1 - assert results[0] == ("2021-03-01", 15, 20, 3) + assert results[0] == ("2021-03-01", 15, 20) @pytest.mark.skip(reason="Need a better way to test variable DST date") diff --git a/tests/app/dao/test_jobs_dao.py b/tests/app/dao/test_jobs_dao.py index c64345aac..d0e65e346 100644 --- a/tests/app/dao/test_jobs_dao.py +++ b/tests/app/dao/test_jobs_dao.py @@ -7,8 +7,6 @@ from freezegun import freeze_time from sqlalchemy.exc import IntegrityError from app.dao.jobs_dao import ( - can_letter_job_be_cancelled, - dao_cancel_letter_job, dao_create_job, dao_get_future_scheduled_job_by_id_and_service_id, dao_get_job_by_service_id_and_job_id, @@ -20,13 +18,7 @@ from app.dao.jobs_dao import ( find_jobs_with_missing_rows, find_missing_row_for_job, ) -from app.models import ( - EMAIL_TYPE, - JOB_STATUS_FINISHED, - LETTER_TYPE, - SMS_TYPE, - Job, -) +from app.models import JOB_STATUS_FINISHED, SMS_TYPE, Job from tests.app.db import ( create_job, create_notification, @@ -307,37 +299,18 @@ def test_get_jobs_for_service_doesnt_return_test_messages( assert jobs == [sample_job] -@freeze_time('2016-10-31 10:00:00') -def test_should_get_jobs_seven_days_old_filters_type(sample_service): - eight_days_ago = datetime.utcnow() - timedelta(days=8) - letter_template = create_template(sample_service, template_type=LETTER_TYPE) - sms_template = create_template(sample_service, template_type=SMS_TYPE) - email_template = create_template(sample_service, template_type=EMAIL_TYPE) - - job_to_remain = create_job(letter_template, created_at=eight_days_ago) - create_job(sms_template, created_at=eight_days_ago) - create_job(email_template, created_at=eight_days_ago) - - jobs = dao_get_jobs_older_than_data_retention( - notification_types=[EMAIL_TYPE, SMS_TYPE] - ) - - assert len(jobs) == 2 - assert job_to_remain.id not in [job.id for job in jobs] - - @freeze_time('2016-10-31 10:00:00') def test_should_get_jobs_seven_days_old_by_scheduled_for_date(sample_service): six_days_ago = datetime.utcnow() - timedelta(days=6) eight_days_ago = datetime.utcnow() - timedelta(days=8) - letter_template = create_template(sample_service, template_type=LETTER_TYPE) + sms_template = create_template(sample_service, template_type=SMS_TYPE) - create_job(letter_template, created_at=eight_days_ago) - create_job(letter_template, created_at=eight_days_ago, scheduled_for=eight_days_ago) - job_to_remain = create_job(letter_template, created_at=eight_days_ago, scheduled_for=six_days_ago) + create_job(sms_template, created_at=eight_days_ago) + create_job(sms_template, created_at=eight_days_ago, scheduled_for=eight_days_ago) + job_to_remain = create_job(sms_template, created_at=eight_days_ago, scheduled_for=six_days_ago) jobs = dao_get_jobs_older_than_data_retention( - notification_types=[LETTER_TYPE] + notification_types=[SMS_TYPE] ) assert len(jobs) == 2 @@ -359,98 +332,6 @@ def assert_job_stat(job, result, sent, delivered, failed): assert result.failed == failed -@freeze_time('2019-06-13 13:00') -def test_dao_cancel_letter_job_cancels_job_and_returns_number_of_cancelled_notifications( - sample_letter_template -): - job = create_job(template=sample_letter_template, notification_count=1, job_status='finished') - notification = create_notification(template=job.template, job=job, status='created') - result = dao_cancel_letter_job(job) - assert result == 1 - assert notification.status == 'cancelled' - assert job.job_status == 'cancelled' - - -@freeze_time('2019-06-13 13:00') -def test_can_letter_job_be_cancelled_returns_true_if_job_can_be_cancelled(sample_letter_template): - job = create_job(template=sample_letter_template, notification_count=1, job_status='finished') - create_notification(template=job.template, job=job, status='created') - result, errors = can_letter_job_be_cancelled(job) - assert result - assert not errors - - -@freeze_time('2019-06-13 13:00') -def test_can_letter_job_be_cancelled_returns_false_and_error_message_if_notification_status_sending( - sample_letter_template -): - job = create_job(template=sample_letter_template, notification_count=2, job_status='finished') - create_notification(template=job.template, job=job, status='sending') - create_notification(template=job.template, job=job, status='created') - result, errors = can_letter_job_be_cancelled(job) - assert not result - assert errors == "It’s too late to cancel sending, these letters have already been sent." - - -def test_can_letter_job_be_cancelled_returns_false_and_error_message_if_letters_already_sent_to_dvla( - sample_letter_template -): - with freeze_time('2019-06-13 13:00'): - job = create_job(template=sample_letter_template, notification_count=1, job_status='finished') - letter = create_notification(template=job.template, job=job, status='created') - - with freeze_time('2019-06-13 22:32'): - result, errors = can_letter_job_be_cancelled(job) - assert not result - assert errors == "It’s too late to cancel sending, these letters have already been sent." - assert letter.status == 'created' - assert job.job_status == 'finished' - - -@freeze_time('2019-06-13 13:00') -def test_can_letter_job_be_cancelled_returns_false_and_error_message_if_not_a_letter_job( - sample_template -): - job = create_job(template=sample_template, notification_count=1, job_status='finished') - create_notification(template=job.template, job=job, status='created') - result, errors = can_letter_job_be_cancelled(job) - assert not result - assert errors == "Only letter jobs can be cancelled through this endpoint. This is not a letter job." - - -@freeze_time('2019-06-13 13:00') -def test_can_letter_job_be_cancelled_returns_false_and_error_message_if_job_not_finished( - sample_letter_template -): - job = create_job(template=sample_letter_template, notification_count=1, job_status="in progress") - create_notification(template=job.template, job=job, status='created') - result, errors = can_letter_job_be_cancelled(job) - assert not result - assert errors == "We are still processing these letters, please try again in a minute." - - -@freeze_time('2019-06-13 13:00') -def test_can_letter_job_be_cancelled_returns_false_and_error_message_if_notifications_not_in_db_yet( - sample_letter_template -): - job = create_job(template=sample_letter_template, notification_count=2, job_status='finished') - create_notification(template=job.template, job=job, status='created') - result, errors = can_letter_job_be_cancelled(job) - assert not result - assert errors == "We are still processing these letters, please try again in a minute." - - -def test_can_letter_job_be_cancelled_respects_bst(sample_letter_template): - job = create_job(template=sample_letter_template, created_at=datetime(2020, 4, 9, 23, 30), job_status='finished') - create_notification(template=job.template, job=job, status='created', created_at=datetime(2020, 4, 9, 23, 32)) - - with freeze_time('2020-04-10 10:00'): - result, errors = can_letter_job_be_cancelled(job) - - assert not errors - assert result - - def test_find_jobs_with_missing_rows(sample_email_template): healthy_job = create_job(template=sample_email_template, notification_count=3, diff --git a/tests/app/dao/test_letter_branding_dao.py b/tests/app/dao/test_letter_branding_dao.py deleted file mode 100644 index 6122f2dcc..000000000 --- a/tests/app/dao/test_letter_branding_dao.py +++ /dev/null @@ -1,65 +0,0 @@ -import uuid - -import pytest -from sqlalchemy.exc import SQLAlchemyError - -from app.dao.letter_branding_dao import ( - dao_create_letter_branding, - dao_get_all_letter_branding, - dao_get_letter_branding_by_id, - dao_update_letter_branding, -) -from app.models import LetterBranding -from tests.app.db import create_letter_branding - - -def test_dao_get_letter_branding_by_id(notify_db_session): - letter_branding = create_letter_branding() - result = dao_get_letter_branding_by_id(letter_branding.id) - - assert result == letter_branding - - -def test_dao_get_letter_brand_by_id_raises_exception_if_does_not_exist(notify_db_session): - with pytest.raises(expected_exception=SQLAlchemyError): - dao_get_letter_branding_by_id(uuid.uuid4()) - - -def test_dao_get_all_letter_branding(notify_db_session): - hm_gov = create_letter_branding() - test_branding = create_letter_branding( - name='test branding', filename='test-branding', - ) - - results = dao_get_all_letter_branding() - - assert hm_gov in results - assert test_branding in results - assert len(results) == 2 - - -def test_dao_get_all_letter_branding_returns_empty_list_if_no_brands_exist(notify_db_session): - assert dao_get_all_letter_branding() == [] - - -def test_dao_create_letter_branding(notify_db_session): - data = { - 'name': 'test-logo', - 'filename': 'test-logo' - } - assert LetterBranding.query.count() == 0 - dao_create_letter_branding(LetterBranding(**data)) - - assert LetterBranding.query.count() == 1 - - new_letter_branding = LetterBranding.query.first() - assert new_letter_branding.name == data['name'] - assert new_letter_branding.filename == data['name'] - - -def test_dao_update_letter_branding(notify_db_session): - create_letter_branding(name='original') - letter_branding = LetterBranding.query.first() - assert letter_branding.name == 'original' - dao_update_letter_branding(letter_branding.id, name='new name') - assert LetterBranding.query.first().name == 'new name' diff --git a/tests/app/dao/test_organisation_dao.py b/tests/app/dao/test_organisation_dao.py index 3b7dc8ca4..100778636 100644 --- a/tests/app/dao/test_organisation_dao.py +++ b/tests/app/dao/test_organisation_dao.py @@ -20,7 +20,6 @@ from app.models import Organisation, Service from tests.app.db import ( create_domain, create_email_branding, - create_letter_branding, create_organisation, create_service, create_user, @@ -60,7 +59,6 @@ def test_update_organisation(notify_db_session): organisation = Organisation.query.one() user = create_user() email_branding = create_email_branding() - letter_branding = create_letter_branding() data = { 'name': 'new name', @@ -70,7 +68,6 @@ def test_update_organisation(notify_db_session): "agreement_signed_at": datetime.datetime.utcnow(), "agreement_signed_by_id": user.id, "agreement_signed_version": 999.99, - "letter_branding_id": letter_branding.id, "email_branding_id": email_branding.id, } @@ -122,12 +119,10 @@ def test_update_organisation_does_not_update_the_service_if_certain_attributes_n sample_organisation, ): email_branding = create_email_branding() - letter_branding = create_letter_branding() sample_service.organisation_type = 'state' sample_organisation.organisation_type = 'federal' sample_organisation.email_branding = email_branding - sample_organisation.letter_branding = letter_branding sample_organisation.services.append(sample_service) db.session.commit() @@ -144,9 +139,6 @@ def test_update_organisation_does_not_update_the_service_if_certain_attributes_n assert sample_organisation.email_branding == email_branding assert sample_service.email_branding is None - assert sample_organisation.letter_branding == letter_branding - assert sample_service.letter_branding is None - def test_update_organisation_updates_the_service_org_type_if_org_type_is_provided( sample_service, @@ -173,18 +165,14 @@ def test_update_organisation_updates_the_service_branding_if_branding_is_provide sample_organisation, ): email_branding = create_email_branding() - letter_branding = create_letter_branding() sample_organisation.services.append(sample_service) db.session.commit() dao_update_organisation(sample_organisation.id, email_branding_id=email_branding.id) - dao_update_organisation(sample_organisation.id, letter_branding_id=letter_branding.id) assert sample_organisation.email_branding == email_branding - assert sample_organisation.letter_branding == letter_branding assert sample_service.email_branding == email_branding - assert sample_service.letter_branding == letter_branding def test_update_organisation_does_not_override_service_branding( @@ -193,22 +181,16 @@ def test_update_organisation_does_not_override_service_branding( ): email_branding = create_email_branding() custom_email_branding = create_email_branding(name='custom') - letter_branding = create_letter_branding() - custom_letter_branding = create_letter_branding(name='custom', filename='custom') sample_service.email_branding = custom_email_branding - sample_service.letter_branding = custom_letter_branding sample_organisation.services.append(sample_service) db.session.commit() dao_update_organisation(sample_organisation.id, email_branding_id=email_branding.id) - dao_update_organisation(sample_organisation.id, letter_branding_id=letter_branding.id) assert sample_organisation.email_branding == email_branding - assert sample_organisation.letter_branding == letter_branding assert sample_service.email_branding == custom_email_branding - assert sample_service.letter_branding == custom_letter_branding def test_update_organisation_updates_services_with_new_crown_type( diff --git a/tests/app/dao/test_permissions_dao.py b/tests/app/dao/test_permissions_dao.py index eec84c8ff..a799eb512 100644 --- a/tests/app/dao/test_permissions_dao.py +++ b/tests/app/dao/test_permissions_dao.py @@ -4,13 +4,12 @@ from tests.app.db import create_service def test_get_permissions_by_user_id_returns_all_permissions(sample_service): permissions = permission_dao.get_permissions_by_user_id(user_id=sample_service.users[0].id) - assert len(permissions) == 8 + assert len(permissions) == 7 assert sorted(["manage_users", "manage_templates", "manage_settings", "send_texts", "send_emails", - "send_letters", "manage_api_keys", "view_activity"]) == sorted([i.permission for i in permissions]) @@ -20,6 +19,6 @@ def test_get_permissions_by_user_id_returns_only_active_service(sample_user): inactive_service = create_service(user=sample_user, service_name="Inactive service", active=False) permissions = permission_dao.get_permissions_by_user_id(user_id=sample_user.id) - assert len(permissions) == 8 + assert len(permissions) == 7 assert active_service in [i.service for i in permissions] assert inactive_service not in [i.service for i in permissions] diff --git a/tests/app/dao/test_returned_letters_dao.py b/tests/app/dao/test_returned_letters_dao.py deleted file mode 100644 index ca3046831..000000000 --- a/tests/app/dao/test_returned_letters_dao.py +++ /dev/null @@ -1,248 +0,0 @@ -import uuid -from datetime import date, datetime, timedelta - -from freezegun import freeze_time - -from app.dao.returned_letters_dao import ( - fetch_most_recent_returned_letter, - fetch_recent_returned_letter_count, - fetch_returned_letter_summary, - fetch_returned_letters, - insert_or_update_returned_letters, -) -from app.models import NOTIFICATION_RETURNED_LETTER, ReturnedLetter -from tests.app.db import ( - create_notification, - create_notification_history, - create_returned_letter, - create_service, -) - - -def test_insert_or_update_returned_letters_inserts(sample_letter_template): - notification = create_notification(template=sample_letter_template, - reference='ref1') - history = create_notification_history(template=sample_letter_template, - reference='ref2') - - assert ReturnedLetter.query.count() == 0 - - insert_or_update_returned_letters(['ref1', 'ref2']) - - returned_letters = ReturnedLetter.query.all() - - assert len(returned_letters) == 2 - returned_letters_ = [x.notification_id for x in returned_letters] - assert notification.id in returned_letters_ - assert history.id in returned_letters_ - - -def test_insert_or_update_returned_letters_updates(sample_letter_template): - notification = create_notification(template=sample_letter_template, - reference='ref1') - history = create_notification_history(template=sample_letter_template, - reference='ref2') - - assert ReturnedLetter.query.count() == 0 - with freeze_time('2019-12-09 13:30'): - insert_or_update_returned_letters(['ref1', 'ref2']) - returned_letters = ReturnedLetter.query.all() - assert len(returned_letters) == 2 - for x in returned_letters: - assert x.reported_at == date(2019, 12, 9) - assert x.created_at == datetime(2019, 12, 9, 13, 30) - assert not x.updated_at - assert x.notification_id in [notification.id, history.id] - - with freeze_time('2019-12-10 14:20'): - insert_or_update_returned_letters(['ref1', 'ref2']) - returned_letters = ReturnedLetter.query.all() - assert len(returned_letters) == 2 - for x in returned_letters: - assert x.reported_at == date(2019, 12, 10) - assert x.created_at == datetime(2019, 12, 9, 13, 30) - assert x.updated_at == datetime(2019, 12, 10, 14, 20) - assert x.notification_id in [notification.id, history.id] - - -def test_insert_or_update_returned_letters_when_no_notification(notify_db_session): - insert_or_update_returned_letters(['ref1']) - assert ReturnedLetter.query.count() == 0 - - -def test_insert_or_update_returned_letters_for_history_only(sample_letter_template): - history_1 = create_notification_history(template=sample_letter_template, - reference='ref1') - history_2 = create_notification_history(template=sample_letter_template, - reference='ref2') - - assert ReturnedLetter.query.count() == 0 - insert_or_update_returned_letters(['ref1', 'ref2']) - returned_letters = ReturnedLetter.query.all() - assert len(returned_letters) == 2 - for x in returned_letters: - assert x.notification_id in [history_1.id, history_2.id] - - -def test_insert_or_update_returned_letters_with_duplicates_in_reference_list(sample_letter_template): - notification_1 = create_notification(template=sample_letter_template, - reference='ref1') - notification_2 = create_notification(template=sample_letter_template, - reference='ref2') - - assert ReturnedLetter.query.count() == 0 - insert_or_update_returned_letters(['ref1', 'ref2', 'ref1', 'ref2']) - returned_letters = ReturnedLetter.query.all() - assert len(returned_letters) == 2 - for x in returned_letters: - assert x.notification_id in [notification_1.id, notification_2.id] - - -def test_get_returned_letter_count(sample_service): - # Before 7 days – don’t count - create_returned_letter( - sample_service, - reported_at=datetime(2001, 1, 1) - ) - create_returned_letter( - sample_service, - reported_at=datetime(2010, 11, 1, 23, 59, 59), - ) - # In the last 7 days – count - create_returned_letter( - sample_service, - reported_at=datetime(2010, 11, 2, 0, 0, 0), - ) - create_returned_letter( - sample_service, - reported_at=datetime(2010, 11, 8, 10, 0), - ) - create_returned_letter( - sample_service, - reported_at=datetime(2010, 11, 8, 10, 0), - ) - # Different service – don’t count - create_returned_letter( - create_service(service_id=uuid.uuid4(), service_name='Other service'), - reported_at=datetime(2010, 11, 8, 10, 0), - ) - - with freeze_time('2010-11-08 10:10'): - result = fetch_recent_returned_letter_count(sample_service.id) - - assert result.returned_letter_count == 3 - - -def test_fetch_most_recent_returned_letter_for_service(sample_service): - # Older - create_returned_letter( - sample_service, - reported_at=datetime(2009, 9, 9, 9, 9), - ) - # Newer - create_returned_letter( - sample_service, - reported_at=datetime(2010, 10, 10, 10, 10), - ) - # Newest, but different service - create_returned_letter( - create_service(service_id=uuid.uuid4(), service_name='Other service'), - reported_at=datetime(2011, 11, 11, 11, 11), - ) - result = fetch_most_recent_returned_letter(sample_service.id) - - assert str(result.reported_at) == '2010-10-10' - - -def test_get_returned_letter_summary(sample_service): - now = datetime.utcnow() - create_returned_letter(sample_service, reported_at=now) - create_returned_letter(sample_service, reported_at=now) - - results = fetch_returned_letter_summary(sample_service.id) - - assert len(results) == 1 - - assert results[0].returned_letter_count == 2 - assert results[0].reported_at == now.date() - - -def test_get_returned_letter_summary_orders_by_reported_at(sample_service): - now = datetime.utcnow() - last_month = datetime.utcnow() - timedelta(days=30) - create_returned_letter(sample_service, reported_at=now) - create_returned_letter(sample_service, reported_at=now) - create_returned_letter(sample_service, reported_at=now) - create_returned_letter(sample_service, reported_at=last_month) - create_returned_letter(sample_service, reported_at=last_month) - create_returned_letter() # returned letter for a different service - - results = fetch_returned_letter_summary(sample_service.id) - - assert len(results) == 2 - assert results[0].reported_at == now.date() - assert results[0].returned_letter_count == 3 - assert results[1].reported_at == last_month.date() - assert results[1].returned_letter_count == 2 - - -def test_fetch_returned_letters_from_notifications_and_notification_history(sample_letter_template): - today = datetime.now() - last_month = datetime.now() - timedelta(days=30) - - letter_1 = create_notification(template=sample_letter_template, client_reference='letter_1', - status=NOTIFICATION_RETURNED_LETTER, - created_at=datetime.utcnow() - timedelta(days=1)) - returned_letter_1 = create_returned_letter(service=sample_letter_template.service, reported_at=today, - notification_id=letter_1.id) - letter_2 = create_notification_history(template=sample_letter_template, client_reference='letter_2', - status=NOTIFICATION_RETURNED_LETTER, created_at=datetime.utcnow()) - returned_letter_2 = create_returned_letter(service=sample_letter_template.service, reported_at=today, - notification_id=letter_2.id) - letter_3 = create_notification_history(template=sample_letter_template, client_reference='letter_3', - status=NOTIFICATION_RETURNED_LETTER) - create_returned_letter(service=sample_letter_template.service, reported_at=last_month, - notification_id=letter_3.id) - - results = fetch_returned_letters(service_id=sample_letter_template.service_id, report_date=today.date()) - - assert len(results) == 2 - assert results[0] == (letter_2.id, returned_letter_2.reported_at, letter_2.client_reference, letter_2.created_at, - sample_letter_template.name, letter_2.template_id, letter_2.template_version, False, None, - None, None, None, None, None) - assert results[1] == (letter_1.id, returned_letter_1.reported_at, letter_1.client_reference, letter_1.created_at, - sample_letter_template.name, letter_1.template_id, letter_1.template_version, False, - letter_1.api_key_id, None, None, None, None, None) - - -def test_fetch_returned_letters_with_jobs(sample_letter_job): - today = datetime.now() - letter_1 = create_notification_history(template=sample_letter_job.template, client_reference='letter_1', - status=NOTIFICATION_RETURNED_LETTER, - job=sample_letter_job, job_row_number=20, - created_at=datetime.utcnow() - timedelta(minutes=1)) - returned_letter_1 = create_returned_letter(service=sample_letter_job.service, reported_at=today, - notification_id=letter_1.id) - - results = fetch_returned_letters(service_id=sample_letter_job.service_id, report_date=today.date()) - assert len(results) == 1 - assert results[0] == (letter_1.id, returned_letter_1.reported_at, letter_1.client_reference, letter_1.created_at, - sample_letter_job.template.name, letter_1.template_id, letter_1.template_version, False, None, - None, None, None, sample_letter_job.original_file_name, 21) - - -def test_fetch_returned_letters_with_create_by_user(sample_letter_template): - today = datetime.now() - letter_1 = create_notification_history(template=sample_letter_template, client_reference='letter_1', - status=NOTIFICATION_RETURNED_LETTER, - created_at=datetime.utcnow() - timedelta(minutes=1), - created_by_id=sample_letter_template.service.users[0].id) - returned_letter_1 = create_returned_letter(service=sample_letter_template.service, reported_at=today, - notification_id=letter_1.id) - - results = fetch_returned_letters(service_id=sample_letter_template.service_id, report_date=today.date()) - assert len(results) == 1 - assert results[0] == (letter_1.id, returned_letter_1.reported_at, letter_1.client_reference, letter_1.created_at, - sample_letter_template.name, letter_1.template_id, letter_1.template_version, False, None, - letter_1.created_by_id, sample_letter_template.service.users[0].name, - sample_letter_template.service.users[0].email_address, None, None) diff --git a/tests/app/dao/test_service_data_retention_dao.py b/tests/app/dao/test_service_data_retention_dao.py index 6997b5e26..41327c894 100644 --- a/tests/app/dao/test_service_data_retention_dao.py +++ b/tests/app/dao/test_service_data_retention_dao.py @@ -17,27 +17,23 @@ from tests.app.db import create_service, create_service_data_retention def test_fetch_service_data_retention(sample_service): email_data_retention = insert_service_data_retention(sample_service.id, 'email', 3) - letter_data_retention = insert_service_data_retention(sample_service.id, 'letter', 30) sms_data_retention = insert_service_data_retention(sample_service.id, 'sms', 5) list_of_data_retention = fetch_service_data_retention(sample_service.id) - assert len(list_of_data_retention) == 3 + assert len(list_of_data_retention) == 2 assert list_of_data_retention[0] == email_data_retention assert list_of_data_retention[1] == sms_data_retention - assert list_of_data_retention[2] == letter_data_retention def test_fetch_service_data_retention_only_returns_row_for_service(sample_service): another_service = create_service(service_name="Another service") email_data_retention = insert_service_data_retention(sample_service.id, 'email', 3) - letter_data_retention = insert_service_data_retention(sample_service.id, 'letter', 30) insert_service_data_retention(another_service.id, 'sms', 5) list_of_data_retention = fetch_service_data_retention(sample_service.id) - assert len(list_of_data_retention) == 2 + assert len(list_of_data_retention) == 1 assert list_of_data_retention[0] == email_data_retention - assert list_of_data_retention[1] == letter_data_retention def test_fetch_service_data_retention_returns_empty_list_when_no_rows_for_service(sample_service): @@ -136,10 +132,7 @@ def test_update_service_data_retention_does_not_update_row_if_data_retention_is_ @pytest.mark.parametrize('notification_type, alternate', [('sms', 'email'), - ('email', 'sms'), - ('letter', 'email'), - ('letter', 'sms')] - ) + ('email', 'sms')]) def test_fetch_service_data_retention_by_notification_type(sample_service, notification_type, alternate): data_retention = create_service_data_retention(service=sample_service, notification_type=notification_type) create_service_data_retention(service=sample_service, notification_type=alternate) diff --git a/tests/app/dao/test_service_letter_contact_dao.py b/tests/app/dao/test_service_letter_contact_dao.py deleted file mode 100644 index 4a996dcb4..000000000 --- a/tests/app/dao/test_service_letter_contact_dao.py +++ /dev/null @@ -1,248 +0,0 @@ -import uuid - -import pytest -from sqlalchemy.exc import SQLAlchemyError - -from app.dao.service_letter_contact_dao import ( - add_letter_contact_for_service, - archive_letter_contact, - dao_get_letter_contact_by_id, - dao_get_letter_contacts_by_service_id, - update_letter_contact, -) -from app.models import ServiceLetterContact -from tests.app.db import create_letter_contact, create_service, create_template - - -def test_dao_get_letter_contacts_by_service_id(notify_db_session): - service = create_service() - default_letter_contact = create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA') - second_letter_contact = create_letter_contact(service=service, contact_block='Cardiff, CA1 2DB', is_default=False) - third_letter_contact = create_letter_contact(service=service, contact_block='London, E1 8QS', is_default=False) - - results = dao_get_letter_contacts_by_service_id(service_id=service.id) - - assert len(results) == 3 - assert default_letter_contact == results[0] - assert third_letter_contact == results[1] - assert second_letter_contact == results[2] - - -def test_dao_get_letter_contacts_by_service_id_does_not_return_archived_contacts(notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA') - create_letter_contact(service=service, contact_block='Cardiff, CA1 2DB', is_default=False) - archived_contact = create_letter_contact( - service=service, - contact_block='London, E1 8QS', - is_default=False, - archived=True - ) - - results = dao_get_letter_contacts_by_service_id(service_id=service.id) - - assert len(results) == 2 - assert archived_contact not in results - - -def test_add_letter_contact_for_service_creates_additional_letter_contact_for_service(notify_db_session): - service = create_service() - - create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA') - add_letter_contact_for_service(service_id=service.id, contact_block='Swansea, SN1 3CC', is_default=False) - - results = dao_get_letter_contacts_by_service_id(service_id=service.id) - - assert len(results) == 2 - - assert results[0].contact_block == 'Edinburgh, ED1 1AA' - assert results[0].is_default - assert not results[0].archived - - assert results[1].contact_block == 'Swansea, SN1 3CC' - assert not results[1].is_default - assert not results[1].archived - - -def test_add_another_letter_contact_as_default_overrides_existing(notify_db_session): - service = create_service() - - create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA') - add_letter_contact_for_service(service_id=service.id, contact_block='Swansea, SN1 3CC', is_default=True) - - results = dao_get_letter_contacts_by_service_id(service_id=service.id) - - assert len(results) == 2 - - assert results[0].contact_block == 'Swansea, SN1 3CC' - assert results[0].is_default - - assert results[1].contact_block == 'Edinburgh, ED1 1AA' - assert not results[1].is_default - - -def test_add_letter_contact_does_not_override_default(notify_db_session): - service = create_service() - - add_letter_contact_for_service(service_id=service.id, contact_block='Edinburgh, ED1 1AA', is_default=True) - add_letter_contact_for_service(service_id=service.id, contact_block='Swansea, SN1 3CC', is_default=False) - - results = dao_get_letter_contacts_by_service_id(service_id=service.id) - - assert len(results) == 2 - - assert results[0].contact_block == 'Edinburgh, ED1 1AA' - assert results[0].is_default - - assert results[1].contact_block == 'Swansea, SN1 3CC' - assert not results[1].is_default - - -def test_add_letter_contact_with_no_default_is_fine(notify_db_session): - service = create_service() - letter_contact = add_letter_contact_for_service( - service_id=service.id, - contact_block='Swansea, SN1 3CC', - is_default=False - ) - assert service.letter_contacts == [letter_contact] - - -def test_add_letter_contact_when_multiple_defaults_exist_raises_exception(notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA') - create_letter_contact(service=service, contact_block='Aberdeen, AB12 23X') - - with pytest.raises(Exception): - add_letter_contact_for_service(service_id=service.id, contact_block='Swansea, SN1 3CC', is_default=False) - - -def test_can_update_letter_contact(notify_db_session): - service = create_service() - letter_contact = create_letter_contact(service=service, contact_block='Aberdeen, AB12 23X') - - update_letter_contact( - service_id=service.id, - letter_contact_id=letter_contact.id, - contact_block='Warwick, W14 TSR', - is_default=True - ) - - updated_letter_contact = ServiceLetterContact.query.get(letter_contact.id) - - assert updated_letter_contact.contact_block == 'Warwick, W14 TSR' - assert updated_letter_contact.updated_at - assert updated_letter_contact.is_default - - -def test_update_letter_contact_as_default_overides_existing_default(notify_db_session): - service = create_service() - - create_letter_contact(service=service, contact_block='Aberdeen, AB12 23X') - second_letter_contact = create_letter_contact(service=service, contact_block='Swansea, SN1 3CC', is_default=False) - - update_letter_contact( - service_id=service.id, - letter_contact_id=second_letter_contact.id, - contact_block='Warwick, W14 TSR', - is_default=True - ) - - results = dao_get_letter_contacts_by_service_id(service_id=service.id) - assert len(results) == 2 - - assert results[0].contact_block == 'Warwick, W14 TSR' - assert results[0].is_default - - assert results[1].contact_block == 'Aberdeen, AB12 23X' - assert not results[1].is_default - - -def test_update_letter_contact_unset_default_for_only_letter_contact_is_fine(notify_db_session): - service = create_service() - only_letter_contact = create_letter_contact(service=service, contact_block='Aberdeen, AB12 23X') - update_letter_contact( - service_id=service.id, - letter_contact_id=only_letter_contact.id, - contact_block='Warwick, W14 TSR', - is_default=False - ) - assert only_letter_contact.is_default is False - - -def test_archive_letter_contact(notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block='Aberdeen, AB12 23X') - letter_contact = create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA', is_default=False) - - archive_letter_contact(service.id, letter_contact.id) - - assert letter_contact.archived - assert letter_contact.updated_at is not None - - -def test_archive_letter_contact_does_not_archive_a_letter_contact_for_a_different_service( - notify_db_session, - sample_service, -): - service = create_service(service_name="First service") - letter_contact = create_letter_contact( - service=sample_service, - contact_block='Edinburgh, ED1 1AA', - is_default=False) - - with pytest.raises(SQLAlchemyError): - archive_letter_contact(service.id, letter_contact.id) - - assert not letter_contact.archived - - -def test_archive_letter_contact_can_archive_a_service_default_letter_contact(notify_db_session): - service = create_service() - letter_contact = create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA') - archive_letter_contact(service.id, letter_contact.id) - assert letter_contact.archived is True - - -def test_archive_letter_contact_does_dissociates_template_defaults_before_archiving(notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA') - template_default = create_letter_contact(service=service, contact_block='Aberdeen, AB12 23X', is_default=False) - associated_template_1 = create_template(service=service, template_type='letter', reply_to=template_default.id) - associated_template_2 = create_template(service=service, template_type='letter', reply_to=template_default.id) - - assert associated_template_1.reply_to == template_default.id - assert associated_template_2.reply_to == template_default.id - assert template_default.archived is False - - archive_letter_contact(service.id, template_default.id) - - assert associated_template_1.reply_to is None - assert associated_template_2.reply_to is None - assert template_default.archived is True - - -def test_dao_get_letter_contact_by_id(sample_service): - letter_contact = create_letter_contact(service=sample_service, contact_block='Aberdeen, AB12 23X') - result = dao_get_letter_contact_by_id(service_id=sample_service.id, letter_contact_id=letter_contact.id) - assert result == letter_contact - - -def test_dao_get_letter_contact_by_id_raises_sqlalchemy_error_when_letter_contact_does_not_exist(sample_service): - with pytest.raises(SQLAlchemyError): - dao_get_letter_contact_by_id(service_id=sample_service.id, letter_contact_id=uuid.uuid4()) - - -def test_dao_get_letter_contact_by_id_raises_sqlalchemy_error_when_letter_contact_is_archived(sample_service): - archived_contact = create_letter_contact( - service=sample_service, - contact_block='Aberdeen, AB12 23X', - archived=True) - with pytest.raises(SQLAlchemyError): - dao_get_letter_contact_by_id(service_id=sample_service.id, letter_contact_id=archived_contact.id) - - -def test_dao_get_letter_contact_by_id_raises_sqlalchemy_error_when_service_does_not_exist(sample_service): - letter_contact = create_letter_contact(service=sample_service, contact_block='Some address') - with pytest.raises(SQLAlchemyError): - dao_get_letter_contact_by_id(service_id=uuid.uuid4(), letter_contact_id=letter_contact.id) diff --git a/tests/app/dao/test_service_permissions_dao.py b/tests/app/dao/test_service_permissions_dao.py index f1cc125a0..394658583 100644 --- a/tests/app/dao/test_service_permissions_dao.py +++ b/tests/app/dao/test_service_permissions_dao.py @@ -8,7 +8,6 @@ from app.models import ( EMAIL_TYPE, INBOUND_SMS_TYPE, INTERNATIONAL_SMS_TYPE, - LETTER_TYPE, SMS_TYPE, ) from tests.app.db import create_service, create_service_permission @@ -29,15 +28,14 @@ def test_create_service_permission(service_without_permissions): def test_fetch_service_permissions_gets_service_permissions(service_without_permissions): - create_service_permission(service_id=service_without_permissions.id, permission=LETTER_TYPE) create_service_permission(service_id=service_without_permissions.id, permission=INTERNATIONAL_SMS_TYPE) create_service_permission(service_id=service_without_permissions.id, permission=SMS_TYPE) service_permissions = dao_fetch_service_permissions(service_without_permissions.id) - assert len(service_permissions) == 3 + assert len(service_permissions) == 2 assert all(sp.service_id == service_without_permissions.id for sp in service_permissions) - assert all(sp.permission in [LETTER_TYPE, INTERNATIONAL_SMS_TYPE, SMS_TYPE] for sp in service_permissions) + assert all(sp.permission in [INTERNATIONAL_SMS_TYPE, SMS_TYPE] for sp in service_permissions) def test_remove_service_permission(service_without_permissions): diff --git a/tests/app/dao/test_services_dao.py b/tests/app/dao/test_services_dao.py index afc5dfc38..0be894ef2 100644 --- a/tests/app/dao/test_services_dao.py +++ b/tests/app/dao/test_services_dao.py @@ -14,10 +14,7 @@ from app.dao.inbound_numbers_dao import ( dao_set_inbound_number_to_service, ) from app.dao.organisation_dao import dao_add_service_to_organisation -from app.dao.service_permissions_dao import ( - dao_add_service_permission, - dao_remove_service_permission, -) +from app.dao.service_permissions_dao import dao_remove_service_permission from app.dao.service_user_dao import ( dao_get_service_user, dao_update_service_user, @@ -46,14 +43,11 @@ from app.dao.services_dao import ( from app.dao.users_dao import create_user_code, save_model_user from app.models import ( EMAIL_TYPE, - INTERNATIONAL_LETTERS, INTERNATIONAL_SMS_TYPE, KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, - LETTER_TYPE, SMS_TYPE, - UPLOAD_LETTERS, ApiKey, InvitedUser, Job, @@ -73,11 +67,9 @@ from app.models import ( from tests.app.db import ( create_annual_billing, create_api_key, - create_email_branding, create_ft_billing, create_inbound_number, create_invited_user, - create_letter_branding, create_notification, create_notification_history, create_organisation, @@ -92,7 +84,6 @@ from tests.app.db import ( def test_create_service(notify_db_session): user = create_user() - create_letter_branding() assert Service.query.count() == 0 service = Service(name="service_name", email_from="email_from", @@ -112,7 +103,6 @@ def test_create_service(notify_db_session): assert user in service_db.users assert service_db.organisation_type == 'federal' assert service_db.crown is None - assert not service.letter_branding assert not service.organisation_id @@ -140,59 +130,10 @@ def test_create_service_with_organisation(notify_db_session): assert user in service_db.users assert service_db.organisation_type == 'state' assert service_db.crown is None - assert not service.letter_branding assert service.organisation_id == organisation.id assert service.organisation == organisation -@pytest.mark.parametrize('email_address, organisation_type', ( - ("test@example.gov.uk", 'nhs_central'), - ("test@example.gov.uk", 'nhs_local'), - ("test@example.gov.uk", 'nhs_gp'), - ("test@nhs.net", 'nhs_local'), - ("test@nhs.net", 'local'), - ("test@nhs.net", 'central'), - ("test@nhs.uk", 'central'), - ("test@example.nhs.uk", 'central'), - ("TEST@NHS.UK", 'central'), -)) -@pytest.mark.parametrize('branding_name_to_create, expected_branding', ( - ('NHS', True), - # Need to check that nothing breaks in environments that don’t have - # the NHS branding set up - ('SHN', False), -)) -@pytest.mark.skip(reason='Update for TTS') -def test_create_nhs_service_get_default_branding_based_on_email_address( - notify_db_session, - branding_name_to_create, - expected_branding, - email_address, - organisation_type, -): - user = create_user(email=email_address) - letter_branding = create_letter_branding(name=branding_name_to_create) - email_branding = create_email_branding(name=branding_name_to_create) - - service = Service( - name="service_name", - email_from="email_from", - message_limit=1000, - restricted=False, - organisation_type=organisation_type, - created_by=user, - ) - dao_create_service(service, user) - service_db = Service.query.one() - - if expected_branding: - assert service_db.letter_branding == letter_branding - assert service_db.email_branding == email_branding - else: - assert service_db.letter_branding is None - assert service_db.email_branding is None - - def test_cannot_create_two_services_with_same_name(notify_db_session): user = create_user() assert Service.query.count() == 0 @@ -337,7 +278,7 @@ def test_should_remove_user_from_service(notify_db_session): def test_removing_a_user_from_a_service_deletes_their_permissions(sample_user, sample_service): - assert len(Permission.query.all()) == 8 + assert len(Permission.query.all()) == 7 dao_remove_user_from_service(sample_service, sample_user) @@ -457,8 +398,6 @@ def test_dao_fetch_live_services_data(sample_user): create_service(service_name='not_active', active=False) create_service(service_name='not_live', count_as_live=False) email_template = create_template(service=service, template_type='email') - template_letter_1 = create_template(service=service, template_type='letter') - template_letter_2 = create_template(service=service_2, template_type='letter') dao_add_service_to_organisation(service=service, organisation_id=org.id) # two sms billing records for 1st service within current financial year: create_ft_billing(local_date='2019-04-20', template=sms_template) @@ -467,10 +406,6 @@ def test_dao_fetch_live_services_data(sample_user): create_ft_billing(local_date='2018-04-20', template=sms_template) # one email billing record for 1st service within current financial year: create_ft_billing(local_date='2019-04-20', template=email_template) - # one letter billing record for 1st service within current financial year: - create_ft_billing(local_date='2019-04-15', template=template_letter_1) - # one letter billing record for 2nd service within current financial year: - create_ft_billing(local_date='2019-04-16', template=template_letter_2) # 1st service: billing from 2018 and 2019 create_annual_billing(service.id, 500, 2018) @@ -488,19 +423,15 @@ def test_dao_fetch_live_services_data(sample_user): 'organisation_type': 'federal', 'consent_to_research': None, 'contact_name': 'Test User', 'contact_email': 'notify@digital.cabinet-office.gov.uk', 'contact_mobile': '+12028675309', 'live_date': datetime(2014, 4, 20, 10, 0), 'sms_volume_intent': None, 'email_volume_intent': None, - 'letter_volume_intent': None, 'sms_totals': 2, 'email_totals': 1, 'letter_totals': 1, - 'free_sms_fragment_limit': 100}, + 'sms_totals': 2, 'email_totals': 1, 'free_sms_fragment_limit': 100}, {'service_id': mock.ANY, 'service_name': 'third', 'organisation_name': None, 'consent_to_research': None, 'organisation_type': None, 'contact_name': None, 'contact_email': None, 'contact_mobile': None, 'live_date': datetime(2016, 4, 20, 10, 0), 'sms_volume_intent': None, - 'email_volume_intent': None, 'letter_volume_intent': None, - 'sms_totals': 0, 'email_totals': 0, 'letter_totals': 0, - 'free_sms_fragment_limit': 200}, + 'email_volume_intent': None, 'sms_totals': 0, 'email_totals': 0, 'free_sms_fragment_limit': 200}, {'service_id': mock.ANY, 'service_name': 'second', 'organisation_name': None, 'consent_to_research': None, 'contact_name': 'Test User', 'contact_email': 'notify@digital.cabinet-office.gov.uk', 'contact_mobile': '+12028675309', 'live_date': datetime(2017, 4, 20, 10, 0), 'sms_volume_intent': None, - 'organisation_type': None, 'email_volume_intent': None, 'letter_volume_intent': None, - 'sms_totals': 0, 'email_totals': 0, 'letter_totals': 1, + 'organisation_type': None, 'email_volume_intent': None, 'sms_totals': 0, 'email_totals': 0, 'free_sms_fragment_limit': 300} ] @@ -521,16 +452,16 @@ def test_create_service_returns_service_with_default_permissions(notify_db_sessi service = dao_fetch_service_by_id(service.id) _assert_service_permissions(service.permissions, ( - SMS_TYPE, EMAIL_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, UPLOAD_LETTERS, INTERNATIONAL_LETTERS + SMS_TYPE, EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, )) @pytest.mark.parametrize("permission_to_remove, permissions_remaining", [ (SMS_TYPE, ( - EMAIL_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, UPLOAD_LETTERS, INTERNATIONAL_LETTERS + EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, )), (EMAIL_TYPE, ( - SMS_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, UPLOAD_LETTERS, INTERNATIONAL_LETTERS + SMS_TYPE, INTERNATIONAL_SMS_TYPE, )), ]) def test_remove_permission_from_service_by_id_returns_service_with_correct_permissions( @@ -547,37 +478,14 @@ def test_removing_all_permission_returns_service_with_no_permissions(notify_db_s service = create_service() dao_remove_service_permission(service_id=service.id, permission=SMS_TYPE) dao_remove_service_permission(service_id=service.id, permission=EMAIL_TYPE) - dao_remove_service_permission(service_id=service.id, permission=LETTER_TYPE) dao_remove_service_permission(service_id=service.id, permission=INTERNATIONAL_SMS_TYPE) - dao_remove_service_permission(service_id=service.id, permission=UPLOAD_LETTERS) - dao_remove_service_permission(service_id=service.id, permission=INTERNATIONAL_LETTERS) service = dao_fetch_service_by_id(service.id) assert len(service.permissions) == 0 -def test_create_service_by_id_adding_and_removing_letter_returns_service_without_letter(service_factory): - service = service_factory.get('testing', email_from='testing') - - dao_remove_service_permission(service_id=service.id, permission=LETTER_TYPE) - dao_add_service_permission(service_id=service.id, permission=LETTER_TYPE) - - service = dao_fetch_service_by_id(service.id) - _assert_service_permissions(service.permissions, ( - SMS_TYPE, EMAIL_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, UPLOAD_LETTERS, INTERNATIONAL_LETTERS - )) - - dao_remove_service_permission(service_id=service.id, permission=LETTER_TYPE) - service = dao_fetch_service_by_id(service.id) - - _assert_service_permissions(service.permissions, ( - SMS_TYPE, EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, UPLOAD_LETTERS, INTERNATIONAL_LETTERS - )) - - def test_create_service_creates_a_history_record_with_current_data(notify_db_session): user = create_user() - create_letter_branding() assert Service.query.count() == 0 assert Service.get_history_model().query.count() == 0 service = Service(name="service_name", @@ -640,11 +548,13 @@ def test_update_service_permission_creates_a_history_record_with_current_data(no created_by=user) dao_create_service(service, user, service_permissions=[ SMS_TYPE, - EMAIL_TYPE, + # EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, ]) - service.permissions.append(ServicePermission(service_id=service.id, permission='letter')) + assert Service.query.count() == 1 + + service.permissions.append(ServicePermission(service_id=service.id, permission=EMAIL_TYPE)) dao_update_service(service) assert Service.query.count() == 1 @@ -655,7 +565,7 @@ def test_update_service_permission_creates_a_history_record_with_current_data(no assert service_from_db.version == 2 _assert_service_permissions(service.permissions, ( - SMS_TYPE, EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, LETTER_TYPE, + SMS_TYPE, EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, )) permission = [p for p in service.permissions if p.permission == 'sms'][0] @@ -668,7 +578,7 @@ def test_update_service_permission_creates_a_history_record_with_current_data(no service_from_db = Service.query.first() assert service_from_db.version == 3 _assert_service_permissions(service.permissions, ( - EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, LETTER_TYPE, + EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, )) history = Service.get_history_model().query.filter_by(name='service_name').order_by('version').all() @@ -709,7 +619,7 @@ def test_delete_service_and_associated_objects(notify_db_session): user.organisations = [organisation] assert ServicePermission.query.count() == len(( - SMS_TYPE, EMAIL_TYPE, LETTER_TYPE, INTERNATIONAL_SMS_TYPE, UPLOAD_LETTERS, INTERNATIONAL_LETTERS + SMS_TYPE, EMAIL_TYPE, INTERNATIONAL_SMS_TYPE, )) delete_service_and_all_associated_db_objects(service) @@ -742,7 +652,7 @@ def test_add_existing_user_to_another_service_doesnot_change_old_permissions(not dao_create_service(service_one, user) assert user.id == service_one.users[0].id test_user_permissions = Permission.query.filter_by(service=service_one, user=user).all() - assert len(test_user_permissions) == 8 + assert len(test_user_permissions) == 7 other_user = User( name='Other Test User', @@ -760,23 +670,23 @@ def test_add_existing_user_to_another_service_doesnot_change_old_permissions(not assert other_user.id == service_two.users[0].id other_user_permissions = Permission.query.filter_by(service=service_two, user=other_user).all() - assert len(other_user_permissions) == 8 + assert len(other_user_permissions) == 7 other_user_service_one_permissions = Permission.query.filter_by(service=service_one, user=other_user).all() assert len(other_user_service_one_permissions) == 0 # adding the other_user to service_one should leave all other_user permissions on service_two intact permissions = [] - for p in ['send_emails', 'send_texts', 'send_letters']: + for p in ['send_emails', 'send_texts']: permissions.append(Permission(permission=p)) dao_add_user_to_service(service_one, other_user, permissions=permissions) other_user_service_one_permissions = Permission.query.filter_by(service=service_one, user=other_user).all() - assert len(other_user_service_one_permissions) == 3 + assert len(other_user_service_one_permissions) == 2 other_user_service_two_permissions = Permission.query.filter_by(service=service_two, user=other_user).all() - assert len(other_user_service_two_permissions) == 8 + assert len(other_user_service_two_permissions) == 7 def test_fetch_stats_filters_on_service(notify_db_session): @@ -1167,14 +1077,6 @@ def _assert_service_permissions(service_permissions, expected): assert set(expected) == set(p.permission for p in service_permissions) -def create_email_sms_letter_template(): - service = create_service() - template_one = create_template(service=service, template_name='1', template_type='email') - template_two = create_template(service=service, template_name='2', template_type='sms') - template_three = create_template(service=service, template_name='3', template_type='letter') - return template_one, template_three, template_two - - @freeze_time("2019-12-02 12:00:00.000000") def test_dao_find_services_sending_to_tv_numbers(notify_db_session, fake_uuid): service_1 = create_service(service_name="Service 1", service_id=fake_uuid) diff --git a/tests/app/dao/test_templates_dao.py b/tests/app/dao/test_templates_dao.py index a02fee373..17ac05b37 100644 --- a/tests/app/dao/test_templates_dao.py +++ b/tests/app/dao/test_templates_dao.py @@ -11,16 +11,14 @@ from app.dao.templates_dao import ( dao_get_template_versions, dao_redact_template, dao_update_template, - dao_update_template_reply_to, ) from app.models import Template, TemplateHistory, TemplateRedacted -from tests.app.db import create_letter_contact, create_template +from tests.app.db import create_template @pytest.mark.parametrize('template_type, subject', [ ('sms', None), ('email', 'subject'), - ('letter', 'subject'), ]) def test_create_template(sample_service, sample_user, template_type, subject): data = { @@ -30,8 +28,6 @@ def test_create_template(sample_service, sample_user, template_type, subject): 'service': sample_service, 'created_by': sample_user } - if template_type == 'letter': - data['postage'] = 'second' if subject: data.update({'subject': subject}) template = Template(**data) @@ -54,24 +50,6 @@ def test_create_template_creates_redact_entry(sample_service): assert redacted.updated_by_id == sample_service.created_by_id -def test_create_template_with_reply_to(sample_service, sample_user): - letter_contact = create_letter_contact(sample_service, 'Edinburgh, ED1 1AA') - - data = { - 'name': 'Sample Template', - 'template_type': "letter", - 'content': "Template content", - 'service': sample_service, - 'created_by': sample_user, - 'reply_to': letter_contact.id, - 'postage': 'second' - } - template = Template(**data) - dao_create_template(template) - - assert dao_get_all_templates_for_service(sample_service.id)[0].reply_to == letter_contact.id - - def test_update_template(sample_service, sample_user): data = { 'name': 'Sample Template', @@ -90,88 +68,6 @@ def test_update_template(sample_service, sample_user): assert dao_get_all_templates_for_service(sample_service.id)[0].name == 'new name' -def test_dao_update_template_reply_to_none_to_some(sample_service, sample_user): - letter_contact = create_letter_contact(sample_service, 'Edinburgh, ED1 1AA') - - data = { - 'name': 'Sample Template', - 'template_type': "letter", - 'content': "Template content", - 'service': sample_service, - 'created_by': sample_user, - 'postage': 'second' - } - template = Template(**data) - dao_create_template(template) - created = Template.query.get(template.id) - assert created.reply_to is None - assert created.service_letter_contact_id is None - - dao_update_template_reply_to(template_id=template.id, - reply_to=letter_contact.id) - - updated = Template.query.get(template.id) - assert updated.reply_to == letter_contact.id - assert updated.version == 2 - assert updated.updated_at - - template_history = TemplateHistory.query.filter_by(id=created.id, version=2).one() - assert template_history.service_letter_contact_id == letter_contact.id - assert template_history.updated_at == updated.updated_at - - -def test_dao_update_template_reply_to_some_to_some(sample_service, sample_user): - letter_contact = create_letter_contact(sample_service, 'Edinburgh, ED1 1AA') - letter_contact_2 = create_letter_contact(sample_service, 'London, N1 1DE') - - data = { - 'name': 'Sample Template', - 'template_type': "letter", - 'content': "Template content", - 'service': sample_service, - 'created_by': sample_user, - 'service_letter_contact_id': letter_contact.id, - 'postage': 'second', - } - template = Template(**data) - dao_create_template(template) - created = Template.query.get(template.id) - dao_update_template_reply_to(template_id=created.id, reply_to=letter_contact_2.id) - updated = Template.query.get(template.id) - assert updated.reply_to == letter_contact_2.id - assert updated.version == 2 - assert updated.updated_at - - updated_history = TemplateHistory.query.filter_by(id=created.id, version=2).one() - assert updated_history.service_letter_contact_id == letter_contact_2.id - assert updated_history.updated_at == updated_history.updated_at - - -def test_dao_update_template_reply_to_some_to_none(sample_service, sample_user): - letter_contact = create_letter_contact(sample_service, 'Edinburgh, ED1 1AA') - data = { - 'name': 'Sample Template', - 'template_type': "letter", - 'content': "Template content", - 'service': sample_service, - 'created_by': sample_user, - 'service_letter_contact_id': letter_contact.id, - 'postage': 'second' - } - template = Template(**data) - dao_create_template(template) - created = Template.query.get(template.id) - dao_update_template_reply_to(template_id=created.id, reply_to=None) - updated = Template.query.get(template.id) - assert updated.reply_to is None - assert updated.version == 2 - assert updated.updated_at - - history = TemplateHistory.query.filter_by(id=created.id, version=2).one() - assert history.service_letter_contact_id is None - assert history.updated_at == updated.updated_at - - def test_redact_template(sample_template): redacted = TemplateRedacted.query.one() assert redacted.template_id == sample_template.id diff --git a/tests/app/dao/test_uploads_dao.py b/tests/app/dao/test_uploads_dao.py index 97823dd7c..ccb40d310 100644 --- a/tests/app/dao/test_uploads_dao.py +++ b/tests/app/dao/test_uploads_dao.py @@ -1,11 +1,9 @@ from datetime import datetime, timedelta +import pytest from freezegun import freeze_time -from app.dao.uploads_dao import ( - dao_get_uploaded_letters_by_print_date, - dao_get_uploads_by_service_id, -) +from app.dao.uploads_dao import dao_get_uploads_by_service_id from app.models import JOB_STATUS_IN_PROGRESS, LETTER_TYPE from tests.app.db import ( create_job, @@ -38,10 +36,10 @@ def create_uploaded_template(service): subject='Pre-compiled PDF', content="", hidden=True, - postage="second", ) +@pytest.mark.skip(reason="Investigate what remains after removing letters") @freeze_time("2020-02-02 09:00") # GMT time def test_get_uploads_for_service(sample_template): create_service_data_retention(sample_template.service, 'sms', days_of_retention=9) @@ -119,6 +117,7 @@ def test_get_uploads_for_service(sample_template): assert uploads_from_db[1] != other_uploads_from_db[1] +@pytest.mark.skip(reason="Investigate what remains after removing letters") @freeze_time("2020-02-02 18:00") def test_get_uploads_for_service_groups_letters(sample_template): letter_template = create_uploaded_template(sample_template.service) @@ -158,6 +157,7 @@ def test_get_uploads_for_service_groups_letters(sample_template): ] +@pytest.mark.skip(reason="Investigate what remains after removing letters") def test_get_uploads_does_not_return_cancelled_jobs_or_letters(sample_template): create_job(sample_template, job_status='scheduled') create_job(sample_template, job_status='cancelled') @@ -167,6 +167,7 @@ def test_get_uploads_does_not_return_cancelled_jobs_or_letters(sample_template): assert len(dao_get_uploads_by_service_id(sample_template.service_id).items) == 0 +@pytest.mark.skip(reason="Investigate what remains after removing letters") def test_get_uploads_orders_by_created_at_desc(sample_template): letter_template = create_uploaded_template(sample_template.service) @@ -187,6 +188,7 @@ def test_get_uploads_orders_by_created_at_desc(sample_template): ] +@pytest.mark.skip(reason="Investigate what remains after removing letters") def test_get_uploads_orders_by_processing_started_desc(sample_template): days_ago = datetime.utcnow() - timedelta(days=3) upload_1 = create_job(sample_template, processing_started=datetime.utcnow() - timedelta(days=1), @@ -203,6 +205,7 @@ def test_get_uploads_orders_by_processing_started_desc(sample_template): assert results[1].id == upload_2.id +@pytest.mark.skip(reason="Investigate what remains after removing letters") @freeze_time("2020-10-27 16:15") # GMT time def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_template): letter_template = create_uploaded_template(sample_template.service) @@ -227,6 +230,7 @@ def test_get_uploads_orders_by_processing_started_and_created_at_desc(sample_tem assert results[3].id is None +@pytest.mark.skip(reason="Investigate what remains after removing letters") @freeze_time('2020-04-02 14:00') # Few days after the clocks go forward def test_get_uploads_only_gets_uploads_within_service_retention_period(sample_template): letter_template = create_uploaded_template(sample_template.service) @@ -276,6 +280,7 @@ def test_get_uploads_only_gets_uploads_within_service_retention_period(sample_te assert results[3].created_at == upload_4.created_at.replace(hour=21, minute=30, second=0, microsecond=0) +@pytest.mark.skip(reason="Investigate what remains after removing letters") @freeze_time('2020-02-02 14:00') def test_get_uploads_is_paginated(sample_template): letter_template = create_uploaded_template(sample_template.service) @@ -314,59 +319,7 @@ def test_get_uploads_is_paginated(sample_template): assert results.items[0].upload_type == 'job' +@pytest.mark.skip(reason="Investigate what remains after removing letters") def test_get_uploads_returns_empty_list(sample_service): items = dao_get_uploads_by_service_id(sample_service.id).items assert items == [] - - -@freeze_time('2020-02-02 14:00') -def test_get_uploaded_letters_by_print_date(sample_template): - letter_template = create_uploaded_template(sample_template.service) - - # Letters for the previous day’s run - for _ in range(3): - create_uploaded_letter( - letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow().replace(day=1, hour=22, minute=29, second=59) - ) - - # Letters from yesterday that rolled into today’s run - for _ in range(30): - create_uploaded_letter( - letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow().replace(day=1, hour=22, minute=30, second=0) - ) - - # Letters that just made today’s run - for _ in range(30): - create_uploaded_letter( - letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow().replace(hour=22, minute=29, second=59) - ) - - # Letters that just missed today’s run - for _ in range(3): - create_uploaded_letter( - letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow().replace(hour=22, minute=30, second=0) - ) - - result = dao_get_uploaded_letters_by_print_date( - sample_template.service_id, - datetime.utcnow(), - ) - assert result.total == 60 - assert len(result.items) == 50 - assert result.has_next is True - assert result.has_prev is False - - result = dao_get_uploaded_letters_by_print_date( - sample_template.service_id, - datetime.utcnow(), - page=10, - page_size=2, - ) - assert result.total == 60 - assert len(result.items) == 2 - assert result.has_next is True - assert result.has_prev is True diff --git a/tests/app/dao/test_users_dao.py b/tests/app/dao/test_users_dao.py index 21d6d6558..823795ded 100644 --- a/tests/app/dao/test_users_dao.py +++ b/tests/app/dao/test_users_dao.py @@ -255,7 +255,7 @@ def test_user_can_be_archived_if_the_other_service_members_have_the_manage_setti create_permissions(user_1, sample_service, 'manage_settings') create_permissions(user_2, sample_service, 'manage_settings', 'view_activity') - create_permissions(user_3, sample_service, 'manage_settings', 'send_emails', 'send_letters', 'send_texts') + create_permissions(user_3, sample_service, 'manage_settings', 'send_emails', 'send_texts') assert len(sample_service.users) == 3 assert user_can_be_archived(user_1) @@ -290,7 +290,7 @@ def test_user_cannot_be_archived_if_the_other_service_members_do_not_have_the_ma create_permissions(active_user, sample_service, 'manage_settings') create_permissions(pending_user, sample_service, 'view_activity') - create_permissions(inactive_user, sample_service, 'send_emails', 'send_letters', 'send_texts') + create_permissions(inactive_user, sample_service, 'send_emails', 'send_texts') assert len(sample_service.users) == 3 assert not user_can_be_archived(active_user) diff --git a/tests/app/db.py b/tests/app/db.py index edcfd6cd7..9ae0bae3a 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -1,6 +1,6 @@ import random import uuid -from datetime import date, datetime, timedelta +from datetime import datetime, timedelta from app import db from app.dao import fact_processing_time_dao @@ -29,13 +29,11 @@ from app.dao.users_dao import save_model_user from app.models import ( EMAIL_TYPE, KEY_TYPE_NORMAL, - LETTER_TYPE, MOBILE_TYPE, SMS_TYPE, AnnualBilling, ApiKey, Complaint, - DailySortedLetter, Domain, EmailBranding, FactBilling, @@ -46,21 +44,17 @@ from app.models import ( InvitedOrganisationUser, InvitedUser, Job, - LetterBranding, - LetterRate, Notification, NotificationHistory, Organisation, Permission, Rate, - ReturnedLetter, Service, ServiceCallbackApi, ServiceContactList, ServiceEmailReplyTo, ServiceGuestList, ServiceInboundApi, - ServiceLetterContact, ServicePermission, ServiceSmsSender, Template, @@ -202,7 +196,6 @@ def create_template( hidden=False, archived=False, folder=None, - postage=None, process_type='normal', contact_block_id=None ): @@ -217,10 +210,6 @@ def create_template( 'folder': folder, 'process_type': process_type, } - if template_type == LETTER_TYPE: - data["postage"] = postage or "second" - if contact_block_id: - data['service_letter_contact_id'] = contact_block_id if template_type != SMS_TYPE: data['subject'] = subject template = Template(**data) @@ -256,7 +245,6 @@ def create_notification( one_off=False, reply_to_text=None, created_by_id=None, - postage=None, document_download_count=None, ): assert job or template @@ -279,9 +267,6 @@ def create_notification( if not api_key: api_key = create_api_key(template.service, key_type=key_type) - if template.template_type == 'letter' and postage is None: - postage = 'second' - data = { 'id': uuid.uuid4(), 'to': to_field, @@ -311,7 +296,6 @@ def create_notification( 'normalised_to': normalised_to, 'reply_to_text': reply_to_text, 'created_by_id': created_by_id, - 'postage': postage, 'document_download_count': document_download_count, } notification = Notification(**data) @@ -338,7 +322,6 @@ def create_notification_history( international=False, phone_prefix=None, created_by_id=None, - postage=None, id=None ): assert job or template @@ -352,9 +335,6 @@ def create_notification_history( sent_at = sent_at or datetime.utcnow() updated_at = updated_at or datetime.utcnow() - if template.template_type == 'letter' and postage is None: - postage = 'second' - data = { 'id': id or uuid.uuid4(), 'job_id': job and job.id, @@ -380,7 +360,6 @@ def create_notification_history( 'international': international, 'phone_prefix': phone_prefix, 'created_by_id': created_by_id, - 'postage': postage } notification_history = NotificationHistory(**data) db.session.add(notification_history) @@ -523,23 +502,6 @@ def create_rate(start_date, value, notification_type): return rate -def create_letter_rate(start_date=None, end_date=None, crown=True, sheet_count=1, rate=0.33, post_class='second'): - if start_date is None: - start_date = datetime(2016, 1, 1) - rate = LetterRate( - id=uuid.uuid4(), - start_date=start_date, - end_date=end_date, - crown=crown, - sheet_count=sheet_count, - rate=rate, - post_class=post_class - ) - db.session.add(rate) - db.session.commit() - return rate - - def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None): id_ = uuid.uuid4() @@ -613,26 +575,6 @@ def create_service_sms_sender( return service_sms_sender -def create_letter_contact( - service, - contact_block, - is_default=True, - archived=False -): - data = { - 'service': service, - 'contact_block': contact_block, - 'is_default': is_default, - 'archived': archived, - } - letter_content = ServiceLetterContact(**data) - - db.session.add(letter_content) - db.session.commit() - - return letter_content - - def create_annual_billing( service_id, free_sms_fragment_limit, financial_year_start ): @@ -699,23 +641,6 @@ def create_invited_org_user(organisation, invited_by, email_address='invite@exam return invited_org_user -def create_daily_sorted_letter(billing_day=None, - file_name="Notify-20180118123.rs.txt", - unsorted_count=0, - sorted_count=0): - daily_sorted_letter = DailySortedLetter( - billing_day=billing_day or date(2018, 1, 18), - file_name=file_name, - unsorted_count=unsorted_count, - sorted_count=sorted_count - ) - - db.session.add(daily_sorted_letter) - db.session.commit() - - return daily_sorted_letter - - def create_ft_billing(local_date, template, *, @@ -725,7 +650,6 @@ def create_ft_billing(local_date, rate=0, billable_unit=1, notifications_sent=1, - postage='none' ): data = FactBilling(local_date=local_date, service_id=template.service_id, @@ -736,8 +660,7 @@ def create_ft_billing(local_date, international=international, rate=rate, billable_units=billable_unit, - notifications_sent=notifications_sent, - postage=postage) + notifications_sent=notifications_sent,) db.session.add(data) db.session.commit() return data @@ -931,21 +854,12 @@ def create_template_folder(service, name='foo', parent=None): return tf -def create_letter_branding(name='HM Government', filename='hm-government'): - test_domain_branding = LetterBranding(name=name, - filename=filename, - ) - db.session.add(test_domain_branding) - db.session.commit() - return test_domain_branding - - def set_up_usage_data(start_date): year = int(start_date.strftime('%Y')) one_week_earlier = start_date - timedelta(days=7) two_days_later = start_date + timedelta(days=2) one_week_later = start_date + timedelta(days=7) - one_month_later = start_date + timedelta(days=31) + # one_month_later = start_date + timedelta(days=31) # service with sms and letters: service_1_sms_and_letter = create_service( @@ -955,7 +869,6 @@ def set_up_usage_data(start_date): billing_contact_email_addresses="service@billing.contact email@addresses.gov.uk", billing_reference="service billing reference" ) - letter_template_1 = create_template(service=service_1_sms_and_letter, template_type='letter') sms_template_1 = create_template(service=service_1_sms_and_letter, template_type='sms') create_annual_billing( service_id=service_1_sms_and_letter.id, free_sms_fragment_limit=10, financial_year_start=year @@ -976,13 +889,6 @@ def set_up_usage_data(start_date): create_ft_billing(local_date=start_date, template=sms_template_1, billable_unit=2, rate=0.11) create_ft_billing(local_date=two_days_later, template=sms_template_1, billable_unit=1, rate=0.11) - create_ft_billing(local_date=one_week_later, template=letter_template_1, - notifications_sent=2, billable_unit=2, rate=.35, postage='first') - create_ft_billing(local_date=one_month_later, template=letter_template_1, - notifications_sent=4, billable_unit=8, rate=.45, postage='second') - create_ft_billing(local_date=one_week_later, template=letter_template_1, - notifications_sent=2, billable_unit=4, rate=.45, postage='second') - # service with emails only: service_with_emails = create_service(service_name='b - emails') email_template = create_template(service=service_with_emails, template_type='email') @@ -994,44 +900,6 @@ def set_up_usage_data(start_date): create_ft_billing(local_date=start_date, template=email_template, notifications_sent=10) - # service with letters: - service_with_letters = create_service(service_name='c - letters only') - letter_template_3 = create_template(service=service_with_letters, template_type='letter') - org_for_service_with_letters = create_organisation( - name="Org for {}".format(service_with_letters.name), - purchase_order_number="org3 purchase order number", - billing_contact_names="org3 billing contact names", - billing_contact_email_addresses="org3@billing.contact email@addresses.gov.uk", - billing_reference="org3 billing reference" - ) - dao_add_service_to_organisation(service=service_with_letters, organisation_id=org_for_service_with_letters.id) - create_annual_billing(service_id=service_with_letters.id, free_sms_fragment_limit=0, financial_year_start=year) - - create_ft_billing(local_date=start_date, template=letter_template_3, - notifications_sent=2, billable_unit=3, rate=.50, postage='first') - create_ft_billing(local_date=one_week_later, template=letter_template_3, - notifications_sent=8, billable_unit=5, rate=.65, postage='second') - create_ft_billing(local_date=one_month_later, template=letter_template_3, - notifications_sent=12, billable_unit=5, rate=.65, postage='second') - - # service with letters, without an organisation: - service_with_letters_without_org = create_service(service_name='d - service without org') - letter_template_4 = create_template(service=service_with_letters_without_org, template_type='letter') - create_annual_billing( - service_id=service_with_letters_without_org.id, - free_sms_fragment_limit=0, - financial_year_start=year - ) - - create_ft_billing(local_date=two_days_later, template=letter_template_4, - notifications_sent=7, billable_unit=4, rate=1.55, postage='rest-of-world') - create_ft_billing(local_date=two_days_later, template=letter_template_4, - notifications_sent=8, billable_unit=4, rate=1.55, postage='europe') - create_ft_billing(local_date=two_days_later, template=letter_template_4, - notifications_sent=2, billable_unit=1, rate=.35, postage='second') - create_ft_billing(local_date=two_days_later, template=letter_template_4, - notifications_sent=1, billable_unit=1, rate=.50, postage='first') - # service with chargeable SMS, without an organisation service_with_sms_without_org = create_service( service_name='b - chargeable sms', @@ -1077,30 +945,12 @@ def set_up_usage_data(start_date): "service_1_sms_and_letter": service_1_sms_and_letter, "org_2": org_2, "service_with_emails": service_with_emails, - "org_for_service_with_letters": org_for_service_with_letters, - "service_with_letters": service_with_letters, - "service_with_letters_without_org": service_with_letters_without_org, "service_with_sms_without_org": service_with_sms_without_org, "service_with_sms_within_allowance": service_with_sms_within_allowance, "service_with_out_ft_billing_this_year": service_with_out_ft_billing_this_year, } -def create_returned_letter(service=None, reported_at=None, notification_id=None): - if not service: - service = create_service(service_name='a - with sms and letter') - returned_letter = ReturnedLetter( - service_id=service.id, - reported_at=reported_at or datetime.utcnow(), - notification_id=notification_id or uuid.uuid4(), - created_at=datetime.utcnow(), - ) - - db.session.add(returned_letter) - db.session.commit() - return returned_letter - - def create_service_contact_list( service=None, original_file_name='EmergencyContactList.xls', diff --git a/tests/app/job/test_rest.py b/tests/app/job/test_rest.py index 5d4e6e075..0b7041ef0 100644 --- a/tests/app/job/test_rest.py +++ b/tests/app/job/test_rest.py @@ -78,59 +78,6 @@ def test_cant_cancel_normal_job(client, sample_job, mocker): assert mock_update.call_count == 0 -@freeze_time('2019-06-13 13:00') -def test_cancel_letter_job_updates_notifications_and_job_to_cancelled(sample_letter_template, admin_request, mocker): - job = create_job(template=sample_letter_template, notification_count=1, job_status='finished') - create_notification(template=job.template, job=job, status='created') - - mock_get_job = mocker.patch('app.job.rest.dao_get_job_by_service_id_and_job_id', return_value=job) - mock_can_letter_job_be_cancelled = mocker.patch( - 'app.job.rest.can_letter_job_be_cancelled', return_value=(True, None) - ) - mock_dao_cancel_letter_job = mocker.patch('app.job.rest.dao_cancel_letter_job', return_value=1) - - response = admin_request.post( - 'job.cancel_letter_job', - service_id=job.service_id, - job_id=job.id, - ) - - mock_get_job.assert_called_once_with(job.service_id, str(job.id)) - mock_can_letter_job_be_cancelled.assert_called_once_with(job) - mock_dao_cancel_letter_job.assert_called_once_with(job) - - assert response == 1 - - -@freeze_time('2019-06-13 13:00') -def test_cancel_letter_job_does_not_call_cancel_if_can_letter_job_be_cancelled_returns_False( - sample_letter_template, admin_request, mocker -): - job = create_job(template=sample_letter_template, notification_count=2, job_status='finished') - create_notification(template=job.template, job=job, status='sending') - create_notification(template=job.template, job=job, status='created') - - mock_get_job = mocker.patch('app.job.rest.dao_get_job_by_service_id_and_job_id', return_value=job) - error_message = "Sorry, it's too late, letters have already been sent." - mock_can_letter_job_be_cancelled = mocker.patch( - 'app.job.rest.can_letter_job_be_cancelled', return_value=(False, error_message) - ) - mock_dao_cancel_letter_job = mocker.patch('app.job.rest.dao_cancel_letter_job') - - response = admin_request.post( - 'job.cancel_letter_job', - service_id=job.service_id, - job_id=job.id, - _expected_status=400 - ) - - mock_get_job.assert_called_once_with(job.service_id, str(job.id)) - mock_can_letter_job_be_cancelled.assert_called_once_with(job) - mock_dao_cancel_letter_job.assert_not_called - - assert response["message"] == "Sorry, it's too late, letters have already been sent." - - def test_create_unscheduled_job(client, sample_template, mocker, fake_uuid): mocker.patch('app.celery.tasks.process_job.apply_async') mocker.patch('app.job.rest.get_job_metadata_from_s3', return_value={ @@ -321,31 +268,6 @@ def test_create_job_returns_400_if_file_is_invalid( mock_job_dao.assert_not_called() -def test_create_job_returns_403_if_letter_template_type_and_service_in_trial( - client, fake_uuid, sample_trial_letter_template, mocker -): - mocker.patch('app.job.rest.get_job_metadata_from_s3', return_value={ - 'template_id': str(sample_trial_letter_template.id), - 'original_file_name': 'thisisatest.csv', - 'notification_count': '1', - }) - data = { - 'id': fake_uuid, - 'created_by': str(sample_trial_letter_template.created_by.id), - } - mock_job_dao = mocker.patch("app.dao.jobs_dao.dao_create_job") - auth_header = create_admin_authorization_header() - response = client.post('/service/{}/job'.format(sample_trial_letter_template.service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - assert response.status_code == 403 - resp_json = json.loads(response.get_data(as_text=True)) - assert resp_json['result'] == 'error' - assert resp_json['message'] == "Create letter job is not allowed for service in trial mode " - mock_job_dao.assert_not_called() - - @freeze_time("2016-01-01 11:09:00.061258") def test_should_not_create_scheduled_job_more_then_96_hours_in_the_future(client, sample_template, mocker, fake_uuid): scheduled_date = (datetime.utcnow() + timedelta(hours=96, minutes=1)).isoformat() diff --git a/tests/app/letter_branding/__init__.py b/tests/app/letter_branding/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/app/letter_branding/test_letter_branding_rest.py b/tests/app/letter_branding/test_letter_branding_rest.py deleted file mode 100644 index 2b957aa85..000000000 --- a/tests/app/letter_branding/test_letter_branding_rest.py +++ /dev/null @@ -1,80 +0,0 @@ -import json -import uuid - -from app.models import LetterBranding -from tests import create_admin_authorization_header -from tests.app.db import create_letter_branding - - -def test_get_all_letter_brands(client, notify_db_session): - hm_gov = create_letter_branding() - test_branding = create_letter_branding( - name='test branding', filename='test-branding', - ) - response = client.get('/letter-branding', headers=[create_admin_authorization_header()]) - assert response.status_code == 200 - json_response = json.loads(response.get_data(as_text=True)) - assert len(json_response) == 2 - for brand in json_response: - if brand['id'] == str(hm_gov.id): - assert hm_gov.serialize() == brand - elif brand['id'] == str(test_branding.id): - assert test_branding.serialize() == brand - else: - raise AssertionError() - - -def test_get_letter_branding_by_id(client, notify_db_session): - hm_gov = create_letter_branding() - create_letter_branding( - name='test domain', filename='test-domain' - ) - response = client.get('/letter-branding/{}'.format(hm_gov.id), headers=[create_admin_authorization_header()]) - - assert response.status_code == 200 - assert json.loads(response.get_data(as_text=True)) == hm_gov.serialize() - - -def test_get_letter_branding_by_id_returns_404_if_does_not_exist(client, notify_db_session): - response = client.get('/letter-branding/{}'.format(uuid.uuid4()), headers=[create_admin_authorization_header()]) - assert response.status_code == 404 - - -def test_create_letter_branding(client, notify_db_session): - form = { - 'name': 'super brand', - 'filename': 'super-brand' - } - - response = client.post( - '/letter-branding', - data=json.dumps(form), - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()], - ) - - assert response.status_code == 201 - json_response = json.loads(response.get_data(as_text=True)) - letter_brand = LetterBranding.query.get(json_response['id']) - assert letter_brand.name == form['name'] - assert letter_brand.filename == form['filename'] - - -def test_update_letter_branding_returns_400_when_integrity_error_is_thrown( - client, notify_db_session -): - create_letter_branding(name='duplicate', filename='duplicate') - brand_to_update = create_letter_branding(name='super brand', filename='super brand') - form = { - 'name': 'duplicate', - 'filename': 'super-brand', - } - - response = client.post( - '/letter-branding/{}'.format(brand_to_update.id), - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()], - data=json.dumps(form) - ) - - assert response.status_code == 400 - json_resp = json.loads(response.get_data(as_text=True)) - assert json_resp['message'] == {"name": ["Name already in use"]} diff --git a/tests/app/letters/__init__.py b/tests/app/letters/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/app/letters/test_letter_utils.py b/tests/app/letters/test_letter_utils.py deleted file mode 100644 index deb3639f3..000000000 --- a/tests/app/letters/test_letter_utils.py +++ /dev/null @@ -1,443 +0,0 @@ -from datetime import datetime - -import boto3 -import dateutil -import pytest -from flask import current_app -from freezegun import freeze_time -from moto import mock_s3 - -from app.letters.utils import ( - LetterPDFNotFound, - ScanErrorType, - find_letter_pdf_in_s3, - generate_letter_pdf_filename, - get_billable_units_for_letter_page_count, - get_bucket_name_and_prefix_for_notification, - get_folder_name, - get_letter_pdf_and_metadata, - letter_print_day, - move_failed_pdf, - move_sanitised_letter_to_test_or_live_pdf_bucket, - upload_letter_pdf, -) -from app.models import ( - KEY_TYPE_NORMAL, - KEY_TYPE_TEST, - NOTIFICATION_VALIDATION_FAILED, - PRECOMPILED_TEMPLATE_NAME, -) -from tests.app.db import create_notification - -FROZEN_DATE_TIME = "2018-03-14 17:00:00" - - -@pytest.skip(reason="Skipping letter-related functionality for now", allow_module_level=True) -@pytest.fixture(name='sample_precompiled_letter_notification') -def _sample_precompiled_letter_notification(sample_letter_notification): - sample_letter_notification.template.hidden = True - sample_letter_notification.template.name = PRECOMPILED_TEMPLATE_NAME - sample_letter_notification.reference = 'foo' - with freeze_time(FROZEN_DATE_TIME): - sample_letter_notification.created_at = datetime.utcnow() - sample_letter_notification.updated_at = datetime.utcnow() - return sample_letter_notification - - -@pytest.fixture(name='sample_precompiled_letter_notification_using_test_key') -def _sample_precompiled_letter_notification_using_test_key(sample_precompiled_letter_notification): - sample_precompiled_letter_notification.key_type = KEY_TYPE_TEST - return sample_precompiled_letter_notification - - -@mock_s3 -def test_find_letter_pdf_in_s3_returns_object(sample_notification): - bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME'] - s3 = boto3.client('s3', region_name='eu-west-1') - s3.create_bucket( - Bucket=bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - - _, prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - s3.put_object(Bucket=bucket_name, Key=f'{prefix}-and-then-some', Body=b'f') - - assert find_letter_pdf_in_s3(sample_notification).key == f'{prefix}-and-then-some' - - -@mock_s3 -def test_find_letter_pdf_in_s3_raises_if_not_found(sample_notification): - bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME'] - s3 = boto3.client('s3', region_name='eu-west-1') - s3.create_bucket( - Bucket=bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - - with pytest.raises(LetterPDFNotFound): - find_letter_pdf_in_s3(sample_notification) - - -@pytest.mark.parametrize('created_at,folder', [ - (datetime(2017, 1, 1, 17, 29), '2017-01-01'), - (datetime(2017, 1, 1, 17, 31), '2017-01-02'), -]) -def test_get_bucket_name_and_prefix_for_notification_valid_notification(sample_notification, created_at, folder): - sample_notification.created_at = created_at - sample_notification.updated_at = created_at - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config['LETTERS_PDF_BUCKET_NAME'] - assert bucket_prefix == '{folder}/NOTIFY.{reference}'.format( - folder=folder, - reference=sample_notification.reference - ).upper() - - -def test_get_bucket_name_and_prefix_for_notification_is_tomorrow_after_17_30(sample_notification): - sample_notification.created_at = datetime(2019, 8, 1, 17, 35) - sample_notification.sent_at = datetime(2019, 8, 2, 17, 45) - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config['LETTERS_PDF_BUCKET_NAME'] - assert bucket_prefix == '{folder}/NOTIFY.{reference}'.format( - folder='2019-08-02', - reference=sample_notification.reference - ).upper() - - -def test_get_bucket_name_and_prefix_for_notification_is_today_before_17_30(sample_notification): - sample_notification.created_at = datetime(2019, 8, 1, 12, 00) - sample_notification.updated_at = datetime(2019, 8, 2, 12, 00) - sample_notification.sent_at = datetime(2019, 8, 3, 12, 00) - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_notification) - - assert bucket == current_app.config['LETTERS_PDF_BUCKET_NAME'] - assert bucket_prefix == '{folder}/NOTIFY.{reference}'.format( - folder='2019-08-01', - reference=sample_notification.reference - ).upper() - - -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_notification_precompiled_letter_using_test_key( - sample_precompiled_letter_notification_using_test_key -): - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification( - sample_precompiled_letter_notification_using_test_key) - - assert bucket == current_app.config['TEST_LETTERS_BUCKET_NAME'] - assert bucket_prefix == 'NOTIFY.{}'.format( - sample_precompiled_letter_notification_using_test_key.reference).upper() - - -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_notification_templated_letter_using_test_key(sample_letter_notification): - sample_letter_notification.key_type = KEY_TYPE_TEST - - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_letter_notification) - - assert bucket == current_app.config['TEST_LETTERS_BUCKET_NAME'] - assert bucket_prefix == 'NOTIFY.{}'.format(sample_letter_notification.reference).upper() - - -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_failed_validation(sample_precompiled_letter_notification): - sample_precompiled_letter_notification.status = NOTIFICATION_VALIDATION_FAILED - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification(sample_precompiled_letter_notification) - - assert bucket == current_app.config['INVALID_PDF_BUCKET_NAME'] - assert bucket_prefix == 'NOTIFY.{}'.format( - sample_precompiled_letter_notification.reference).upper() - - -@freeze_time(FROZEN_DATE_TIME) -def test_get_bucket_name_and_prefix_for_test_noti_with_failed_validation( - sample_precompiled_letter_notification_using_test_key -): - sample_precompiled_letter_notification_using_test_key.status = NOTIFICATION_VALIDATION_FAILED - bucket, bucket_prefix = get_bucket_name_and_prefix_for_notification( - sample_precompiled_letter_notification_using_test_key - ) - - assert bucket == current_app.config['INVALID_PDF_BUCKET_NAME'] - assert bucket_prefix == 'NOTIFY.{}'.format( - sample_precompiled_letter_notification_using_test_key.reference).upper() - - -def test_get_bucket_name_and_prefix_for_notification_invalid_notification(): - with pytest.raises(AttributeError): - get_bucket_name_and_prefix_for_notification(None) - - -@pytest.mark.parametrize('postage,expected_postage', [ - ('second', 2), - ('first', 1), -]) -def test_generate_letter_pdf_filename_returns_correct_postage_for_filename( - notify_api, postage, expected_postage): - created_at = datetime(2017, 12, 4, 17, 29) - filename = generate_letter_pdf_filename(reference='foo', created_at=created_at, postage=postage) - - assert filename == '2017-12-04/NOTIFY.FOO.D.{}.C.20171204172900.PDF'.format(expected_postage) - - -def test_generate_letter_pdf_filename_returns_correct_filename_for_test_letters( - notify_api, mocker): - created_at = datetime(2017, 12, 4, 17, 29) - filename = generate_letter_pdf_filename( - reference='foo', - created_at=created_at, - ignore_folder=True - ) - - assert filename == 'NOTIFY.FOO.D.2.C.20171204172900.PDF' - - -def test_generate_letter_pdf_filename_returns_tomorrows_filename(notify_api, mocker): - created_at = datetime(2017, 12, 4, 17, 31) - filename = generate_letter_pdf_filename(reference='foo', created_at=created_at) - - assert filename == '2017-12-05/NOTIFY.FOO.D.2.C.20171204173100.PDF' - - -@mock_s3 -@pytest.mark.parametrize('bucket_config_name,filename_format', [ - ('TEST_LETTERS_BUCKET_NAME', 'NOTIFY.FOO.D.2.C.%Y%m%d%H%M%S.PDF'), - ('LETTERS_PDF_BUCKET_NAME', '%Y-%m-%d/NOTIFY.FOO.D.2.C.%Y%m%d%H%M%S.PDF') -]) -@freeze_time(FROZEN_DATE_TIME) -def test_get_letter_pdf_gets_pdf_from_correct_bucket( - sample_precompiled_letter_notification_using_test_key, - bucket_config_name, - filename_format -): - if bucket_config_name == 'LETTERS_PDF_BUCKET_NAME': - sample_precompiled_letter_notification_using_test_key.key_type = KEY_TYPE_NORMAL - - bucket_name = current_app.config[bucket_config_name] - filename = datetime.utcnow().strftime(filename_format) - conn = boto3.resource('s3', region_name='eu-west-1') - conn.create_bucket( - Bucket=bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - s3 = boto3.client('s3', region_name='eu-west-1') - s3.put_object(Bucket=bucket_name, Key=filename, Body=b'pdf_content') - - file_data, metadata = get_letter_pdf_and_metadata(sample_precompiled_letter_notification_using_test_key) - - assert file_data == b'pdf_content' - - -@pytest.mark.parametrize('is_precompiled_letter,bucket_config_name', [ - (False, 'LETTERS_PDF_BUCKET_NAME'), - (True, 'LETTERS_SCAN_BUCKET_NAME') -]) -def test_upload_letter_pdf_to_correct_bucket( - sample_letter_notification, mocker, is_precompiled_letter, bucket_config_name -): - if is_precompiled_letter: - sample_letter_notification.template.hidden = True - sample_letter_notification.template.name = PRECOMPILED_TEMPLATE_NAME - - mock_s3 = mocker.patch('app.letters.utils.s3upload') - - filename = generate_letter_pdf_filename( - reference=sample_letter_notification.reference, - created_at=sample_letter_notification.created_at, - ignore_folder=is_precompiled_letter - ) - - upload_letter_pdf(sample_letter_notification, b'\x00\x01', precompiled=is_precompiled_letter) - - mock_s3.assert_called_once_with( - bucket_name=current_app.config[bucket_config_name], - file_location=filename, - filedata=b'\x00\x01', - region=current_app.config['AWS_REGION'] - ) - - -@pytest.mark.parametrize('postage,expected_postage', [ - ('second', 2), - ('first', 1) -]) -def test_upload_letter_pdf_uses_postage_from_notification( - sample_letter_template, mocker, postage, expected_postage -): - letter_notification = create_notification(template=sample_letter_template, postage=postage) - mock_s3 = mocker.patch('app.letters.utils.s3upload') - - filename = generate_letter_pdf_filename( - reference=letter_notification.reference, - created_at=letter_notification.created_at, - ignore_folder=False, - postage=letter_notification.postage - ) - - upload_letter_pdf(letter_notification, b'\x00\x01', precompiled=False) - - mock_s3.assert_called_once_with( - bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'], - file_location=filename, - filedata=b'\x00\x01', - region=current_app.config['AWS_REGION'] - ) - - -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_move_failed_pdf_error(notify_api): - filename = 'test.pdf' - bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME'] - - conn = boto3.resource('s3', region_name='eu-west-1') - bucket = conn.create_bucket( - Bucket=bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - - s3 = boto3.client('s3', region_name='eu-west-1') - s3.put_object(Bucket=bucket_name, Key=filename, Body=b'pdf_content') - - move_failed_pdf(filename, ScanErrorType.ERROR) - - assert 'ERROR/' + filename in [o.key for o in bucket.objects.all()] - assert filename not in [o.key for o in bucket.objects.all()] - - -@mock_s3 -@freeze_time(FROZEN_DATE_TIME) -def test_move_failed_pdf_scan_failed(notify_api): - filename = 'test.pdf' - bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME'] - - conn = boto3.resource('s3', region_name='eu-west-1') - bucket = conn.create_bucket( - Bucket=bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - - s3 = boto3.client('s3', region_name='eu-west-1') - s3.put_object(Bucket=bucket_name, Key=filename, Body=b'pdf_content') - - move_failed_pdf(filename, ScanErrorType.FAILURE) - - assert 'FAILURE/' + filename in [o.key for o in bucket.objects.all()] - assert filename not in [o.key for o in bucket.objects.all()] - - -@pytest.mark.parametrize("timestamp, expected_folder_name", - [("2018-04-01 17:50:00", "2018-04-02/"), - ("2018-07-02 16:29:00", "2018-07-02/"), - ("2018-07-02 16:30:00", "2018-07-02/"), - ("2018-07-02 16:31:00", "2018-07-03/"), - ("2018-01-02 16:31:00", "2018-01-02/"), - ("2018-01-02 17:31:00", "2018-01-03/"), - - ("2018-07-02 22:30:00", "2018-07-03/"), - ("2018-07-02 23:30:00", "2018-07-03/"), - ("2018-07-03 00:30:00", "2018-07-03/"), - - ("2018-01-02 22:30:00", "2018-01-03/"), - ("2018-01-02 23:30:00", "2018-01-03/"), - ("2018-01-03 00:30:00", "2018-01-03/"), - ]) -def test_get_folder_name_in_british_summer_time(notify_api, timestamp, expected_folder_name): - timestamp = dateutil.parser.parse(timestamp) - folder_name = get_folder_name(created_at=timestamp) - assert folder_name == expected_folder_name - - -@mock_s3 -def test_move_sanitised_letter_to_live_pdf_bucket(notify_api, mocker): - filename = 'my_letter.pdf' - source_bucket_name = current_app.config['LETTER_SANITISE_BUCKET_NAME'] - target_bucket_name = current_app.config['LETTERS_PDF_BUCKET_NAME'] - - conn = boto3.resource('s3', region_name='eu-west-1') - source_bucket = conn.create_bucket( - Bucket=source_bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - target_bucket = conn.create_bucket( - Bucket=target_bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - - s3 = boto3.client('s3', region_name='eu-west-1') - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b'pdf_content') - - move_sanitised_letter_to_test_or_live_pdf_bucket( - filename=filename, - is_test_letter=False, - created_at=datetime.utcnow(), - new_filename=filename - ) - - assert not [x for x in source_bucket.objects.all()] - assert len([x for x in target_bucket.objects.all()]) == 1 - - -@mock_s3 -def test_move_sanitised_letter_to_test_pdf_bucket(notify_api, mocker): - filename = 'my_letter.pdf' - source_bucket_name = current_app.config['LETTER_SANITISE_BUCKET_NAME'] - target_bucket_name = current_app.config['TEST_LETTERS_BUCKET_NAME'] - - conn = boto3.resource('s3', region_name='eu-west-1') - source_bucket = conn.create_bucket( - Bucket=source_bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - target_bucket = conn.create_bucket( - Bucket=target_bucket_name, - CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'} - ) - - s3 = boto3.client('s3', region_name='eu-west-1') - s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b'pdf_content') - - move_sanitised_letter_to_test_or_live_pdf_bucket( - filename=filename, - is_test_letter=True, - created_at=datetime.utcnow(), - new_filename=filename - ) - - assert not [x for x in source_bucket.objects.all()] - assert len([x for x in target_bucket.objects.all()]) == 1 - - -@freeze_time('2017-07-07 20:00:00') -def test_letter_print_day_returns_today_if_letter_was_printed_after_1730_yesterday(): - created_at = datetime(2017, 7, 6, 17, 30) - assert letter_print_day(created_at) == 'today' - - -@freeze_time('2017-07-07 16:30:00') -def test_letter_print_day_returns_today_if_letter_was_printed_today(): - created_at = datetime(2017, 7, 7, 12, 0) - assert letter_print_day(created_at) == 'today' - - -@pytest.mark.parametrize('created_at, formatted_date', [ - (datetime(2017, 7, 5, 16, 30), 'on 6 July'), - (datetime(2017, 7, 6, 16, 29), 'on 6 July'), - (datetime(2016, 8, 8, 10, 00), 'on 8 August'), - (datetime(2016, 12, 12, 17, 29), 'on 12 December'), - (datetime(2016, 12, 12, 17, 30), 'on 13 December'), -]) -@freeze_time('2017-07-07 16:30:00') -def test_letter_print_day_returns_formatted_date_if_letter_printed_before_1730_yesterday(created_at, formatted_date): - assert letter_print_day(created_at) == formatted_date - - -@pytest.mark.parametrize('number_of_pages, expected_billable_units', [(2, 1), (3, 2), (10, 5)]) -def test_get_billable_units_for_letter_page_count(number_of_pages, expected_billable_units): - result = get_billable_units_for_letter_page_count(number_of_pages) - assert result == expected_billable_units diff --git a/tests/app/letters/test_returned_letters.py b/tests/app/letters/test_returned_letters.py deleted file mode 100644 index cdda37c33..000000000 --- a/tests/app/letters/test_returned_letters.py +++ /dev/null @@ -1,42 +0,0 @@ -import pytest - - -@pytest.mark.parametrize('status, references', [ - (200, ["1234567890ABCDEF", "1234567890ABCDEG"]), - (400, ["1234567890ABCDEFG", "1234567890ABCDEG"]), - (400, ["1234567890ABCDE", "1234567890ABCDEG"]), - (400, ["1234567890ABCDE\u26d4", "1234567890ABCDEG"]), - (400, ["NOTIFY0001234567890ABCDEF", "1234567890ABCDEG"]), -]) -def test_process_returned_letters(status, references, admin_request, mocker): - mock_celery = mocker.patch("app.letters.rest.process_returned_letters_list.apply_async") - - response = admin_request.post( - 'letter-job.create_process_returned_letters_job', - _data={"references": references}, - _expected_status=status - ) - - if status != 200: - assert '{} does not match'.format(references[0]) in response['errors'][0]['message'] - else: - mock_celery.assert_called_once_with(args=(references,), queue='database-tasks', compression='zlib') - - -def test_process_returned_letters_splits_tasks_up(admin_request, mocker): - mock_celery = mocker.patch("app.letters.rest.process_returned_letters_list.apply_async") - mocker.patch("app.letters.rest.MAX_REFERENCES_PER_TASK", 3) - - references = [f'{x:016}' for x in range(10)] - - admin_request.post( - 'letter-job.create_process_returned_letters_job', - _data={"references": references}, - ) - - assert mock_celery.call_count == 4 - - assert mock_celery.call_args_list[0][1]['args'][0] == ['0000000000000000', '0000000000000001', '0000000000000002'] - assert mock_celery.call_args_list[1][1]['args'][0] == ['0000000000000003', '0000000000000004', '0000000000000005'] - assert mock_celery.call_args_list[2][1]['args'][0] == ['0000000000000006', '0000000000000007', '0000000000000008'] - assert mock_celery.call_args_list[3][1]['args'][0] == ['0000000000000009'] diff --git a/tests/app/notifications/test_notifications_letter_callbacks.py b/tests/app/notifications/test_notifications_letter_callbacks.py deleted file mode 100644 index c0d1a1321..000000000 --- a/tests/app/notifications/test_notifications_letter_callbacks.py +++ /dev/null @@ -1,99 +0,0 @@ -import pytest -from flask import json - - -def dvla_post(client, data): - return client.post( - path='/notifications/letter/dvla', - data=data, - headers=[('Content-Type', 'application/json')] - ) - - -def test_dvla_callback_returns_400_with_invalid_request(client): - data = json.dumps({"foo": "bar"}) - response = dvla_post(client, data) - assert response.status_code == 400 - - -def test_dvla_callback_autoconfirms_subscription(client, mocker): - autoconfirm_mock = mocker.patch('app.notifications.notifications_letter_callback.autoconfirm_subscription') - - data = _sns_confirmation_callback() - response = dvla_post(client, data) - assert response.status_code == 200 - assert autoconfirm_mock.called - - -def test_dvla_callback_autoconfirm_does_not_call_update_letter_notifications_task(client, mocker): - autoconfirm_mock = mocker.patch('app.notifications.notifications_letter_callback.autoconfirm_subscription') - update_task = \ - mocker.patch('app.notifications.notifications_letter_callback.update_letter_notifications_statuses.apply_async') - - data = _sns_confirmation_callback() - response = dvla_post(client, data) - - assert response.status_code == 200 - assert autoconfirm_mock.called - assert not update_task.called - - -def test_dvla_callback_calls_does_not_update_letter_notifications_task_with_invalid_file_type(client, mocker): - update_task = \ - mocker.patch('app.notifications.notifications_letter_callback.update_letter_notifications_statuses.apply_async') - - data = _sample_sns_s3_callback("bar.txt") - response = dvla_post(client, data) - - assert response.status_code == 200 - assert not update_task.called - - -@pytest.mark.parametrize("filename", - ['Notify-20170411153023-rs.txt', 'Notify-20170411153023-rsp.txt']) -def test_dvla_rs_and_rsp_txt_file_callback_calls_update_letter_notifications_task(client, mocker, filename): - update_task = mocker.patch( - 'app.notifications.notifications_letter_callback.update_letter_notifications_statuses.apply_async') - daily_sorted_counts_task = mocker.patch( - 'app.notifications.notifications_letter_callback.record_daily_sorted_counts.apply_async') - data = _sample_sns_s3_callback(filename) - response = dvla_post(client, data) - - assert response.status_code == 200 - assert update_task.called - update_task.assert_called_with([filename], queue='notify-internal-tasks') - daily_sorted_counts_task.assert_called_with([filename], queue='notify-internal-tasks') - - -def test_dvla_ack_calls_does_not_call_letter_notifications_task(client, mocker): - update_task = mocker.patch( - 'app.notifications.notifications_letter_callback.update_letter_notifications_statuses.apply_async') - daily_sorted_counts_task = mocker.patch( - 'app.notifications.notifications_letter_callback.record_daily_sorted_counts.apply_async') - data = _sample_sns_s3_callback('bar.ack.txt') - response = dvla_post(client, data) - - assert response.status_code == 200 - update_task.assert_not_called() - daily_sorted_counts_task.assert_not_called() - - -def _sample_sns_s3_callback(filename): - message_contents = '''{"Records":[{"eventVersion":"2.0","eventSource":"aws:s3","awsRegion":"eu-west-1","eventTime":"2017-05-16T11:38:41.073Z","eventName":"ObjectCreated:Put","userIdentity":{"principalId":"some-p-id"},"requestParameters":{"sourceIPAddress":"8.8.8.8"},"responseElements":{"x-amz-request-id":"some-r-id","x-amz-id-2":"some-x-am-id"},"s3":{"s3SchemaVersion":"1.0","configurationId":"some-c-id","bucket":{"name":"some-bucket","ownerIdentity":{"principalId":"some-p-id"},"arn":"some-bucket-arn"}, - "object":{"key":"%s"}}}]}''' % (filename) # noqa - return json.dumps({ - "SigningCertURL": "foo.pem", - "UnsubscribeURL": "bar", - "Signature": "some-signature", - "Type": "Notification", - "Timestamp": "2016-05-03T08:35:12.884Z", - "SignatureVersion": "1", - "MessageId": "6adbfe0a-d610-509a-9c47-af894e90d32d", - "Subject": "Amazon S3 Notification", - "TopicArn": "sample-topic-arn", - "Message": message_contents - }) - - -def _sns_confirmation_callback(): - return b'{\n "Type": "SubscriptionConfirmation",\n "MessageId": "165545c9-2a5c-472c-8df2-7ff2be2b3b1b",\n "Token": "2336412f37fb687f5d51e6e241d09c805a5a57b30d712f794cc5f6a988666d92768dd60a747ba6f3beb71854e285d6ad02428b09ceece29417f1f02d609c582afbacc99c583a916b9981dd2728f4ae6fdb82efd087cc3b7849e05798d2d2785c03b0879594eeac82c01f235d0e717736",\n "TopicArn": "arn:aws:sns:us-west-2:123456789012:MyTopic",\n "Message": "You have chosen to subscribe to the topic arn:aws:sns:us-west-2:123456789012:MyTopic.\\nTo confirm the subscription, visit the SubscribeURL included in this message.",\n "SubscribeURL": "https://sns.us-west-2.amazonaws.com/?Action=ConfirmSubscription&TopicArn=arn:aws:sns:us-west-2:123456789012:MyTopic&Token=2336412f37fb687f5d51e6e241d09c805a5a57b30d712f794cc5f6a988666d92768dd60a747ba6f3beb71854e285d6ad02428b09ceece29417f1f02d609c582afbacc99c583a916b9981dd2728f4ae6fdb82efd087cc3b7849e05798d2d2785c03b0879594eeac82c01f235d0e717736",\n "Timestamp": "2012-04-26T20:45:04.751Z",\n "SignatureVersion": "1",\n "Signature": "EXAMPLEpH+DcEwjAPg8O9mY8dReBSwksfg2S7WKQcikcNKWLQjwu6A4VbeS0QHVCkhRS7fUQvi2egU3N858fiTDN6bkkOxYDVrY0Ad8L10Hs3zH81mtnPk5uvvolIC1CXGu43obcgFxeL3khZl8IKvO61GWB6jI9b5+gLPoBc1Q=",\n "SigningCertURL": "https://sns.us-west-2.amazonaws.com/SimpleNotificationService-f3ecfb7224c7233fe7bb5f59f96de52f.pem"\n}' # noqa diff --git a/tests/app/notifications/test_process_letter_notifications.py b/tests/app/notifications/test_process_letter_notifications.py deleted file mode 100644 index 9bebba06a..000000000 --- a/tests/app/notifications/test_process_letter_notifications.py +++ /dev/null @@ -1,89 +0,0 @@ -from app.models import LETTER_TYPE, NOTIFICATION_CREATED, Notification -from app.notifications.process_letter_notifications import ( - create_letter_notification, -) -from app.serialised_models import SerialisedTemplate - - -def test_create_letter_notification_creates_notification(sample_letter_template, sample_api_key): - data = { - 'personalisation': { - 'address_line_1': 'The Queen', - 'address_line_2': 'Buckingham Palace', - 'postcode': 'SW1 1AA', - } - } - - template = SerialisedTemplate.from_id_and_service_id( - sample_letter_template.id, sample_letter_template.service_id - ) - - notification = create_letter_notification( - data, - template, - sample_letter_template.service, - sample_api_key, - NOTIFICATION_CREATED, - ) - - assert notification == Notification.query.one() - assert notification.job is None - assert notification.status == NOTIFICATION_CREATED - assert notification.template_id == sample_letter_template.id - assert notification.template_version == sample_letter_template.version - assert notification.api_key == sample_api_key - assert notification.notification_type == LETTER_TYPE - assert notification.key_type == sample_api_key.key_type - assert notification.reference is not None - assert notification.client_reference is None - assert notification.postage == 'second' - - -def test_create_letter_notification_sets_reference(sample_letter_template, sample_api_key): - data = { - 'personalisation': { - 'address_line_1': 'The Queen', - 'address_line_2': 'Buckingham Palace', - 'postcode': 'SW1 1AA', - }, - 'reference': 'foo' - } - - template = SerialisedTemplate.from_id_and_service_id( - sample_letter_template.id, sample_letter_template.service_id - ) - - notification = create_letter_notification( - data, - template, - sample_letter_template.service, - sample_api_key, - NOTIFICATION_CREATED, - ) - - assert notification.client_reference == 'foo' - - -def test_create_letter_notification_sets_billable_units(sample_letter_template, sample_api_key): - data = { - 'personalisation': { - 'address_line_1': 'The Queen', - 'address_line_2': 'Buckingham Palace', - 'postcode': 'SW1 1AA', - }, - } - - template = SerialisedTemplate.from_id_and_service_id( - sample_letter_template.id, sample_letter_template.service_id - ) - - notification = create_letter_notification( - data, - template, - sample_letter_template.service, - sample_api_key, - NOTIFICATION_CREATED, - billable_units=3, - ) - - assert notification.billable_units == 3 diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index 98e698888..3af595b1e 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -11,7 +11,7 @@ from notifications_utils.recipients import ( ) from sqlalchemy.exc import SQLAlchemyError -from app.models import LETTER_TYPE, Notification, NotificationHistory +from app.models import SMS_TYPE, Notification, NotificationHistory from app.notifications.process_notifications import ( create_content_for_notification, persist_notification, @@ -251,11 +251,9 @@ def test_persist_notification_sets_daily_limit_cache_if_one_does_not_exists( (True, None, 'sms', 'normal', 'research-mode-tasks', 'provider_tasks.deliver_sms'), (True, None, 'email', 'normal', 'research-mode-tasks', 'provider_tasks.deliver_email'), (True, None, 'email', 'team', 'research-mode-tasks', 'provider_tasks.deliver_email'), - (True, None, 'letter', 'normal', 'research-mode-tasks', 'letters_pdf_tasks.get_pdf_for_templated_letter'), (False, None, 'sms', 'normal', 'send-sms-tasks', 'provider_tasks.deliver_sms'), (False, None, 'email', 'normal', 'send-email-tasks', 'provider_tasks.deliver_email'), (False, None, 'sms', 'team', 'send-sms-tasks', 'provider_tasks.deliver_sms'), - (False, None, 'letter', 'normal', 'create-letters-pdf-tasks', 'letters_pdf_tasks.get_pdf_for_templated_letter'), (False, None, 'sms', 'test', 'research-mode-tasks', 'provider_tasks.deliver_sms'), (True, 'notify-internal-tasks', 'email', 'normal', 'research-mode-tasks', 'provider_tasks.deliver_email'), (False, 'notify-internal-tasks', 'sms', 'normal', 'notify-internal-tasks', 'provider_tasks.deliver_sms'), @@ -452,50 +450,16 @@ def test_persist_email_notification_stores_normalised_email( assert persisted_notification.normalised_to == expected_recipient_normalised -@pytest.mark.parametrize( - "postage_argument, template_postage, expected_postage", - [ - ("second", "first", "second"), - ("first", "first", "first"), - ("first", "second", "first") - ] -) -def test_persist_letter_notification_finds_correct_postage( - mocker, - postage_argument, - template_postage, - expected_postage, - sample_service_full_permissions, - sample_api_key, -): - template = create_template(sample_service_full_permissions, template_type=LETTER_TYPE, postage=template_postage) - mocker.patch('app.dao.templates_dao.dao_get_template_by_id', return_value=template) - persist_notification( - template_id=template.id, - template_version=template.version, - recipient="Jane Doe, 10 Downing Street, London", - service=sample_service_full_permissions, - personalisation=None, - notification_type=LETTER_TYPE, - api_key_id=sample_api_key.id, - key_type=sample_api_key.key_type, - postage=postage_argument - ) - persisted_notification = Notification.query.all()[0] - - assert persisted_notification.postage == expected_postage - - def test_persist_notification_with_billable_units_stores_correct_info( mocker ): - service = create_service(service_permissions=[LETTER_TYPE]) - template = create_template(service, template_type=LETTER_TYPE) + service = create_service(service_permissions=[SMS_TYPE]) + template = create_template(service, template_type=SMS_TYPE) mocker.patch('app.dao.templates_dao.dao_get_template_by_id', return_value=template) persist_notification( template_id=template.id, template_version=template.version, - recipient="123 Main Street", + recipient="+12028675309", service=template.service, personalisation=None, notification_type=template.template_type, @@ -506,22 +470,3 @@ def test_persist_notification_with_billable_units_stores_correct_info( persisted_notification = Notification.query.all()[0] assert persisted_notification.billable_units == 3 - - -@pytest.mark.parametrize('postage', ['europe', 'rest-of-world']) -def test_persist_notification_for_international_letter(sample_letter_template, postage): - notification = persist_notification( - template_id=sample_letter_template.id, - template_version=sample_letter_template.version, - recipient="123 Main Street", - service=sample_letter_template.service, - personalisation=None, - notification_type=sample_letter_template.template_type, - api_key_id=None, - key_type="normal", - billable_units=3, - postage=postage, - ) - persisted_notification = Notification.query.get(notification.id) - assert persisted_notification.postage == postage - assert persisted_notification.international diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index 89577b3ad..d9e9fb8cb 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ -60,6 +60,7 @@ def test_receive_notification_returns_received_to_sns(client, mocker, sample_ser [str(inbound_sms_id), str(sample_service_full_permissions.id)], queue="notify-internal-tasks") +# TODO: figure out why creating a service first causes a db error @pytest.mark.parametrize('permissions', [ [SMS_TYPE], [INBOUND_SMS_TYPE], @@ -71,10 +72,10 @@ def test_receive_notification_from_sns_without_permissions_does_not_persist( permissions ): mocked = mocker.patch("app.notifications.receive_notifications.tasks.send_inbound_sms_to_service.apply_async") - create_service_with_inbound_number(inbound_number='07111111111', service_permissions=permissions) + # create_service_with_inbound_number(inbound_number='12025550104', service_permissions=permissions) data = { "ID": "1234", - "MSISDN": "07111111111", + "MSISDN": "12025550104", "Message": "Some message to notify", "Trigger": "Trigger?", "Number": "testing", diff --git a/tests/app/notifications/test_rest.py b/tests/app/notifications/test_rest.py index 44b893363..15d0a501c 100644 --- a/tests/app/notifications/test_rest.py +++ b/tests/app/notifications/test_rest.py @@ -13,20 +13,17 @@ from tests import create_service_authorization_header from tests.app.db import create_api_key, create_notification -@pytest.mark.parametrize('type', ('email', 'sms', 'letter')) +@pytest.mark.parametrize('type', ('email', 'sms')) def test_get_notification_by_id( client, sample_notification, sample_email_notification, - sample_letter_notification, type ): if type == 'email': notification_to_get = sample_email_notification if type == 'sms': notification_to_get = sample_notification - if type == 'letter': - notification_to_get = sample_letter_notification auth_header = create_service_authorization_header(service_id=notification_to_get.service_id) response = client.get( diff --git a/tests/app/notifications/test_validators.py b/tests/app/notifications/test_validators.py index f354d184e..f18c327fb 100644 --- a/tests/app/notifications/test_validators.py +++ b/tests/app/notifications/test_validators.py @@ -7,7 +7,7 @@ from notifications_utils import SMS_CHAR_COUNT_LIMIT import app from app.dao import templates_dao -from app.models import EMAIL_TYPE, INTERNATIONAL_LETTERS, LETTER_TYPE, SMS_TYPE +from app.models import EMAIL_TYPE, SMS_TYPE from app.notifications.process_notifications import ( create_content_for_notification, ) @@ -18,14 +18,12 @@ from app.notifications.validators import ( check_rate_limiting, check_reply_to, check_service_email_reply_to_id, - check_service_letter_contact_id, check_service_over_api_rate_limit, check_service_over_daily_message_limit, check_service_sms_sender_id, check_template_is_active, check_template_is_for_notification_type, service_can_send_to_recipient, - validate_address, validate_and_format_recipient, validate_template, ) @@ -38,7 +36,6 @@ from app.utils import get_template_instance from app.v2.errors import BadRequestError, RateLimitError, TooManyRequestsError from tests.app.db import ( create_api_key, - create_letter_contact, create_reply_to_email, create_service, create_service_guest_list, @@ -267,7 +264,7 @@ def test_service_can_send_to_recipient_fails_when_mobile_number_is_not_on_team(s @pytest.mark.parametrize('char_count', [612, 0, 494, 200, 918]) @pytest.mark.parametrize('show_prefix', [True, False]) -@pytest.mark.parametrize('template_type', ['sms', 'email', 'letter']) +@pytest.mark.parametrize('template_type', ['sms', 'email']) def test_check_is_message_too_long_passes(notify_db_session, show_prefix, char_count, template_type): service = create_service(prefix_sms=show_prefix) t = create_template(service=service, content='a' * char_count, template_type=template_type) @@ -503,7 +500,7 @@ def test_validate_and_format_recipient_fails_when_no_recipient(): assert e.value.message == "Recipient can't be empty" -@pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter']) +@pytest.mark.parametrize('notification_type', ['sms', 'email']) def test_check_service_email_reply_to_id_where_reply_to_id_is_none(notification_type): assert check_service_email_reply_to_id(None, None, notification_type) is None @@ -530,7 +527,7 @@ def test_check_service_email_reply_to_id_where_reply_to_id_is_not_found(sample_s .format(fake_uuid, sample_service.id) -@pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter']) +@pytest.mark.parametrize('notification_type', ['sms', 'email']) def test_check_service_sms_sender_id_where_sms_sender_id_is_none(notification_type): assert check_service_sms_sender_id(None, None, notification_type) is None @@ -557,33 +554,7 @@ def test_check_service_sms_sender_id_where_sms_sender_is_not_found(sample_servic .format(fake_uuid, sample_service.id) -def test_check_service_letter_contact_id_where_letter_contact_id_is_none(): - assert check_service_letter_contact_id(None, None, 'letter') is None - - -def test_check_service_letter_contact_id_where_letter_contact_id_is_found(sample_service): - letter_contact = create_letter_contact(service=sample_service, contact_block='123456') - assert check_service_letter_contact_id(sample_service.id, letter_contact.id, LETTER_TYPE) == '123456' - - -def test_check_service_letter_contact_id_where_service_id_is_not_found(sample_service, fake_uuid): - letter_contact = create_letter_contact(service=sample_service, contact_block='123456') - with pytest.raises(BadRequestError) as e: - check_service_letter_contact_id(fake_uuid, letter_contact.id, LETTER_TYPE) - assert e.value.status_code == 400 - assert e.value.message == 'letter_contact_id {} does not exist in database for service id {}' \ - .format(letter_contact.id, fake_uuid) - - -def test_check_service_letter_contact_id_where_letter_contact_is_not_found(sample_service, fake_uuid): - with pytest.raises(BadRequestError) as e: - check_service_letter_contact_id(sample_service.id, fake_uuid, LETTER_TYPE) - assert e.value.status_code == 400 - assert e.value.message == 'letter_contact_id {} does not exist in database for service id {}' \ - .format(fake_uuid, sample_service.id) - - -@pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter']) +@pytest.mark.parametrize('notification_type', ['sms', 'email']) def test_check_reply_to_with_empty_reply_to(sample_service, notification_type): assert check_reply_to(sample_service.id, None, notification_type) is None @@ -598,11 +569,6 @@ def test_check_reply_to_sms_type(sample_service): assert check_reply_to(sample_service.id, sms_sender.id, SMS_TYPE) == '123456' -def test_check_reply_to_letter_type(sample_service): - letter_contact = create_letter_contact(service=sample_service, contact_block='123456') - assert check_reply_to(sample_service.id, letter_contact.id, LETTER_TYPE) == '123456' - - @pytest.mark.skip(reason="Needs updating for TTS: Failing for unknown reason") def test_check_if_service_can_send_files_by_email_raises_if_no_contact_link_set(sample_service): with pytest.raises(BadRequestError) as e: @@ -623,19 +589,3 @@ def test_check_if_service_can_send_files_by_email_passes_if_contact_link_set(sam service_contact_link=sample_service.contact_link, service_id=sample_service.id ) - - -@pytest.mark.parametrize('key, address_line_3, expected_postage', - [('address_line_3', 'SW1 1AA', None), - ('address_line_5', 'CANADA', 'rest-of-world'), - ('address_line_3', 'GERMANY', 'europe') - ]) -def test_validate_address(notify_db_session, key, address_line_3, expected_postage): - service = create_service(service_permissions=[LETTER_TYPE, INTERNATIONAL_LETTERS]) - data = { - 'address_line_1': 'Prince Harry', - 'address_line_2': 'Toronto', - key: address_line_3, - } - postage = validate_address(service, data) - assert postage == expected_postage diff --git a/tests/app/organisation/test_rest.py b/tests/app/organisation/test_rest.py index dd7f24e0a..650bcacf0 100644 --- a/tests/app/organisation/test_rest.py +++ b/tests/app/organisation/test_rest.py @@ -17,7 +17,6 @@ from tests.app.db import ( create_domain, create_email_branding, create_ft_billing, - create_letter_branding, create_organisation, create_service, create_template, @@ -76,7 +75,6 @@ def test_get_organisation_by_id(admin_request, notify_db_session): 'agreement_signed_version', 'agreement_signed_on_behalf_of_name', 'agreement_signed_on_behalf_of_email_address', - 'letter_branding_id', 'email_branding_id', 'domains', 'request_to_go_live_notes', @@ -95,7 +93,6 @@ def test_get_organisation_by_id(admin_request, notify_db_session): assert response['agreement_signed'] is None assert response['agreement_signed_by_id'] is None assert response['agreement_signed_version'] is None - assert response['letter_branding_id'] is None assert response['email_branding_id'] is None assert response['domains'] == [] assert response['request_to_go_live_notes'] is None @@ -439,23 +436,19 @@ def test_update_organisation_default_branding( org = create_organisation(name='Test Organisation') email_branding = create_email_branding() - letter_branding = create_letter_branding() assert org.email_branding is None - assert org.letter_branding is None admin_request.post( 'organisation.update_organisation', _data={ 'email_branding_id': str(email_branding.id), - 'letter_branding_id': str(letter_branding.id), }, organisation_id=org.id, _expected_status=204 ) assert org.email_branding == email_branding - assert org.letter_branding == letter_branding def test_post_update_organisation_raises_400_on_existing_org_name( @@ -848,7 +841,6 @@ def test_get_organisation_services_usage(admin_request, notify_db_session): assert service_usage['chargeable_billable_sms'] == 9.0 assert service_usage['emails_sent'] == 0 assert service_usage['free_sms_limit'] == 10 - assert service_usage['letter_cost'] == 0 assert service_usage['sms_billable_units'] == 19 assert service_usage['sms_remainder'] == 0 assert service_usage['sms_cost'] == 0.54 diff --git a/tests/app/platform_stats/test_rest.py b/tests/app/platform_stats/test_rest.py index 2bd3847c0..5df831047 100644 --- a/tests/app/platform_stats/test_rest.py +++ b/tests/app/platform_stats/test_rest.py @@ -72,11 +72,6 @@ def test_get_platform_stats_with_real_query(admin_request, notify_db_session): 'virus-scan-failed': 0, 'temporary-failure': 0, 'permanent-failure': 0, 'technical-failure': 0}, 'total': 4, 'test-key': 0 }, - 'letter': { - 'failures': { - 'virus-scan-failed': 0, 'temporary-failure': 0, 'permanent-failure': 0, 'technical-failure': 0}, - 'total': 0, 'test-key': 0 - }, 'sms': { 'failures': { 'virus-scan-failed': 0, 'temporary-failure': 0, 'permanent-failure': 0, 'technical-failure': 0}, @@ -134,59 +129,21 @@ def test_get_data_for_billing_report(notify_db_session, admin_request): end_date='2019-06-30' ) - # we set up 6 services, but only 4 returned. service_with_emails was skipped as it had no bills to pay, + # we set up 4 services, but only 1 returned. service_with_emails was skipped as it had no bills to pay, # and likewise the service with SMS within allowance was skipped. too. - assert len(response) == 4 - assert response[0]["organisation_id"] == str(fixtures["org_1"].id) - assert response[0]["service_id"] == str(fixtures["service_1_sms_and_letter"].id) - assert response[0]["sms_cost"] == 0 - assert response[0]["sms_chargeable_units"] == 0 - assert response[0]["total_letters"] == 8 - assert response[0]["letter_cost"] == 3.40 - assert response[0]["letter_breakdown"] == "6 second class letters at 45p\n2 first class letters at 35p\n" - assert response[0]["purchase_order_number"] == "service purchase order number" - assert response[0]["contact_names"] == "service billing contact names" - assert response[0]["contact_email_addresses"] == "service@billing.contact email@addresses.gov.uk" - assert response[0]["billing_reference"] == "service billing reference" - - assert response[1]["organisation_id"] == str(fixtures["org_for_service_with_letters"].id) - assert response[1]["service_id"] == str(fixtures["service_with_letters"].id) - assert response[1]["sms_cost"] == 0 - assert response[1]["sms_chargeable_units"] == 0 - assert response[1]["total_letters"] == 22 - assert response[1]["letter_cost"] == 14 - assert response[1]["letter_breakdown"] == "20 second class letters at 65p\n2 first class letters at 50p\n" - assert response[1]["purchase_order_number"] == "org3 purchase order number" - assert response[1]["contact_names"] == "org3 billing contact names" - assert response[1]["contact_email_addresses"] == "org3@billing.contact email@addresses.gov.uk" - assert response[1]["billing_reference"] == "org3 billing reference" - - assert response[2]["organisation_id"] == "" - assert response[2]["service_id"] == str(fixtures["service_with_sms_without_org"].id) - assert response[2]["sms_cost"] == 0.33 - assert response[2]["sms_chargeable_units"] == 3 - assert response[2]["total_letters"] == 0 - assert response[2]["letter_cost"] == 0 - assert response[2]["letter_breakdown"] == "" - assert response[2]["purchase_order_number"] == "sms purchase order number" - assert response[2]["contact_names"] == "sms billing contact names" - assert response[2]["contact_email_addresses"] == "sms@billing.contact email@addresses.gov.uk" - assert response[2]["billing_reference"] == "sms billing reference" - - assert response[3]["organisation_id"] == "" - assert response[3]["service_id"] == str(fixtures["service_with_letters_without_org"].id) - assert response[3]["sms_cost"] == 0 - assert response[3]["sms_chargeable_units"] == 0 - assert response[3]["total_letters"] == 18 - assert response[3]["letter_cost"] == 24.45 - assert response[3]["letter_breakdown"] == ( - "2 second class letters at 35p\n1 first class letters at 50p\n15 international letters at £1.55\n" - ) - assert response[3]["purchase_order_number"] is None + assert len(response) == 1 + assert response[0]["organisation_id"] == "" + assert response[0]["service_id"] == str(fixtures["service_with_sms_without_org"].id) + assert response[0]["sms_cost"] == 0.33 + assert response[0]["sms_chargeable_units"] == 3 + assert response[0]["purchase_order_number"] == "sms purchase order number" + assert response[0]["contact_names"] == "sms billing contact names" + assert response[0]["contact_email_addresses"] == "sms@billing.contact email@addresses.gov.uk" + assert response[0]["billing_reference"] == "sms billing reference" def test_daily_volumes_report( - notify_db_session, sample_template, sample_email_template, sample_letter_template, admin_request + notify_db_session, sample_template, sample_email_template, admin_request ): set_up_usage_data(datetime(2022, 3, 1)) response = admin_request.get( @@ -196,16 +153,16 @@ def test_daily_volumes_report( ) assert len(response) == 3 - assert response[0] == {'day': '2022-03-01', 'email_totals': 10, 'letter_sheet_totals': 3, - 'letter_totals': 2, 'sms_chargeable_units': 2, 'sms_fragment_totals': 2, 'sms_totals': 1} - assert response[1] == {'day': '2022-03-03', 'email_totals': 0, 'letter_sheet_totals': 10, 'letter_totals': 18, + assert response[0] == {'day': '2022-03-01', 'email_totals': 10, + 'sms_chargeable_units': 2, 'sms_fragment_totals': 2, 'sms_totals': 1} + assert response[1] == {'day': '2022-03-03', 'email_totals': 0, 'sms_chargeable_units': 2, 'sms_fragment_totals': 2, 'sms_totals': 2} - assert response[2] == {'day': '2022-03-08', 'email_totals': 0, 'letter_sheet_totals': 11, 'letter_totals': 12, + assert response[2] == {'day': '2022-03-08', 'email_totals': 0, 'sms_chargeable_units': 4, 'sms_fragment_totals': 4, 'sms_totals': 2} def test_volumes_by_service_report( - notify_db_session, sample_template, sample_email_template, sample_letter_template, admin_request + notify_db_session, sample_template, sample_email_template, admin_request ): fixture = set_up_usage_data(datetime(2022, 3, 1)) response = admin_request.get( @@ -214,29 +171,25 @@ def test_volumes_by_service_report( end_date='2022-03-01' ) - assert len(response) == 7 + assert len(response) == 5 # since we are using a pre-set up fixture, we only care about some of the results - assert response[0] == {'email_totals': 0, 'free_allowance': 10, 'letter_cost': 0.0, - 'letter_sheet_totals': 0, 'letter_totals': 0, + assert response[0] == {'email_totals': 0, 'free_allowance': 10, 'organisation_id': str(fixture['org_1'].id), 'organisation_name': fixture['org_1'].name, 'service_id': str(fixture['service_1_sms_and_letter'].id), 'service_name': fixture['service_1_sms_and_letter'].name, 'sms_chargeable_units': 2, 'sms_notifications': 1} - assert response[1] == {'email_totals': 0, 'free_allowance': 10, 'letter_cost': 0.0, 'letter_sheet_totals': 0, - 'letter_totals': 0, 'organisation_id': str(fixture['org_1'].id), + assert response[1] == {'email_totals': 0, 'free_allowance': 10, 'organisation_id': str(fixture['org_1'].id), 'organisation_name': fixture['org_1'].name, 'service_id': str(fixture['service_with_out_ft_billing_this_year'].id), 'service_name': fixture['service_with_out_ft_billing_this_year'].name, 'sms_chargeable_units': 0, 'sms_notifications': 0} - assert response[4] == {'email_totals': 0, 'free_allowance': 10, 'letter_cost': 0.0, 'letter_sheet_totals': 0, - 'letter_totals': 0, 'organisation_id': '', 'organisation_name': '', + assert response[3] == {'email_totals': 0, 'free_allowance': 10, 'organisation_id': '', 'organisation_name': '', 'service_id': str(fixture['service_with_sms_without_org'].id), 'service_name': fixture['service_with_sms_without_org'].name, 'sms_chargeable_units': 0, 'sms_notifications': 0} - assert response[6] == {'email_totals': 0, 'free_allowance': 10, 'letter_cost': 0.0, 'letter_sheet_totals': 0, - 'letter_totals': 0, 'organisation_id': '', 'organisation_name': '', + assert response[4] == {'email_totals': 0, 'free_allowance': 10, 'organisation_id': '', 'organisation_name': '', 'service_id': str(fixture['service_with_sms_within_allowance'].id), 'service_name': fixture['service_with_sms_within_allowance'].name, 'sms_chargeable_units': 0, 'sms_notifications': 0} diff --git a/tests/app/provider_details/test_rest.py b/tests/app/provider_details/test_rest.py index 4e2c001e9..0e0da6004 100644 --- a/tests/app/provider_details/test_rest.py +++ b/tests/app/provider_details/test_rest.py @@ -11,7 +11,7 @@ def test_get_provider_details_returns_all_providers(admin_request, notify_db_ses json_resp = admin_request.get('provider_details.get_providers')['provider_details'] assert len(json_resp) > 0 - assert {'ses', 'sns', 'dvla'} == {x['identifier'] for x in json_resp} + assert {'ses', 'sns'} == {x['identifier'] for x in json_resp} def test_get_provider_details_by_id(client, notify_db_session): diff --git a/tests/app/service/send_notification/test_send_notification.py b/tests/app/service/send_notification/test_send_notification.py index 86e4c6038..83122dcbc 100644 --- a/tests/app/service/send_notification/test_send_notification.py +++ b/tests/app/service/send_notification/test_send_notification.py @@ -1167,8 +1167,7 @@ def test_should_not_allow_email_notifications_if_service_permission_not_set( @pytest.mark.parametrize( "notification_type, err_msg", - [("letter", "letter notification type is not supported, please use the latest version of the client"), - ("apple", "apple notification type is not supported")]) + [("apple", "apple notification type is not supported")]) def test_should_throw_exception_if_notification_type_is_invalid(client, sample_service, notification_type, err_msg): auth_header = create_service_authorization_header(service_id=sample_service.id) response = client.post( @@ -1208,32 +1207,7 @@ def test_post_notification_should_set_reply_to_text(client, sample_service, mock assert notifications[0].reply_to_text == expected_reply_to -@pytest.mark.parametrize('last_line_of_address, expected_postage, expected_international', - [('France', 'europe', True), - ('Canada', 'rest-of-world', True), - ('SW1 1AA', 'second', False)]) -def test_send_notification_should_send_international_letters( - sample_letter_template, mocker, last_line_of_address, expected_postage, expected_international -): - deliver_mock = mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - data = { - 'template_id': sample_letter_template.id, - 'personalisation': { - 'address_line_1': 'Jane', - 'address_line_2': 'Rue Vert', - 'address_line_3': last_line_of_address - }, - 'to': 'Jane', - 'created_by': sample_letter_template.service.created_by_id - } - - notification_id = send_one_off_notification(sample_letter_template.service_id, data) - assert deliver_mock.called - notification = Notification.query.get(notification_id['id']) - assert notification.postage == expected_postage - assert notification.international == expected_international - - +@pytest.mark.skip(reason="Rewrite without letters?") @pytest.mark.parametrize('reference_paceholder,', [None, 'ref2']) def test_send_notification_should_set_client_reference_from_placeholder( sample_letter_template, mocker, reference_paceholder diff --git a/tests/app/service/send_notification/test_send_one_off_notification.py b/tests/app/service/send_notification/test_send_one_off_notification.py index d0065d0be..fcc458b02 100644 --- a/tests/app/service/send_notification/test_send_one_off_notification.py +++ b/tests/app/service/send_notification/test_send_one_off_notification.py @@ -12,7 +12,6 @@ from app.dao.service_guest_list_dao import ( from app.models import ( EMAIL_TYPE, KEY_TYPE_NORMAL, - LETTER_TYPE, MOBILE_TYPE, PRIORITY, SMS_TYPE, @@ -22,7 +21,6 @@ from app.models import ( from app.service.send_notification import send_one_off_notification from app.v2.errors import BadRequestError, TooManyRequestsError from tests.app.db import ( - create_letter_contact, create_reply_to_email, create_service, create_service_sms_sender, @@ -100,7 +98,6 @@ def test_send_one_off_notification_calls_persist_correctly_for_sms( created_by_id=str(service.created_by_id), reply_to_text='testing', reference=None, - postage=None, client_reference=None ) @@ -162,57 +159,6 @@ def test_send_one_off_notification_calls_persist_correctly_for_email( created_by_id=str(service.created_by_id), reply_to_text=None, reference=None, - postage=None, - client_reference=None - ) - - -def test_send_one_off_notification_calls_persist_correctly_for_letter( - mocker, - persist_mock, - celery_mock, - notify_db_session -): - mocker.patch( - 'app.service.send_notification.create_random_identifier', - return_value='this-is-random-in-real-life', - ) - service = create_service() - template = create_template( - service=service, - template_type=LETTER_TYPE, - postage='first', - subject="Test subject", - content="Hello (( Name))\nYour thing is due soon", - ) - - post_data = { - 'template_id': str(template.id), - 'to': 'First Last', - 'personalisation': { - 'name': 'foo', - 'address_line_1': 'First Last', - 'address_line_2': '1 Example Street', - 'postcode': 'SW1A 1AA', - }, - 'created_by': str(service.created_by_id) - } - - send_one_off_notification(service.id, post_data) - - persist_mock.assert_called_once_with( - template_id=template.id, - template_version=template.version, - recipient=post_data['to'], - service=template.service, - personalisation=post_data['personalisation'], - notification_type=LETTER_TYPE, - api_key_id=None, - key_type=KEY_TYPE_NORMAL, - created_by_id=str(service.created_by_id), - reply_to_text=None, - reference='this-is-random-in-real-life', - postage='first', client_reference=None ) @@ -360,56 +306,6 @@ def test_send_one_off_notification_should_add_email_reply_to_text_for_notificati assert notification.reply_to_text == reply_to_email.email_address -def test_send_one_off_letter_notification_should_use_template_reply_to_text(sample_letter_template, celery_mock): - letter_contact = create_letter_contact(sample_letter_template.service, "Edinburgh, ED1 1AA", is_default=False) - sample_letter_template.reply_to = str(letter_contact.id) - - data = { - 'to': 'user@example.com', - 'template_id': str(sample_letter_template.id), - 'personalisation': { - 'name': 'foo', - 'address_line_1': 'First Last', - 'address_line_2': '1 Example Street', - 'address_line_3': 'SW1A 1AA', - }, - 'created_by': str(sample_letter_template.service.created_by_id) - } - - notification_id = send_one_off_notification(service_id=sample_letter_template.service.id, post_data=data) - notification = Notification.query.get(notification_id['id']) - celery_mock.assert_called_once_with( - notification=notification, - research_mode=False, - queue=None - ) - - assert notification.reply_to_text == "Edinburgh, ED1 1AA" - - -@pytest.mark.skip(reason="Needs updating for TTS: Remove letters") -def test_send_one_off_letter_should_not_make_pdf_in_research_mode(sample_letter_template): - - sample_letter_template.service.research_mode = True - - data = { - 'to': 'A. Name', - 'template_id': str(sample_letter_template.id), - 'personalisation': { - 'name': 'foo', - 'address_line_1': 'First Last', - 'address_line_2': '1 Example Street', - 'address_line_3': 'SW1A 1AA', - }, - 'created_by': str(sample_letter_template.service.created_by_id) - } - - notification = send_one_off_notification(service_id=sample_letter_template.service.id, post_data=data) - notification = Notification.query.get(notification['id']) - - assert notification.status == "delivered" - - def test_send_one_off_sms_notification_should_use_sms_sender_reply_to_text(sample_service, celery_mock): template = create_template(service=sample_service, template_type=SMS_TYPE) sms_sender = create_service_sms_sender( diff --git a/tests/app/service/send_notification/test_send_pdf_letter_notification.py b/tests/app/service/send_notification/test_send_pdf_letter_notification.py deleted file mode 100644 index 091396fd0..000000000 --- a/tests/app/service/send_notification/test_send_pdf_letter_notification.py +++ /dev/null @@ -1,132 +0,0 @@ -import pytest -from freezegun import freeze_time -from notifications_utils.s3 import S3ObjectNotFound - -from app.dao.notifications_dao import get_notification_by_id -from app.models import EMAIL_TYPE, LETTER_TYPE, UPLOAD_LETTERS -from app.service.send_notification import send_pdf_letter_notification -from app.v2.errors import BadRequestError, TooManyRequestsError -from tests.app.db import create_service - - -@pytest.fixture -def post_data(sample_service_full_permissions, fake_uuid): - return { - 'filename': 'valid.pdf', - 'created_by': sample_service_full_permissions.users[0].id, - 'file_id': fake_uuid, - 'postage': 'second', - 'recipient_address': 'Bugs%20Bunny%0A123%20Main%20Street%0ALooney%20Town' - } - - -@pytest.mark.parametrize('permissions', [ - [EMAIL_TYPE], - [UPLOAD_LETTERS], -]) -def test_send_pdf_letter_notification_raises_error_if_service_does_not_have_permission( - notify_db_session, - permissions, - post_data, -): - service = create_service(service_permissions=permissions) - - with pytest.raises(BadRequestError): - send_pdf_letter_notification(service.id, post_data) - - -def test_send_pdf_letter_notification_raises_error_if_service_is_over_daily_message_limit( - mocker, - sample_service_full_permissions, - post_data, -): - mocker.patch( - 'app.service.send_notification.check_service_over_daily_message_limit', - side_effect=TooManyRequestsError(10)) - - with pytest.raises(TooManyRequestsError): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -def test_send_pdf_letter_notification_validates_created_by( - sample_service_full_permissions, - sample_user, - post_data -): - post_data['created_by'] = sample_user.id - - with pytest.raises(BadRequestError): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -def test_send_pdf_letter_notification_raises_error_if_service_in_trial_mode( - mocker, - sample_service_full_permissions, - post_data, -): - sample_service_full_permissions.restricted = True - - with pytest.raises(BadRequestError) as e: - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - assert 'trial mode' in e.value.message - - -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_send_pdf_letter_notification_raises_error_when_pdf_is_not_in_transient_letter_bucket( - mocker, - sample_service_full_permissions, - notify_user, - post_data, -): - mocker.patch('app.service.send_notification.utils_s3download', side_effect=S3ObjectNotFound({}, '')) - - with pytest.raises(S3ObjectNotFound): - send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - - -def test_send_pdf_letter_notification_does_nothing_if_notification_already_exists( - mocker, - sample_service_full_permissions, - notify_user, - sample_notification, - post_data, -): - post_data['file_id'] = sample_notification.id - mocker.patch('app.service.send_notification.utils_s3download', side_effect=S3ObjectNotFound({}, '')) - response = send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - assert response['id'] == str(sample_notification.id) - - -@freeze_time("2019-08-02 11:00:00") -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_send_pdf_letter_notification_creates_notification_and_moves_letter( - mocker, - sample_service_full_permissions, - notify_user, - post_data, -): - mocker.patch('app.service.send_notification.utils_s3download') - mocker.patch('app.service.send_notification.get_page_count', return_value=1) - s3_mock = mocker.patch('app.service.send_notification.move_uploaded_pdf_to_letters_bucket') - - result = send_pdf_letter_notification(sample_service_full_permissions.id, post_data) - file_id = post_data['file_id'] - - notification = get_notification_by_id(file_id) - - assert str(notification.id) == file_id - assert notification.api_key_id is None - assert notification.client_reference == post_data['filename'] - assert notification.created_by_id == post_data['created_by'] - assert notification.postage == 'second' - assert notification.notification_type == LETTER_TYPE - assert notification.billable_units == 1 - assert notification.to == "Bugs Bunny\n123 Main Street\nLooney Town" - - assert notification.service_id == sample_service_full_permissions.id - assert result == {'id': str(notification.id)} - - s3_mock.assert_called_once_with( - 'service-{}/{}.pdf'.format(sample_service_full_permissions.id, file_id), - '2019-08-02/NOTIFY.{}.D.2.C.20190802110000.PDF'.format(notification.reference) - ) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index cbaa26c47..3f9c98563 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -21,15 +21,11 @@ from app.models import ( EMAIL_AUTH_TYPE, EMAIL_TYPE, INBOUND_SMS_TYPE, - INTERNATIONAL_LETTERS, INTERNATIONAL_SMS_TYPE, KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, - LETTER_TYPE, - NOTIFICATION_RETURNED_LETTER, SMS_TYPE, - UPLOAD_LETTERS, AnnualBilling, EmailBranding, InboundNumber, @@ -37,7 +33,6 @@ from app.models import ( Permission, Service, ServiceEmailReplyTo, - ServiceLetterContact, ServicePermission, ServiceSmsSender, User, @@ -45,20 +40,14 @@ from app.models import ( from tests import create_admin_authorization_header from tests.app.db import ( create_annual_billing, - create_api_key, create_domain, create_email_branding, create_ft_billing, create_ft_notification_status, create_inbound_number, - create_job, - create_letter_branding, - create_letter_contact, create_notification, - create_notification_history, create_organisation, create_reply_to_email, - create_returned_letter, create_service, create_service_sms_sender, create_service_with_defined_sms_sender, @@ -208,8 +197,6 @@ def test_get_live_services_data(sample_user, admin_request): 'contact_name': 'Test User', 'email_totals': 1, 'email_volume_intent': None, - 'letter_totals': 0, - 'letter_volume_intent': None, 'live_date': 'Mon, 01 Jan 2018 00:00:00 GMT', 'organisation_name': 'test_org_1', 'service_id': ANY, @@ -226,8 +213,6 @@ def test_get_live_services_data(sample_user, admin_request): 'contact_name': 'Test User', 'email_totals': 0, 'email_volume_intent': None, - 'letter_totals': 0, - 'letter_volume_intent': None, 'live_date': 'Tue, 01 Jan 2019 00:00:00 GMT', 'organisation_name': None, 'service_id': ANY, @@ -263,7 +248,6 @@ def test_get_service_by_id(admin_request, sample_service): 'go_live_user', 'id', 'inbound_api', - 'letter_branding', 'message_limit', 'name', 'notes', @@ -277,7 +261,6 @@ def test_get_service_by_id(admin_request, sample_service): 'restricted', 'service_callback_api', 'volume_email', - 'volume_letter', 'volume_sms', } @@ -300,7 +283,7 @@ def test_get_service_list_has_default_permissions(admin_request, service_factory set( json['permissions'] ) == { - EMAIL_TYPE, SMS_TYPE, INTERNATIONAL_SMS_TYPE, LETTER_TYPE, UPLOAD_LETTERS, INTERNATIONAL_LETTERS + EMAIL_TYPE, SMS_TYPE, INTERNATIONAL_SMS_TYPE, } for json in json_resp['data'] ) @@ -312,7 +295,7 @@ def test_get_service_by_id_has_default_service_permissions(admin_request, sample assert set( json_resp['data']['permissions'] ) == { - EMAIL_TYPE, SMS_TYPE, INTERNATIONAL_SMS_TYPE, LETTER_TYPE, UPLOAD_LETTERS, INTERNATIONAL_LETTERS + EMAIL_TYPE, SMS_TYPE, INTERNATIONAL_SMS_TYPE, } @@ -391,7 +374,6 @@ def test_create_service( assert json_resp['data']['name'] == 'created service' assert json_resp['data']['email_from'] == 'created.service' assert not json_resp['data']['research_mode'] - assert json_resp['data']['letter_branding'] is None assert json_resp['data']['count_as_live'] is expected_count_as_live service_db = Service.query.get(json_resp['data']['id']) @@ -507,8 +489,6 @@ def test_create_service_inherits_branding_from_organisation( org = create_organisation() email_branding = create_email_branding() org.email_branding = email_branding - letter_branding = create_letter_branding() - org.letter_branding = letter_branding create_domain('example.gov.uk', org.id) sample_user.email_address = 'test@example.gov.uk' @@ -527,7 +507,6 @@ def test_create_service_inherits_branding_from_organisation( ) assert json_resp['data']['email_branding'] == str(email_branding.id) - assert json_resp['data']['letter_branding'] == str(letter_branding.id) def test_should_not_create_service_with_missing_user_id_field(notify_api, fake_uuid): @@ -722,53 +701,6 @@ def test_cant_update_service_org_type_to_random_value(client, sample_service): assert resp.status_code == 500 -def test_update_service_letter_branding(client, notify_db_session, sample_service): - letter_branding = create_letter_branding(name='test brand', filename='test-brand') - data = { - 'letter_branding': str(letter_branding.id) - } - - auth_header = create_admin_authorization_header() - - resp = client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - result = resp.json - assert resp.status_code == 200 - assert result['data']['letter_branding'] == str(letter_branding.id) - - -def test_update_service_remove_letter_branding(client, notify_db_session, sample_service): - letter_branding = create_letter_branding(name='test brand', filename='test-brand') - sample_service - data = { - 'letter_branding': str(letter_branding.id) - } - - auth_header = create_admin_authorization_header() - - client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - - data = { - 'letter_branding': None - } - resp = client.post( - '/service/{}'.format(sample_service.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header] - ) - - result = resp.json - assert resp.status_code == 200 - assert result['data']['letter_branding'] is None - - def test_update_service_remove_email_branding(admin_request, notify_db_session, sample_service): brand = EmailBranding(colour='#000000', logo='justice-league.png', name='Justice League') sample_service.email_branding = brand @@ -810,7 +742,7 @@ def test_update_service_flags(client, sample_service): data = { 'research_mode': True, - 'permissions': [LETTER_TYPE, INTERNATIONAL_SMS_TYPE] + 'permissions': [INTERNATIONAL_SMS_TYPE] } auth_header = create_admin_authorization_header() @@ -823,13 +755,12 @@ def test_update_service_flags(client, sample_service): result = resp.json assert resp.status_code == 200 assert result['data']['research_mode'] is True - assert set(result['data']['permissions']) == set([LETTER_TYPE, INTERNATIONAL_SMS_TYPE]) + assert set(result['data']['permissions']) == set([INTERNATIONAL_SMS_TYPE]) @pytest.mark.parametrize('field', ( 'volume_email', 'volume_sms', - 'volume_letter', )) @pytest.mark.parametrize('value, expected_status, expected_persisted', ( (1234, 200, 1234), @@ -887,7 +818,7 @@ def service_with_no_permissions(notify_db_session): def test_update_service_flags_with_service_without_default_service_permissions(client, service_with_no_permissions): auth_header = create_admin_authorization_header() data = { - 'permissions': [LETTER_TYPE, INTERNATIONAL_SMS_TYPE], + 'permissions': [INTERNATIONAL_SMS_TYPE], } resp = client.post( @@ -898,7 +829,7 @@ def test_update_service_flags_with_service_without_default_service_permissions(c result = resp.json assert resp.status_code == 200 - assert set(result['data']['permissions']) == set([LETTER_TYPE, INTERNATIONAL_SMS_TYPE]) + assert set(result['data']['permissions']) == set([INTERNATIONAL_SMS_TYPE]) def test_update_service_flags_will_remove_service_permissions(client, notify_db_session): @@ -930,7 +861,7 @@ def test_update_permissions_will_override_permission_flags(client, service_with_ auth_header = create_admin_authorization_header() data = { - 'permissions': [LETTER_TYPE, INTERNATIONAL_SMS_TYPE] + 'permissions': [INTERNATIONAL_SMS_TYPE] } resp = client.post( @@ -941,14 +872,14 @@ def test_update_permissions_will_override_permission_flags(client, service_with_ result = resp.json assert resp.status_code == 200 - assert set(result['data']['permissions']) == set([LETTER_TYPE, INTERNATIONAL_SMS_TYPE]) + assert set(result['data']['permissions']) == set([INTERNATIONAL_SMS_TYPE]) def test_update_service_permissions_will_add_service_permissions(client, sample_service): auth_header = create_admin_authorization_header() data = { - 'permissions': [EMAIL_TYPE, SMS_TYPE, LETTER_TYPE] + 'permissions': [EMAIL_TYPE, SMS_TYPE] } resp = client.post( @@ -959,7 +890,7 @@ def test_update_service_permissions_will_add_service_permissions(client, sample_ result = resp.json assert resp.status_code == 200 - assert set(result['data']['permissions']) == set([SMS_TYPE, EMAIL_TYPE, LETTER_TYPE]) + assert set(result['data']['permissions']) == set([SMS_TYPE, EMAIL_TYPE]) @pytest.mark.parametrize( @@ -968,7 +899,6 @@ def test_update_service_permissions_will_add_service_permissions(client, sample_ (EMAIL_TYPE), (SMS_TYPE), (INTERNATIONAL_SMS_TYPE), - (LETTER_TYPE), (INBOUND_SMS_TYPE), (EMAIL_AUTH_TYPE), ] @@ -1016,7 +946,7 @@ def test_update_permissions_with_duplicate_permissions_will_raise_error(client, auth_header = create_admin_authorization_header() data = { - 'permissions': [EMAIL_TYPE, SMS_TYPE, LETTER_TYPE, LETTER_TYPE] + 'permissions': [EMAIL_TYPE, SMS_TYPE, SMS_TYPE] } resp = client.post( @@ -1028,7 +958,7 @@ def test_update_permissions_with_duplicate_permissions_will_raise_error(client, assert resp.status_code == 400 assert result['result'] == 'error' - assert "Duplicate Service Permission: ['{}']".format(LETTER_TYPE) in result['message']['permissions'] + assert "Duplicate Service Permission: ['{}']".format(SMS_TYPE) in result['message']['permissions'] def test_update_service_research_mode_throws_validation_error(notify_api, sample_service): @@ -1273,7 +1203,6 @@ def test_add_existing_user_to_another_service_with_all_permissions( data = { "permissions": [ {"permission": "send_emails"}, - {"permission": "send_letters"}, {"permission": "send_texts"}, {"permission": "manage_users"}, {"permission": "manage_settings"}, @@ -1312,7 +1241,7 @@ def test_add_existing_user_to_another_service_with_all_permissions( assert resp.status_code == 200 json_resp = resp.json permissions = json_resp['data']['permissions'][str(sample_service.id)] - expected_permissions = ['send_texts', 'send_emails', 'send_letters', 'manage_users', + expected_permissions = ['send_texts', 'send_emails', 'manage_users', 'manage_settings', 'manage_templates', 'manage_api_keys', 'view_activity'] assert sorted(expected_permissions) == sorted(permissions) @@ -1335,7 +1264,6 @@ def test_add_existing_user_to_another_service_with_send_permissions(notify_api, data = { "permissions": [ {"permission": "send_emails"}, - {"permission": "send_letters"}, {"permission": "send_texts"}, ], "folder_permissions": [] @@ -1360,7 +1288,7 @@ def test_add_existing_user_to_another_service_with_send_permissions(notify_api, json_resp = resp.json permissions = json_resp['data']['permissions'][str(sample_service.id)] - expected_permissions = ['send_texts', 'send_emails', 'send_letters'] + expected_permissions = ['send_texts', 'send_emails'] assert sorted(expected_permissions) == sorted(permissions) @@ -2012,7 +1940,7 @@ def test_get_detailed_service(sample_template, client, sample_service, today_onl service = resp.json['data'] assert service['id'] == str(sample_service.id) assert 'statistics' in service.keys() - assert set(service['statistics'].keys()) == {SMS_TYPE, EMAIL_TYPE, LETTER_TYPE} + assert set(service['statistics'].keys()) == {SMS_TYPE, EMAIL_TYPE} assert service['statistics'][SMS_TYPE] == stats @@ -2036,7 +1964,6 @@ def test_get_services_with_detailed_flag(client, sample_template): assert data[0]['statistics'] == { EMAIL_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0}, SMS_TYPE: {'delivered': 0, 'failed': 0, 'requested': 3}, - LETTER_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0} } @@ -2059,7 +1986,6 @@ def test_get_services_with_detailed_flag_excluding_from_test_key(client, sample_ assert data[0]['statistics'] == { EMAIL_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0}, SMS_TYPE: {'delivered': 0, 'failed': 0, 'requested': 2}, - LETTER_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0} } @@ -2120,13 +2046,11 @@ def test_get_detailed_services_groups_by_service(notify_db_session): assert data[0]['statistics'] == { EMAIL_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0}, SMS_TYPE: {'delivered': 1, 'failed': 0, 'requested': 3}, - LETTER_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0} } assert data[1]['id'] == str(service_2.id) assert data[1]['statistics'] == { EMAIL_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0}, SMS_TYPE: {'delivered': 0, 'failed': 0, 'requested': 1}, - LETTER_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0} } @@ -2149,13 +2073,11 @@ def test_get_detailed_services_includes_services_with_no_notifications(notify_db assert data[0]['statistics'] == { EMAIL_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0}, SMS_TYPE: {'delivered': 0, 'failed': 0, 'requested': 1}, - LETTER_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0} } assert data[1]['id'] == str(service_2.id) assert data[1]['statistics'] == { EMAIL_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0}, SMS_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0}, - LETTER_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0} } @@ -2175,7 +2097,6 @@ def test_get_detailed_services_only_includes_todays_notifications(sample_templat assert data[0]['statistics'] == { EMAIL_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0}, SMS_TYPE: {'delivered': 0, 'failed': 0, 'requested': 3}, - LETTER_TYPE: {'delivered': 0, 'failed': 0, 'requested': 0} } @@ -2209,7 +2130,6 @@ def test_get_detailed_services_for_date_range(sample_template, start_date_delta, assert len(data) == 1 assert data[0]['statistics'][EMAIL_TYPE] == {'delivered': 0, 'failed': 0, 'requested': 0} assert data[0]['statistics'][SMS_TYPE] == {'delivered': 2, 'failed': 0, 'requested': 2} - assert data[0]['statistics'][LETTER_TYPE] == {'delivered': 0, 'failed': 0, 'requested': 0} def test_search_for_notification_by_to_field(client, sample_template, sample_email_template): @@ -2303,29 +2223,6 @@ def test_search_for_notification_by_to_field_returns_no_next_link_if_50_or_less( assert response_json['links'] == {} -def test_search_for_notification_by_to_field_for_letter( - client, - notify_db_session, - sample_letter_template, - sample_email_template, - sample_template, -): - letter_notification = create_notification(sample_letter_template, to_field='A. Name', normalised_to='a.name') - create_notification(sample_email_template, to_field='A.Name@example.com', normalised_to='a.name@example.com') - create_notification(sample_template, to_field='44770900123', normalised_to='44770900123') - response = client.get( - '/service/{}/notifications?to={}&template_type={}'.format( - sample_letter_template.service_id, 'A. Name', 'letter', - ), - headers=[create_admin_authorization_header()] - ) - notifications = json.loads(response.get_data(as_text=True))['notifications'] - - assert response.status_code == 200 - assert len(notifications) == 1 - assert notifications[0]['id'] == str(letter_notification.id) - - def test_update_service_calls_send_notification_as_service_becomes_live(notify_db_session, client, mocker): send_notification_mock = mocker.patch('app.service.rest.send_notification_to_service_users') @@ -2484,66 +2381,6 @@ def test_send_one_off_notification(sample_service, admin_request, mocker): assert response['id'] == str(noti.id) -@pytest.mark.skip(reason="Skipping letter-related functionality for now") -def test_create_pdf_letter(mocker, sample_service_full_permissions, client, fake_uuid, notify_user): - mocker.patch('app.service.send_notification.utils_s3download') - mocker.patch('app.service.send_notification.get_page_count', return_value=1) - mocker.patch('app.service.send_notification.move_uploaded_pdf_to_letters_bucket') - - user = sample_service_full_permissions.users[0] - data = json.dumps({ - 'filename': 'valid.pdf', - 'created_by': str(user.id), - 'file_id': fake_uuid, - 'postage': 'second', - 'recipient_address': 'Bugs%20Bunny%0A123%20Main%20Street%0ALooney%20Town' - }) - - response = client.post( - url_for('service.create_pdf_letter', service_id=sample_service_full_permissions.id), - data=data, - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()] - ) - json_resp = json.loads(response.get_data(as_text=True)) - - assert response.status_code == 201 - assert json_resp == {'id': fake_uuid} - - -@pytest.mark.parametrize('post_data, expected_errors', [ - ( - {}, - [ - {'error': 'ValidationError', 'message': 'postage is a required property'}, - {'error': 'ValidationError', 'message': 'filename is a required property'}, - {'error': 'ValidationError', 'message': 'created_by is a required property'}, - {'error': 'ValidationError', 'message': 'file_id is a required property'}, - {'error': 'ValidationError', 'message': 'recipient_address is a required property'} - ] - ), - ( - {"postage": "third", "filename": "string", "created_by": "string", "file_id": "string", - "recipient_address": "Some Address"}, - [ - {'error': 'ValidationError', - 'message': 'postage invalid. It must be first, second, europe or rest-of-world.'} - ] - ) -]) -def test_create_pdf_letter_validates_against_json_schema( - sample_service_full_permissions, client, post_data, expected_errors -): - response = client.post( - url_for('service.create_pdf_letter', service_id=sample_service_full_permissions.id), - data=json.dumps(post_data), - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()] - ) - json_resp = json.loads(response.get_data(as_text=True)) - - assert response.status_code == 400 - assert json_resp['errors'] == expected_errors - - def test_get_notification_for_service_includes_template_redacted(admin_request, sample_notification): resp = admin_request.get( 'service.get_notification_for_service', @@ -2555,17 +2392,6 @@ def test_get_notification_for_service_includes_template_redacted(admin_request, assert resp['template']['redact_personalisation'] is False -def test_get_notification_for_service_includes_precompiled_letter(admin_request, sample_notification): - resp = admin_request.get( - 'service.get_notification_for_service', - service_id=sample_notification.service_id, - notification_id=sample_notification.id - ) - - assert resp['id'] == str(sample_notification.id) - assert resp['template']['is_precompiled_letter'] is False - - def test_get_all_notifications_for_service_includes_template_redacted(admin_request, sample_service): normal_template = create_template(sample_service) @@ -2589,31 +2415,23 @@ def test_get_all_notifications_for_service_includes_template_redacted(admin_requ assert resp['notifications'][1]['template']['redact_personalisation'] is True -def test_get_all_notifications_for_service_includes_template_hidden(admin_request, sample_service): - letter_template = create_template(sample_service, template_type=LETTER_TYPE) - precompiled_template = create_template( - sample_service, - template_type=LETTER_TYPE, - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - hidden=True - ) +# TODO: check whether all hidden templates are also precompiled letters +# def test_get_all_notifications_for_service_includes_template_hidden(admin_request, sample_service): +# letter_template = create_template(sample_service, template_type=LETTER_TYPE) - with freeze_time('2000-01-01'): - letter_noti = create_notification(letter_template) - with freeze_time('2000-01-02'): - precompiled_noti = create_notification(precompiled_template) +# with freeze_time('2000-01-01'): +# letter_noti = create_notification(letter_template) - resp = admin_request.get( - 'service.get_all_notifications_for_service', - service_id=sample_service.id - ) +# resp = admin_request.get( +# 'service.get_all_notifications_for_service', +# service_id=sample_service.id +# ) - assert resp['notifications'][0]['id'] == str(precompiled_noti.id) - assert resp['notifications'][0]['template']['is_precompiled_letter'] is True +# assert resp['notifications'][0]['id'] == str(precompiled_noti.id) +# assert resp['notifications'][0]['template']['is_precompiled_letter'] is True - assert resp['notifications'][1]['id'] == str(letter_noti.id) - assert resp['notifications'][1]['template']['is_precompiled_letter'] is False +# assert resp['notifications'][1]['id'] == str(letter_noti.id) +# assert resp['notifications'][1]['template']['is_precompiled_letter'] is False def test_search_for_notification_by_to_field_returns_personlisation( @@ -2913,192 +2731,6 @@ def test_get_email_reply_to_address(client, notify_db_session): assert json.loads(response.get_data(as_text=True)) == reply_to.serialize() -def test_get_letter_contacts_when_there_are_no_letter_contacts(client, sample_service): - response = client.get('/service/{}/letter-contact'.format(sample_service.id), - headers=[create_admin_authorization_header()]) - - assert json.loads(response.get_data(as_text=True)) == [] - assert response.status_code == 200 - - -def test_get_letter_contacts_with_one_letter_contact(client, notify_db_session): - service = create_service() - create_letter_contact(service, 'Aberdeen, AB23 1XH') - - response = client.get('/service/{}/letter-contact'.format(service.id), - headers=[create_admin_authorization_header()]) - json_response = json.loads(response.get_data(as_text=True)) - - assert len(json_response) == 1 - assert json_response[0]['contact_block'] == 'Aberdeen, AB23 1XH' - assert json_response[0]['is_default'] - assert json_response[0]['created_at'] - assert not json_response[0]['updated_at'] - assert response.status_code == 200 - - -def test_get_letter_contacts_with_multiple_letter_contacts(client, notify_db_session): - service = create_service() - letter_contact_a = create_letter_contact(service, 'Aberdeen, AB23 1XH') - letter_contact_b = create_letter_contact(service, 'London, E1 8QS', False) - - response = client.get('/service/{}/letter-contact'.format(service.id), - headers=[create_admin_authorization_header()]) - json_response = json.loads(response.get_data(as_text=True)) - - assert len(json_response) == 2 - assert response.status_code == 200 - - assert json_response[0]['id'] == str(letter_contact_a.id) - assert json_response[0]['service_id'] == str(letter_contact_a.service_id) - assert json_response[0]['contact_block'] == 'Aberdeen, AB23 1XH' - assert json_response[0]['is_default'] - assert json_response[0]['created_at'] - assert not json_response[0]['updated_at'] - - assert json_response[1]['id'] == str(letter_contact_b.id) - assert json_response[1]['service_id'] == str(letter_contact_b.service_id) - assert json_response[1]['contact_block'] == 'London, E1 8QS' - assert not json_response[1]['is_default'] - assert json_response[1]['created_at'] - assert not json_response[1]['updated_at'] - - -def test_get_letter_contact_by_id(client, notify_db_session): - service = create_service() - letter_contact = create_letter_contact(service, 'London, E1 8QS') - - response = client.get('/service/{}/letter-contact/{}'.format(service.id, letter_contact.id), - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - - assert response.status_code == 200 - assert json.loads(response.get_data(as_text=True)) == letter_contact.serialize() - - -def test_get_letter_contact_return_404_when_invalid_contact_id(client, notify_db_session): - service = create_service() - - response = client.get('/service/{}/letter-contact/{}'.format(service.id, '93d59f88-4aa1-453c-9900-f61e2fc8a2de'), - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - - assert response.status_code == 404 - - -def test_add_service_contact_block(client, sample_service): - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - response = client.post('/service/{}/letter-contact'.format(sample_service.id), - data=data, - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - - assert response.status_code == 201 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 1 - assert json_resp['data'] == results[0].serialize() - - -def test_add_service_letter_contact_can_add_multiple_addresses(client, sample_service): - first = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - client.post('/service/{}/letter-contact'.format(sample_service.id), - data=first, - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - - second = json.dumps({"contact_block": "Aberdeen, AB23 1XH", "is_default": True}) - response = client.post('/service/{}/letter-contact'.format(sample_service.id), - data=second, - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - assert response.status_code == 201 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 2 - default = [x for x in results if x.is_default] - assert json_resp['data'] == default[0].serialize() - first_letter_contact_not_default = [x for x in results if not x.is_default] - assert first_letter_contact_not_default[0].contact_block == 'London, E1 8QS' - - -def test_add_service_letter_contact_block_fine_if_no_default(client, sample_service): - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": False}) - response = client.post('/service/{}/letter-contact'.format(sample_service.id), - data=data, - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - assert response.status_code == 201 - - -def test_add_service_letter_contact_block_404s_when_invalid_service_id(client, notify_db_session): - response = client.post('/service/{}/letter-contact'.format(uuid.uuid4()), - data={}, - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - - assert response.status_code == 404 - result = json.loads(response.get_data(as_text=True)) - assert result['result'] == 'error' - assert result['message'] == 'No result found' - - -def test_update_service_letter_contact(client, sample_service): - original_letter_contact = create_letter_contact(service=sample_service, contact_block="Aberdeen, AB23 1XH") - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": True}) - response = client.post('/service/{}/letter-contact/{}'.format(sample_service.id, original_letter_contact.id), - data=data, - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - - assert response.status_code == 200 - json_resp = json.loads(response.get_data(as_text=True)) - results = ServiceLetterContact.query.all() - assert len(results) == 1 - assert json_resp['data'] == results[0].serialize() - - -def test_update_service_letter_contact_returns_200_when_no_default(client, sample_service): - original_reply_to = create_letter_contact(service=sample_service, contact_block="Aberdeen, AB23 1XH") - data = json.dumps({"contact_block": "London, E1 8QS", "is_default": False}) - response = client.post('/service/{}/letter-contact/{}'.format(sample_service.id, original_reply_to.id), - data=data, - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - assert response.status_code == 200 - - -def test_update_service_letter_contact_returns_404_when_invalid_service_id(client, notify_db_session): - response = client.post('/service/{}/letter-contact/{}'.format(uuid.uuid4(), uuid.uuid4()), - data={}, - headers=[('Content-Type', 'application/json'), create_admin_authorization_header()]) - - assert response.status_code == 404 - result = json.loads(response.get_data(as_text=True)) - assert result['result'] == 'error' - assert result['message'] == 'No result found' - - -def test_delete_service_letter_contact_can_archive_letter_contact(admin_request, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA') - letter_contact = create_letter_contact(service=service, contact_block='Swansea, SN1 3CC', is_default=False) - - admin_request.post( - 'service.delete_service_letter_contact', - service_id=service.id, - letter_contact_id=letter_contact.id, - ) - - assert letter_contact.archived is True - - -def test_delete_service_letter_contact_returns_200_if_archiving_template_default(admin_request, notify_db_session): - service = create_service() - create_letter_contact(service=service, contact_block='Edinburgh, ED1 1AA') - letter_contact = create_letter_contact(service=service, contact_block='Swansea, SN1 3CC', is_default=False) - create_template(service=service, template_type='letter', reply_to=letter_contact.id) - - response = admin_request.post( - 'service.delete_service_letter_contact', - service_id=service.id, - letter_contact_id=letter_contact.id, - _expected_status=200 - ) - assert response['data']['archived'] is True - - def test_add_service_sms_sender_can_add_multiple_senders(client, notify_db_session): service = create_service() data = { @@ -3374,134 +3006,8 @@ def test_get_organisation_for_service_id_return_empty_dict_if_service_not_in_org assert response == {} -def test_cancel_notification_for_service_raises_invalid_request_when_notification_is_not_found( - admin_request, - sample_service, - fake_uuid, -): - response = admin_request.post( - 'service.cancel_notification_for_service', - service_id=sample_service.id, - notification_id=fake_uuid, - _expected_status=404 - ) - assert response['message'] == 'Notification not found' - assert response['result'] == 'error' - - -def test_cancel_notification_for_service_raises_invalid_request_when_notification_is_not_a_letter( - admin_request, - sample_notification, -): - response = admin_request.post( - 'service.cancel_notification_for_service', - service_id=sample_notification.service_id, - notification_id=sample_notification.id, - _expected_status=400 - ) - assert response['message'] == 'Notification cannot be cancelled - only letters can be cancelled' - assert response['result'] == 'error' - - -@pytest.mark.parametrize('notification_status', [ - 'cancelled', - 'sending', - 'sent', - 'delivered', - 'pending', - 'failed', - 'technical-failure', - 'temporary-failure', - 'permanent-failure', - 'validation-failed', - 'virus-scan-failed', - 'returned-letter', -]) -@freeze_time('2018-07-07 12:00:00') -def test_cancel_notification_for_service_raises_invalid_request_when_letter_is_in_wrong_state_to_be_cancelled( - admin_request, - sample_letter_notification, - notification_status, -): - sample_letter_notification.status = notification_status - sample_letter_notification.created_at = datetime.now() - - response = admin_request.post( - 'service.cancel_notification_for_service', - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - _expected_status=400 - ) - if notification_status == 'cancelled': - assert response['message'] == 'This letter has already been cancelled.' - else: - assert response['message'] == ( - f"We could not cancel this letter. " - f"Letter status: {notification_status}, created_at: 2018-07-07 12:00:00" - ) - assert response['result'] == 'error' - - -@pytest.mark.skip(reason="Needs updating for TTS: Remove letters") -@pytest.mark.parametrize('notification_status', ['created', 'pending-virus-check']) -@freeze_time('2018-07-07 16:00:00') -def test_cancel_notification_for_service_updates_letter_if_letter_is_in_cancellable_state( - admin_request, - sample_letter_notification, - notification_status, -): - sample_letter_notification.status = notification_status - sample_letter_notification.created_at = datetime.now() - - response = admin_request.post( - 'service.cancel_notification_for_service', - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - ) - assert response['status'] == 'cancelled' - - -@freeze_time('2017-12-12 17:30:00') -def test_cancel_notification_for_service_raises_error_if_its_too_late_to_cancel( - admin_request, - sample_letter_notification, -): - sample_letter_notification.created_at = datetime(2017, 12, 11, 17, 0) - - response = admin_request.post( - 'service.cancel_notification_for_service', - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - _expected_status=400 - ) - assert response['message'] == 'It’s too late to cancel this letter. Printing started on 11 December at 5.30pm' - assert response['result'] == 'error' - - -@pytest.mark.skip(reason="Needs updating for TTS: Remove letters") -@pytest.mark.parametrize('created_at', [ - datetime(2018, 7, 6, 22, 30), # yesterday evening - datetime(2018, 7, 6, 23, 30), # this morning early hours (in bst) - datetime(2018, 7, 7, 10, 0), # this morning normal hours -]) -@freeze_time('2018-7-7 16:00:00') -def test_cancel_notification_for_service_updates_letter_if_still_time_to_cancel( - admin_request, - sample_letter_notification, - created_at, -): - sample_letter_notification.created_at = created_at - - response = admin_request.post( - 'service.cancel_notification_for_service', - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - ) - assert response['status'] == 'cancelled' - - def test_get_monthly_notification_data_by_service(sample_service, admin_request): - create_ft_notification_status(date(2019, 4, 17), notification_type='letter', service=sample_service, + create_ft_notification_status(date(2019, 4, 17), notification_type='sms', service=sample_service, notification_status='delivered') create_ft_notification_status(date(2019, 3, 5), notification_type='email', service=sample_service, notification_status='sending', count=4) @@ -3513,188 +3019,5 @@ def test_get_monthly_notification_data_by_service(sample_service, admin_request) assert response == [ ['2019-03-01', str(sample_service.id), 'Sample service', 'email', 4, 0, 0, 0, 0, 0], - ['2019-04-01', str(sample_service.id), 'Sample service', 'letter', 0, 1, 0, 0, 0, 0], + ['2019-04-01', str(sample_service.id), 'Sample service', 'sms', 0, 1, 0, 0, 0, 0], ] - - -@freeze_time('2019-12-11 13:30') -def test_get_returned_letter_statistics(admin_request, sample_service): - create_returned_letter(sample_service, reported_at=datetime.utcnow() - timedelta(days=3)) - create_returned_letter(sample_service, reported_at=datetime.utcnow() - timedelta(days=2)) - create_returned_letter(sample_service, reported_at=datetime.utcnow() - timedelta(days=1)) - - response = admin_request.get('service.returned_letter_statistics', service_id=sample_service.id) - - assert response == { - 'returned_letter_count': 3, - 'most_recent_report': '2019-12-10 00:00:00.000000' - } - - -@freeze_time('2019-12-11 13:30') -def test_get_returned_letter_statistics_with_old_returned_letters( - mocker, - admin_request, - sample_service, -): - create_returned_letter(sample_service, reported_at=datetime.utcnow() - timedelta(days=8)) - create_returned_letter(sample_service, reported_at=datetime.utcnow() - timedelta(days=800)) - - count_mock = mocker.patch( - 'app.service.rest.fetch_recent_returned_letter_count', - ) - - assert admin_request.get( - 'service.returned_letter_statistics', - service_id=sample_service.id, - ) == { - 'returned_letter_count': 0, - 'most_recent_report': '2019-12-03 00:00:00.000000', - } - - assert count_mock.called is False - - -def test_get_returned_letter_statistics_with_no_returned_letters( - mocker, - admin_request, - sample_service, -): - count_mock = mocker.patch( - 'app.service.rest.fetch_recent_returned_letter_count', - ) - - assert admin_request.get( - 'service.returned_letter_statistics', - service_id=sample_service.id, - ) == { - 'returned_letter_count': 0, - 'most_recent_report': None, - } - - assert count_mock.called is False - - -@freeze_time('2019-12-11 13:30') -def test_get_returned_letter_summary(admin_request, sample_service): - create_returned_letter(sample_service, reported_at=datetime.utcnow() - timedelta(days=3)) - create_returned_letter(sample_service, reported_at=datetime.utcnow()) - create_returned_letter(sample_service, reported_at=datetime.utcnow()) - - response = admin_request.get('service.returned_letter_summary', service_id=sample_service.id) - - assert len(response) == 2 - assert response[0] == {'returned_letter_count': 2, 'reported_at': '2019-12-11'} - assert response[1] == {'returned_letter_count': 1, 'reported_at': '2019-12-08'} - - -@freeze_time('2019-12-11 13:30') -def test_get_returned_letter(admin_request, sample_letter_template): - job = create_job(template=sample_letter_template) - letter_from_job = create_notification(template=sample_letter_template, client_reference='letter_from_job', - status=NOTIFICATION_RETURNED_LETTER, - job=job, job_row_number=2, - created_at=datetime.utcnow() - timedelta(days=1), - created_by_id=sample_letter_template.service.users[0].id) - create_returned_letter(service=sample_letter_template.service, reported_at=datetime.utcnow(), - notification_id=letter_from_job.id) - - one_off_letter = create_notification(template=sample_letter_template, - status=NOTIFICATION_RETURNED_LETTER, - created_at=datetime.utcnow() - timedelta(days=2), - created_by_id=sample_letter_template.service.users[0].id) - create_returned_letter(service=sample_letter_template.service, reported_at=datetime.utcnow(), - notification_id=one_off_letter.id) - - api_key = create_api_key(service=sample_letter_template.service) - api_letter = create_notification(template=sample_letter_template, client_reference='api_letter', - status=NOTIFICATION_RETURNED_LETTER, - created_at=datetime.utcnow() - timedelta(days=3), - api_key=api_key) - create_returned_letter(service=sample_letter_template.service, reported_at=datetime.utcnow(), - notification_id=api_letter.id) - - precompiled_template = create_template(service=sample_letter_template.service, template_type='letter', hidden=True, - template_name='hidden template') - precompiled_letter = create_notification_history(template=precompiled_template, api_key=api_key, - client_reference='precompiled letter', - created_at=datetime.utcnow() - timedelta(days=4)) - create_returned_letter(service=sample_letter_template.service, reported_at=datetime.utcnow(), - notification_id=precompiled_letter.id) - - uploaded_letter = create_notification_history(template=precompiled_template, client_reference='filename.pdf', - created_at=datetime.utcnow() - timedelta(days=5), - created_by_id=sample_letter_template.service.users[0].id) - create_returned_letter(service=sample_letter_template.service, reported_at=datetime.utcnow(), - notification_id=uploaded_letter.id) - - not_included_in_results_template = create_template(service=create_service(service_name='not included in results'), - template_type='letter') - letter_4 = create_notification_history(template=not_included_in_results_template, - status=NOTIFICATION_RETURNED_LETTER) - create_returned_letter(service=not_included_in_results_template.service, reported_at=datetime.utcnow(), - notification_id=letter_4.id) - response = admin_request.get('service.get_returned_letters', service_id=sample_letter_template.service_id, - reported_at='2019-12-11') - - assert len(response) == 5 - assert response[0]['notification_id'] == str(letter_from_job.id) - assert not response[0]['client_reference'] - assert response[0]['reported_at'] == '2019-12-11' - assert response[0]['created_at'] == '2019-12-10 13:30:00.000000' - assert response[0]['template_name'] == sample_letter_template.name - assert response[0]['template_id'] == str(sample_letter_template.id) - assert response[0]['template_version'] == sample_letter_template.version - assert response[0]['user_name'] == sample_letter_template.service.users[0].name - assert response[0]['original_file_name'] == job.original_file_name - assert response[0]['job_row_number'] == 3 - assert not response[0]['uploaded_letter_file_name'] - - assert response[1]['notification_id'] == str(one_off_letter.id) - assert not response[1]['client_reference'] - assert response[1]['reported_at'] == '2019-12-11' - assert response[1]['created_at'] == '2019-12-09 13:30:00.000000' - assert response[1]['template_name'] == sample_letter_template.name - assert response[1]['template_id'] == str(sample_letter_template.id) - assert response[1]['template_version'] == sample_letter_template.version - assert response[1]['user_name'] == sample_letter_template.service.users[0].name - assert not response[1]['original_file_name'] - assert not response[1]['job_row_number'] - assert not response[1]['uploaded_letter_file_name'] - - assert response[2]['notification_id'] == str(api_letter.id) - assert response[2]['client_reference'] == 'api_letter' - assert response[2]['reported_at'] == '2019-12-11' - assert response[2]['created_at'] == '2019-12-08 13:30:00.000000' - assert response[2]['template_name'] == sample_letter_template.name - assert response[2]['template_id'] == str(sample_letter_template.id) - assert response[2]['template_version'] == sample_letter_template.version - assert response[2]['user_name'] == 'API' - assert not response[2]['original_file_name'] - assert not response[2]['job_row_number'] - assert not response[2]['uploaded_letter_file_name'] - - assert response[3]['notification_id'] == str(precompiled_letter.id) - assert response[3]['client_reference'] == 'precompiled letter' - assert response[3]['reported_at'] == '2019-12-11' - assert response[3]['created_at'] == '2019-12-07 13:30:00.000000' - assert not response[3]['template_name'] - assert not response[3]['template_id'] - assert not response[3]['template_version'] - assert response[3]['user_name'] == 'API' - assert not response[3]['original_file_name'] - assert not response[3]['job_row_number'] - assert not response[3]['uploaded_letter_file_name'] - - assert response[4]['notification_id'] == str(uploaded_letter.id) - assert not response[4]['client_reference'] - assert response[4]['reported_at'] == '2019-12-11' - assert response[4]['created_at'] == '2019-12-06 13:30:00.000000' - assert not response[4]['template_name'] - assert not response[4]['template_id'] - assert not response[4]['template_version'] - assert response[4]['user_name'] == sample_letter_template.service.users[0].name - assert response[4]['email_address'] == sample_letter_template.service.users[0].email_address - assert not response[4]['original_file_name'] - assert not response[4]['job_row_number'] - assert response[4]['uploaded_letter_file_name'] == 'filename.pdf' diff --git a/tests/app/service/test_service_data_retention_rest.py b/tests/app/service/test_service_data_retention_rest.py index 97fbdfd91..ab122aae9 100644 --- a/tests/app/service/test_service_data_retention_rest.py +++ b/tests/app/service/test_service_data_retention_rest.py @@ -10,8 +10,6 @@ def test_get_service_data_retention(client, sample_service): sms_data_retention = create_service_data_retention(service=sample_service) email_data_retention = create_service_data_retention(service=sample_service, notification_type='email', days_of_retention=10) - letter_data_retention = create_service_data_retention(service=sample_service, notification_type='letter', - days_of_retention=30) response = client.get( '/service/{}/data-retention'.format(str(sample_service.id)), @@ -20,10 +18,9 @@ def test_get_service_data_retention(client, sample_service): assert response.status_code == 200 json_response = json.loads(response.get_data(as_text=True)) - assert len(json_response) == 3 + assert len(json_response) == 2 assert json_response[0] == email_data_retention.serialize() assert json_response[1] == sms_data_retention.serialize() - assert json_response[2] == letter_data_retention.serialize() def test_get_service_data_retention_returns_empty_list(client, sample_service): @@ -99,7 +96,7 @@ def test_create_service_data_retention_returns_400_when_notification_type_is_inv json_resp = json.loads(response.get_data(as_text=True)) assert response.status_code == 400 assert json_resp['errors'][0]['error'] == 'ValidationError' - assert json_resp['errors'][0]['message'] == 'notification_type unknown is not one of [sms, letter, email]' + assert json_resp['errors'][0]['message'] == 'notification_type unknown is not one of [sms, email]' def test_create_service_data_retention_returns_400_when_data_retention_for_notification_type_already_exists( diff --git a/tests/app/service/test_statistics.py b/tests/app/service/test_statistics.py index 249a2f6b8..59d193757 100644 --- a/tests/app/service/test_statistics.py +++ b/tests/app/service/test_statistics.py @@ -19,38 +19,33 @@ NewStatsRow = collections.namedtuple('row', ('notification_type', 'status', 'key # email_counts and sms_counts are 3-tuple of requested, delivered, failed -@pytest.mark.idparametrize('stats, email_counts, sms_counts, letter_counts', { - 'empty': ([], [0, 0, 0], [0, 0, 0], [0, 0, 0]), +@pytest.mark.idparametrize('stats, email_counts, sms_counts', { + 'empty': ([], [0, 0, 0], [0, 0, 0]), 'always_increment_requested': ([ StatsRow('email', 'delivered', 1), StatsRow('email', 'failed', 1) - ], [2, 1, 1], [0, 0, 0], [0, 0, 0]), + ], [2, 1, 1], [0, 0, 0]), 'dont_mix_template_types': ([ StatsRow('email', 'delivered', 1), StatsRow('sms', 'delivered', 1), - StatsRow('letter', 'delivered', 1) - ], [1, 1, 0], [1, 1, 0], [1, 1, 0]), + ], [1, 1, 0], [1, 1, 0]), 'convert_fail_statuses_to_failed': ([ StatsRow('email', 'failed', 1), StatsRow('email', 'technical-failure', 1), StatsRow('email', 'temporary-failure', 1), StatsRow('email', 'permanent-failure', 1), - StatsRow('letter', 'validation-failed', 1), - StatsRow('letter', 'virus-scan-failed', 1), - StatsRow('letter', 'permanent-failure', 1), - StatsRow('letter', 'cancelled', 1), - ], [4, 0, 4], [0, 0, 0], [3, 0, 3]), + ], [4, 0, 4], [0, 0, 0]), 'convert_sent_to_delivered': ([ StatsRow('sms', 'sending', 1), StatsRow('sms', 'delivered', 1), StatsRow('sms', 'sent', 1), - ], [0, 0, 0], [3, 2, 0], [0, 0, 0]), + ], [0, 0, 0], [3, 2, 0]), 'handles_none_rows': ([ StatsRow('sms', 'sending', 1), StatsRow(None, None, None) - ], [0, 0, 0], [1, 0, 0], [0, 0, 0]) + ], [0, 0, 0], [1, 0, 0]) }) -def test_format_statistics(stats, email_counts, sms_counts, letter_counts): +def test_format_statistics(stats, email_counts, sms_counts): ret = format_statistics(stats) @@ -66,18 +61,11 @@ def test_format_statistics(stats, email_counts, sms_counts, letter_counts): in zip(['requested', 'delivered', 'failed'], sms_counts) } - assert ret['letter'] == { - status: count - for status, count - in zip(['requested', 'delivered', 'failed'], letter_counts) - } - def test_create_zeroed_stats_dicts(): assert create_zeroed_stats_dicts() == { 'sms': {'requested': 0, 'delivered': 0, 'failed': 0}, 'email': {'requested': 0, 'delivered': 0, 'failed': 0}, - 'letter': {'requested': 0, 'delivered': 0, 'failed': 0}, } @@ -95,12 +83,6 @@ def test_create_stats_dict(): 'permanent-failure': 0, 'temporary-failure': 0, 'virus-scan-failed': 0}}, - 'letter': {'total': 0, - 'test-key': 0, - 'failures': {'technical-failure': 0, - 'permanent-failure': 0, - 'temporary-failure': 0, - 'virus-scan-failed': 0}} } @@ -108,7 +90,6 @@ def test_format_admin_stats_only_includes_test_key_notifications_in_test_key_sec rows = [ NewStatsRow('email', 'technical-failure', 'test', 3), NewStatsRow('sms', 'permanent-failure', 'test', 4), - NewStatsRow('letter', 'virus-scan-failed', 'test', 5), ] stats_dict = format_admin_stats(rows) @@ -120,10 +101,6 @@ def test_format_admin_stats_only_includes_test_key_notifications_in_test_key_sec assert stats_dict['sms']['failures']['permanent-failure'] == 0 assert stats_dict['sms']['test-key'] == 4 - assert stats_dict['letter']['total'] == 0 - assert stats_dict['letter']['failures']['virus-scan-failed'] == 0 - assert stats_dict['letter']['test-key'] == 5 - def test_format_admin_stats_counts_non_test_key_notifications_correctly(): rows = [ @@ -131,7 +108,6 @@ def test_format_admin_stats_counts_non_test_key_notifications_correctly(): NewStatsRow('email', 'created', 'team', 3), NewStatsRow('sms', 'temporary-failure', 'normal', 6), NewStatsRow('sms', 'sent', 'normal', 2), - NewStatsRow('letter', 'pending-virus-check', 'normal', 1), ] stats_dict = format_admin_stats(rows) @@ -141,8 +117,6 @@ def test_format_admin_stats_counts_non_test_key_notifications_correctly(): assert stats_dict['sms']['total'] == 8 assert stats_dict['sms']['failures']['permanent-failure'] == 0 - assert stats_dict['letter']['total'] == 1 - def _stats(requested, delivered, failed): return {'requested': requested, 'delivered': delivered, 'failed': failed} @@ -180,7 +154,7 @@ def test_create_empty_monthly_notification_status_stats_dict(year, expected_year output = create_empty_monthly_notification_status_stats_dict(year) assert sorted(output.keys()) == expected_years for v in output.values(): - assert v == {'sms': {}, 'email': {}, 'letter': {}} + assert v == {'sms': {}, 'email': {}} @freeze_time('2018-06-01 04:59:59') @@ -208,7 +182,7 @@ def test_add_monthly_notification_status_stats(): add_monthly_notification_status_stats(data, rows) assert data == { - '2018-04': {'sms': {'sending': 1, 'delivered': 2}, 'email': {'sending': 4}, 'letter': {}}, - '2018-05': {'sms': {'sending': 24}, 'email': {'sending': 32}, 'letter': {}}, - '2018-06': {'sms': {}, 'email': {}, 'letter': {}}, + '2018-04': {'sms': {'sending': 1, 'delivered': 2}, 'email': {'sending': 4}}, + '2018-05': {'sms': {'sending': 24}, 'email': {'sending': 32}}, + '2018-06': {'sms': {}, 'email': {}}, } diff --git a/tests/app/service/test_statistics_rest.py b/tests/app/service/test_statistics_rest.py index ee5d2ab36..fc1eec28b 100644 --- a/tests/app/service/test_statistics_rest.py +++ b/tests/app/service/test_statistics_rest.py @@ -9,8 +9,6 @@ from app.models import ( KEY_TYPE_NORMAL, KEY_TYPE_TEAM, KEY_TYPE_TEST, - LETTER_TYPE, - PRECOMPILED_TEMPLATE_NAME, SMS_TYPE, ) from tests.app.db import ( @@ -57,8 +55,8 @@ def test_get_template_usage_by_month_returns_correct_data( def test_get_template_usage_by_month_returns_two_templates(admin_request, sample_template, sample_service): template_one = create_template( sample_service, - template_type=LETTER_TYPE, - template_name=PRECOMPILED_TEMPLATE_NAME, + template_type=SMS_TYPE, + template_name="TEST TEMPLATE", hidden=True ) create_ft_notification_status(local_date=datetime(2017, 4, 2), template=template_one, count=1) @@ -80,7 +78,6 @@ def test_get_template_usage_by_month_returns_two_templates(admin_request, sample assert resp_json[0]["month"] == 4 assert resp_json[0]["year"] == 2017 assert resp_json[0]["count"] == 1 - assert resp_json[0]["is_precompiled_letter"] is True assert resp_json[1]["template_id"] == str(sample_template.id) assert resp_json[1]["name"] == sample_template.name @@ -88,7 +85,6 @@ def test_get_template_usage_by_month_returns_two_templates(admin_request, sample assert resp_json[1]["month"] == 4 assert resp_json[1]["year"] == 2017 assert resp_json[1]["count"] == 3 - assert resp_json[1]["is_precompiled_letter"] is False assert resp_json[2]["template_id"] == str(sample_template.id) assert resp_json[2]["name"] == sample_template.name @@ -96,7 +92,6 @@ def test_get_template_usage_by_month_returns_two_templates(admin_request, sample assert resp_json[2]["month"] == 11 assert resp_json[2]["year"] == 2017 assert resp_json[2]["count"] == 1 - assert resp_json[2]["is_precompiled_letter"] is False @pytest.mark.parametrize('today_only, stats', [ @@ -113,7 +108,7 @@ def test_get_service_notification_statistics(admin_request, sample_service, samp today_only=today_only ) - assert set(resp['data'].keys()) == {SMS_TYPE, EMAIL_TYPE, LETTER_TYPE} + assert set(resp['data'].keys()) == {SMS_TYPE, EMAIL_TYPE} assert resp['data'][SMS_TYPE] == stats @@ -126,7 +121,6 @@ def test_get_service_notification_statistics_with_unknown_service(admin_request) assert resp['data'] == { SMS_TYPE: {'requested': 0, 'delivered': 0, 'failed': 0}, EMAIL_TYPE: {'requested': 0, 'delivered': 0, 'failed': 0}, - LETTER_TYPE: {'requested': 0, 'delivered': 0, 'failed': 0}, } @@ -167,7 +161,7 @@ def test_get_monthly_notification_stats_returns_empty_stats_with_correct_dates(a ] assert sorted(response['data'].keys()) == keys for val in response['data'].values(): - assert val == {'sms': {}, 'email': {}, 'letter': {}} + assert val == {'sms': {}, 'email': {}} def test_get_monthly_notification_stats_returns_stats(admin_request, sample_service): @@ -195,8 +189,7 @@ def test_get_monthly_notification_stats_returns_stats(admin_request, sample_serv # it combines the two days 'delivered': 2 }, - 'email': {}, - 'letter': {} + 'email': {} } assert response['data']['2016-07'] == { # it combines the two template types @@ -206,8 +199,7 @@ def test_get_monthly_notification_stats_returns_stats(admin_request, sample_serv }, 'email': { 'delivered': 1 - }, - 'letter': {} + } } @@ -233,8 +225,7 @@ def test_get_monthly_notification_stats_combines_todays_data_and_historic_stats( 'sms': { 'delivered': 1 }, - 'email': {}, - 'letter': {} + 'email': {} } assert response['data']['2016-06'] == { 'sms': { @@ -242,8 +233,7 @@ def test_get_monthly_notification_stats_combines_todays_data_and_historic_stats( 'created': 3, 'delivered': 1, }, - 'email': {}, - 'letter': {} + 'email': {} } @@ -284,6 +274,5 @@ def test_get_monthly_notification_stats_only_gets_for_one_service(admin_request, assert response['data']['2016-06'] == { 'sms': {'created': 1}, - 'email': {}, - 'letter': {} + 'email': {} } diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py index f47ca3527..1cda2b10d 100644 --- a/tests/app/template/test_rest.py +++ b/tests/app/template/test_rest.py @@ -1,45 +1,22 @@ -import base64 import json import random import string import uuid from datetime import datetime, timedelta -import botocore import pytest -import requests_mock from freezegun import freeze_time from notifications_utils import SMS_CHAR_COUNT_LIMIT -from PyPDF2.errors import PdfReadError -from app.dao.templates_dao import ( - dao_get_template_by_id, - dao_get_template_versions, - dao_redact_template, - dao_update_template, -) -from app.models import ( - EMAIL_TYPE, - LETTER_TYPE, - SMS_TYPE, - Template, - TemplateHistory, -) +from app.dao.templates_dao import dao_get_template_by_id, dao_redact_template +from app.models import EMAIL_TYPE, SMS_TYPE, Template, TemplateHistory from tests import create_admin_authorization_header -from tests.app.db import ( - create_letter_contact, - create_notification, - create_service, - create_template, - create_template_folder, -) -from tests.conftest import set_config_values +from tests.app.db import create_service, create_template, create_template_folder @pytest.mark.parametrize('template_type, subject', [ (SMS_TYPE, None), (EMAIL_TYPE, 'subject'), - (LETTER_TYPE, 'subject'), ]) def test_should_create_a_new_template_for_a_service( client, sample_user, template_type, subject @@ -54,8 +31,6 @@ def test_should_create_a_new_template_for_a_service( } if subject: data.update({'subject': subject}) - if template_type == LETTER_TYPE: - data.update({'postage': 'first'}) data = json.dumps(data) auth_header = create_admin_authorization_header() @@ -79,11 +54,6 @@ def test_should_create_a_new_template_for_a_service( else: assert not json_resp['data']['subject'] - if template_type == LETTER_TYPE: - assert json_resp['data']['postage'] == 'first' - else: - assert not json_resp['data']['postage'] - template = Template.query.get(json_resp['data']['id']) from app.schemas import template_schema assert sorted(json_resp['data']) == sorted(template_schema.dump(template)) @@ -115,35 +85,6 @@ def test_create_a_new_template_for_a_service_adds_folder_relationship( assert template.folder == parent_folder -@pytest.mark.parametrize("template_type, expected_postage", [ - (SMS_TYPE, None), (EMAIL_TYPE, None), (LETTER_TYPE, "second") -]) -def test_create_a_new_template_for_a_service_adds_postage_for_letters_only( - client, sample_service, template_type, expected_postage -): - data = { - 'name': 'my template', - 'template_type': template_type, - 'content': 'template content', - 'service': str(sample_service.id), - 'created_by': str(sample_service.users[0].id) - } - if template_type in [EMAIL_TYPE, LETTER_TYPE]: - data["subject"] = "Hi, I have good news" - - data = json.dumps(data) - auth_header = create_admin_authorization_header() - - response = client.post( - '/service/{}/template'.format(sample_service.id), - headers=[('Content-Type', 'application/json'), auth_header], - data=data - ) - assert response.status_code == 201 - template = Template.query.filter(Template.name == 'my template').first() - assert template.postage == expected_postage - - def test_create_template_should_return_400_if_folder_is_for_a_different_service( client, sample_service ): @@ -218,7 +159,6 @@ def test_should_raise_error_if_service_does_not_exist_on_create(client, sample_u @pytest.mark.parametrize('permissions, template_type, subject, expected_error', [ ([EMAIL_TYPE], SMS_TYPE, None, {'template_type': ['Creating text message templates is not allowed']}), ([SMS_TYPE], EMAIL_TYPE, 'subject', {'template_type': ['Creating email templates is not allowed']}), - ([SMS_TYPE], LETTER_TYPE, 'subject', {'template_type': ['Creating letter templates is not allowed']}), ]) def test_should_raise_error_on_create_if_no_permission( client, sample_user, permissions, template_type, subject, expected_error): @@ -249,8 +189,7 @@ def test_should_raise_error_on_create_if_no_permission( @pytest.mark.parametrize('template_type, permissions, expected_error', [ (SMS_TYPE, [EMAIL_TYPE], {'template_type': ['Updating text message templates is not allowed']}), - (EMAIL_TYPE, [LETTER_TYPE], {'template_type': ['Updating email templates is not allowed']}), - (LETTER_TYPE, [SMS_TYPE], {'template_type': ['Updating letter templates is not allowed']}) + (EMAIL_TYPE, [SMS_TYPE], {'template_type': ['Updating email templates is not allowed']}), ]) def test_should_be_error_on_update_if_no_permission( client, @@ -323,8 +262,8 @@ def test_should_be_error_if_service_does_not_exist_on_update(client, fake_uuid): assert json_resp['message'] == 'No result found' -@pytest.mark.parametrize('template_type', [EMAIL_TYPE, LETTER_TYPE]) -def test_must_have_a_subject_on_an_email_or_letter_template(client, sample_user, sample_service, template_type): +@pytest.mark.parametrize('template_type', [EMAIL_TYPE]) +def test_must_have_a_subject_on_an_email_template(client, sample_user, sample_service, template_type): data = { 'name': 'my template', 'template_type': template_type, @@ -347,8 +286,8 @@ def test_must_have_a_subject_on_an_email_or_letter_template(client, sample_user, def test_update_should_update_a_template(client, sample_user): - service = create_service(service_permissions=[LETTER_TYPE]) - template = create_template(service, template_type="letter", postage="second") + service = create_service() + template = create_template(service, template_type="sms") assert template.created_by == service.created_by assert template.created_by != sample_user @@ -356,7 +295,6 @@ def test_update_should_update_a_template(client, sample_user): data = { 'content': 'my template has new content, swell!', 'created_by': str(sample_user.id), - 'postage': 'first' } data = json.dumps(data) auth_header = create_admin_authorization_header() @@ -372,7 +310,6 @@ def test_update_should_update_a_template(client, sample_user): assert update_json_resp['data']['content'] == ( 'my template has new content, swell!' ) - assert update_json_resp['data']['postage'] == 'first' assert update_json_resp['data']['name'] == template.name assert update_json_resp['data']['template_type'] == template.template_type assert update_json_resp['data']['version'] == 2 @@ -429,50 +366,6 @@ def test_should_be_able_to_archive_template_should_remove_template_folders( assert not updated_template.folder -def test_get_precompiled_template_for_service( - client, - notify_user, - sample_service, -): - assert len(sample_service.templates) == 0 - - response = client.get( - '/service/{}/template/precompiled'.format(sample_service.id), - headers=[create_admin_authorization_header()], - ) - assert response.status_code == 200 - assert len(sample_service.templates) == 1 - - data = json.loads(response.get_data(as_text=True)) - assert data['name'] == 'Pre-compiled PDF' - assert data['hidden'] is True - - -def test_get_precompiled_template_for_service_when_service_has_existing_precompiled_template( - client, - notify_user, - sample_service, -): - create_template( - sample_service, - template_name='Exisiting precompiled template', - template_type=LETTER_TYPE, - hidden=True) - assert len(sample_service.templates) == 1 - - response = client.get( - '/service/{}/template/precompiled'.format(sample_service.id), - headers=[create_admin_authorization_header()], - ) - - assert response.status_code == 200 - assert len(sample_service.templates) == 1 - - data = json.loads(response.get_data(as_text=True)) - assert data['name'] == 'Exisiting precompiled template' - assert data['hidden'] is True - - def test_should_be_able_to_get_all_templates_for_a_service(client, sample_user, sample_service): data = { 'name': 'my template 1', @@ -561,13 +454,11 @@ def test_should_get_return_all_fields_by_default( 'hidden', 'id', 'name', - 'postage', 'process_type', 'redact_personalisation', 'reply_to', 'reply_to_text', 'service', - 'service_letter_contact', 'subject', 'template_redacted', 'template_type', @@ -583,7 +474,6 @@ def test_should_get_return_all_fields_by_default( @pytest.mark.parametrize('template_type, expected_content', ( (EMAIL_TYPE, None), (SMS_TYPE, None), - (LETTER_TYPE, None), )) def test_should_not_return_content_and_subject_if_requested( admin_request, @@ -623,11 +513,6 @@ def test_should_not_return_content_and_subject_if_requested( None, 'hello ((name)) we’ve received your ((thing))', SMS_TYPE - ), - ( - 'about your ((thing))', - 'hello ((name)) we’ve received your ((thing))', - LETTER_TYPE ) ] ) @@ -853,54 +738,6 @@ def test_update_set_process_type_on_template(client, sample_template): assert template.process_type == 'priority' -def test_create_a_template_with_reply_to(admin_request, sample_user): - service = create_service(service_permissions=['letter']) - letter_contact = create_letter_contact(service, "Edinburgh, ED1 1AA") - data = { - 'name': 'my template', - 'subject': 'subject', - 'template_type': 'letter', - 'content': 'template content', - 'service': str(service.id), - 'created_by': str(sample_user.id), - 'reply_to': str(letter_contact.id), - } - - json_resp = admin_request.post('template.create_template', service_id=service.id, _data=data, _expected_status=201) - - assert json_resp['data']['template_type'] == 'letter' - assert json_resp['data']['reply_to'] == str(letter_contact.id) - assert json_resp['data']['reply_to_text'] == letter_contact.contact_block - - template = Template.query.get(json_resp['data']['id']) - from app.schemas import template_schema - assert sorted(json_resp['data']) == sorted(template_schema.dump(template)) - th = TemplateHistory.query.filter_by(id=template.id, version=1).one() - assert th.service_letter_contact_id == letter_contact.id - - -def test_create_a_template_with_foreign_service_reply_to(admin_request, sample_user): - service = create_service(service_permissions=['letter']) - service2 = create_service(service_name='test service', email_from='test@example.com', - service_permissions=['letter']) - letter_contact = create_letter_contact(service2, "Edinburgh, ED1 1AA") - data = { - 'name': 'my template', - 'subject': 'subject', - 'template_type': 'letter', - 'content': 'template content', - 'service': str(service.id), - 'created_by': str(sample_user.id), - 'reply_to': str(letter_contact.id), - } - - json_resp = admin_request.post('template.create_template', service_id=service.id, _data=data, _expected_status=400) - - assert json_resp['message'] == "letter_contact_id {} does not exist in database for service id {}".format( - str(letter_contact.id), str(service.id) - ) - - @pytest.mark.parametrize('post_data, expected_errors', [ ( {}, @@ -912,15 +749,7 @@ def test_create_a_template_with_foreign_service_reply_to(admin_request, sample_u {"error": "ValidationError", "message": "service is a required property"}, {"error": "ValidationError", "message": "created_by is a required property"}, ] - ), - ( - {"name": "my template", "template_type": "sms", "content": "hi", "postage": "third", - "service": "1af43c02-b5a8-4923-ad7f-5279b75ff2d0", "created_by": "30587644-9083-44d8-a114-98887f07f1e3"}, - [ - {"error": "ValidationError", - "message": "postage invalid. It must be first, second, europe or rest-of-world."}, - ] - ), + ) ]) def test_create_template_validates_against_json_schema( admin_request, @@ -937,113 +766,6 @@ def test_create_template_validates_against_json_schema( assert response['errors'] == expected_errors -@pytest.mark.parametrize('template_default, service_default', - [('template address', 'service address'), - (None, 'service address'), - ('template address', None), - (None, None) - ]) -def test_get_template_reply_to(client, sample_service, template_default, service_default): - auth_header = create_admin_authorization_header() - if service_default: - create_letter_contact( - service=sample_service, contact_block=service_default, is_default=True - ) - if template_default: - template_default_contact = create_letter_contact( - service=sample_service, contact_block=template_default, is_default=False - ) - reply_to_id = str(template_default_contact.id) if template_default else None - template = create_template(service=sample_service, template_type='letter', reply_to=reply_to_id) - - resp = client.get('/service/{}/template/{}'.format(template.service_id, template.id), - headers=[auth_header]) - - assert resp.status_code == 200, resp.get_data(as_text=True) - json_resp = json.loads(resp.get_data(as_text=True)) - - assert 'service_letter_contact_id' not in json_resp['data'] - assert json_resp['data']['reply_to'] == reply_to_id - assert json_resp['data']['reply_to_text'] == template_default - - -def test_update_template_reply_to(client, sample_letter_template): - auth_header = create_admin_authorization_header() - letter_contact = create_letter_contact(sample_letter_template.service, "Edinburgh, ED1 1AA") - data = { - 'reply_to': str(letter_contact.id), - } - - resp = client.post('/service/{}/template/{}'.format(sample_letter_template.service_id, sample_letter_template.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - assert resp.status_code == 200, resp.get_data(as_text=True) - - template = dao_get_template_by_id(sample_letter_template.id) - assert template.service_letter_contact_id == letter_contact.id - th = TemplateHistory.query.filter_by(id=sample_letter_template.id, version=2).one() - assert th.service_letter_contact_id == letter_contact.id - - -def test_update_template_reply_to_set_to_blank(client, notify_db_session): - auth_header = create_admin_authorization_header() - service = create_service(service_permissions=['letter']) - letter_contact = create_letter_contact(service, "Edinburgh, ED1 1AA") - template = create_template(service=service, template_type='letter', reply_to=letter_contact.id) - - data = { - 'reply_to': None, - } - - resp = client.post('/service/{}/template/{}'.format(template.service_id, template.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - assert resp.status_code == 200, resp.get_data(as_text=True) - - template = dao_get_template_by_id(template.id) - assert template.service_letter_contact_id is None - th = TemplateHistory.query.filter_by(id=template.id, version=2).one() - assert th.service_letter_contact_id is None - - -def test_update_template_validates_postage(admin_request, sample_service_full_permissions): - template = create_template(service=sample_service_full_permissions, template_type='letter') - - response = admin_request.post( - 'template.update_template', - service_id=sample_service_full_permissions.id, - template_id=template.id, - _data={"postage": "third"}, - _expected_status=400 - ) - assert 'postage invalid' in response['errors'][0]['message'] - - -def test_update_template_with_foreign_service_reply_to(client, sample_letter_template): - auth_header = create_admin_authorization_header() - - service2 = create_service(service_name='test service', email_from='test@example.com', - service_permissions=['letter']) - letter_contact = create_letter_contact(service2, "Edinburgh, ED1 1AA") - - data = { - 'reply_to': str(letter_contact.id), - } - - resp = client.post('/service/{}/template/{}'.format(sample_letter_template.service_id, sample_letter_template.id), - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - assert resp.status_code == 400, resp.get_data(as_text=True) - json_resp = json.loads(resp.get_data(as_text=True)) - - assert json_resp['message'] == "letter_contact_id {} does not exist in database for service id {}".format( - str(letter_contact.id), str(sample_letter_template.service_id) - ) - - def test_update_redact_template(admin_request, sample_template): assert sample_template.redact_personalisation is False @@ -1131,587 +853,3 @@ def test_update_redact_template_400s_if_no_created_by(admin_request, sample_temp assert sample_template.redact_personalisation is False assert sample_template.template_redacted.updated_at == original_updated_time - - -def test_preview_letter_template_by_id_invalid_file_type( - sample_letter_notification, - admin_request): - - resp = admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=sample_letter_notification.service_id, - template_id=sample_letter_notification.template_id, - notification_id=sample_letter_notification.id, - file_type='doc', - _expected_status=400 - ) - - assert ['file_type must be pdf or png'] == resp['message']['content'] - - -@freeze_time('2012-12-12') -@pytest.mark.parametrize('file_type', ('png', 'pdf')) -def test_preview_letter_template_by_id_valid_file_type( - notify_api, - sample_letter_notification, - admin_request, - file_type, -): - sample_letter_notification.created_at = datetime.utcnow() - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - with requests_mock.Mocker() as request_mock: - content = b'\x00\x01' - - mock_post = request_mock.post( - 'http://localhost/notifications-template-preview/preview.{}'.format(file_type), - content=content, - headers={'X-pdf-page-count': '1'}, - status_code=200 - ) - - resp = admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - file_type=file_type, - ) - - post_json = mock_post.last_request.json() - assert post_json['template']['id'] == str(sample_letter_notification.template_id) - assert post_json['values'] == { - 'address_line_1': 'A1', - 'address_line_2': 'A2', - 'address_line_3': 'A3', - 'address_line_4': 'A4', - 'address_line_5': 'A5', - 'address_line_6': 'A6', - 'postcode': 'A_POST', - } - assert post_json['date'] == '2012-12-12T00:00:00' - assert post_json['filename'] is None - assert base64.b64decode(resp['content']) == content - - -@freeze_time('2012-12-12') -def test_preview_letter_template_by_id_shows_template_version_used_by_notification( - notify_api, - sample_letter_notification, - sample_letter_template, - admin_request -): - sample_letter_notification.created_at = datetime.utcnow() - assert sample_letter_notification.template_version == 1 - - # Create a new template history to check that our preview doesn't use the newest version - # but instead the one linked with the notification - sample_letter_template.content = 'new content' - dao_update_template(sample_letter_template) - versions = dao_get_template_versions(sample_letter_notification.service.id, sample_letter_template.id) - assert len(versions) == 2 - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - with requests_mock.Mocker() as request_mock: - content = b'\x00\x01' - - mock_post = request_mock.post( - 'http://localhost/notifications-template-preview/preview.png', - content=content, - headers={'X-pdf-page-count': '1'}, - status_code=200 - ) - - admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - file_type='png', - ) - - post_json = mock_post.last_request.json() - assert post_json['template']['id'] == str(sample_letter_notification.template_id) - assert post_json['template']['version'] == '1' - - -def test_preview_letter_template_by_id_template_preview_500( - notify_api, - client, - admin_request, - sample_letter_notification): - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - import requests_mock - with requests_mock.Mocker() as request_mock: - content = b'\x00\x01' - - mock_post = request_mock.post( - 'http://localhost/notifications-template-preview/preview.pdf', - content=content, - headers={'X-pdf-page-count': '1'}, - status_code=404 - ) - - resp = admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=sample_letter_notification.service_id, - notification_id=sample_letter_notification.id, - file_type='pdf', - _expected_status=500 - ) - - assert mock_post.last_request.json() - assert 'Status code: 404' in resp['message'] - assert 'Error generating preview letter for {}'.format(sample_letter_notification.id) in resp['message'] - - -def test_preview_letter_template_precompiled_pdf_file_type( - notify_api, - client, - admin_request, - sample_service, - mocker -): - - template = create_template(sample_service, - template_type='letter', - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - hidden=True) - - notification = create_notification(template) - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - with requests_mock.Mocker(): - - content = b'\x00\x01' - - mock_get_letter_pdf = mocker.patch( - 'app.template.rest.get_letter_pdf_and_metadata', - return_value=(content, { - "message": "", - "invalid_pages": "", - "page_count": "1" - }) - ) - - resp = admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=notification.service_id, - notification_id=notification.id, - file_type='pdf' - ) - - assert mock_get_letter_pdf.called_once_with(notification) - assert base64.b64decode(resp['content']) == content - - -def test_preview_letter_template_precompiled_s3_error( - notify_api, - client, - admin_request, - sample_service, - mocker -): - - template = create_template(sample_service, - template_type='letter', - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - hidden=True) - - notification = create_notification(template) - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - with requests_mock.Mocker(): - - mocker.patch('app.template.rest.get_letter_pdf_and_metadata', - side_effect=botocore.exceptions.ClientError( - {'Error': {'Code': '403', 'Message': 'Unauthorized'}}, - 'GetObject' - )) - - request = admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=notification.service_id, - notification_id=notification.id, - file_type='pdf', - _expected_status=500 - ) - - assert request['message'] == "Error extracting requested page from PDF file for notification_id {} type " \ - " An error occurred (403) " \ - "when calling the GetObject operation: Unauthorized".format(notification.id) - - -@pytest.mark.parametrize( - "requested_page, message, expected_post_url", - [ - # page defaults to 1, page is valid, no overlay shown - ("", "", 'precompiled-preview.png'), - # page is valid, no overlay shown - ("1", "", 'precompiled-preview.png'), - # page is invalid but not because content is outside printable area so no overlay - ("1", "letter-not-a4-portrait-oriented", 'precompiled-preview.png'), - # page is invalid, overlay shown - ("1", "content-outside-printable-area", 'precompiled/overlay.png?page_number=1'), - # page is valid, no overlay shown - ("2", "content-outside-printable-area", 'precompiled-preview.png'), - # page is invalid, overlay shown - ("3", "content-outside-printable-area", 'precompiled/overlay.png?page_number=3'), - ] -) -def test_preview_letter_template_precompiled_for_png_shows_overlay_on_pages_with_content_outside_printable_area( - notify_api, - client, - admin_request, - sample_service, - mocker, - requested_page, - message, - expected_post_url, -): - - template = create_template(sample_service, - template_type='letter', - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - hidden=True) - - notification = create_notification(template) - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - with requests_mock.Mocker() as request_mock: - - pdf_content = b'\x00\x01' - expected_returned_content = b'\x00\x02' - - metadata = { - "message": message, - "invalid_pages": "[1,3]", - "page_count": "4" - } - - mock_get_letter_pdf = mocker.patch( - 'app.template.rest.get_letter_pdf_and_metadata', - return_value=(pdf_content, metadata) - ) - - mocker.patch('app.template.rest.extract_page_from_pdf', return_value=pdf_content) - - mock_post = request_mock.post( - 'http://localhost/notifications-template-preview/{}'.format(expected_post_url), - content=expected_returned_content, - headers={'X-pdf-page-count': '4'}, - status_code=200 - ) - - response = admin_request.get( - 'template.preview_letter_template_by_notification_id', - page=requested_page, - service_id=notification.service_id, - notification_id=notification.id, - file_type="png", - ) - - with pytest.raises(ValueError): - mock_post.last_request.json() - assert mock_get_letter_pdf.called_once_with(notification) - assert base64.b64decode(response['content']) == expected_returned_content - assert response["metadata"] == metadata - - -@pytest.mark.parametrize( - "invalid_pages", - [ - "[1,3]", - "[2,4]", # it shouldn't make a difference if the error was on the first page or not - ] -) -def test_preview_letter_template_precompiled_for_pdf_shows_overlay_on_all_pages_if_content_outside_printable_area( - notify_api, - client, - admin_request, - sample_service, - mocker, - invalid_pages, -): - - template = create_template(sample_service, - template_type='letter', - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - hidden=True) - - notification = create_notification(template) - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - with requests_mock.Mocker() as request_mock: - - pdf_content = b'\x00\x01' - expected_returned_content = b'\x00\x02' - - metadata = { - "message": "content-outside-printable-area", - "invalid_pages": invalid_pages, - "page_count": "4" - } - - mock_get_letter_pdf = mocker.patch( - 'app.template.rest.get_letter_pdf_and_metadata', - return_value=(pdf_content, metadata) - ) - - mocker.patch('app.template.rest.extract_page_from_pdf', return_value=pdf_content) - - mock_post = request_mock.post( - 'http://localhost/notifications-template-preview/precompiled/overlay.pdf', - content=expected_returned_content, - headers={'X-pdf-page-count': '4'}, - status_code=200 - ) - - response = admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=notification.service_id, - notification_id=notification.id, - file_type="pdf", - ) - - with pytest.raises(ValueError): - mock_post.last_request.json() - assert mock_get_letter_pdf.called_once_with(notification) - assert base64.b64decode(response['content']) == expected_returned_content - assert response["metadata"] == metadata - - -@pytest.mark.parametrize('page_number,expect_preview_url', [ - ('', 'http://localhost/notifications-template-preview/precompiled-preview.png?hide_notify=true'), - ('1', 'http://localhost/notifications-template-preview/precompiled-preview.png?hide_notify=true'), - ('2', 'http://localhost/notifications-template-preview/precompiled-preview.png') -]) -def test_preview_letter_template_precompiled_png_file_type_hide_notify_tag_only_on_first_page( - notify_api, - client, - admin_request, - sample_service, - mocker, - page_number, - expect_preview_url -): - - template = create_template(sample_service, - template_type='letter', - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - hidden=True) - - notification = create_notification(template) - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - pdf_content = b'\x00\x01' - png_content = b'\x00\x02' - encoded = base64.b64encode(png_content).decode('utf-8') - - mocker.patch( - 'app.template.rest.get_letter_pdf_and_metadata', - return_value=(pdf_content, { - "message": "", - "invalid_pages": "", - "page_count": "2" - }) - ) - mocker.patch('app.template.rest.extract_page_from_pdf', return_value=png_content) - mock_get_png_preview = mocker.patch('app.template.rest._get_png_preview_or_overlaid_pdf', return_value=encoded) - - admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=notification.service_id, - notification_id=notification.id, - file_type='png', - page=page_number - ) - - mock_get_png_preview.assert_called_once_with( - expect_preview_url, encoded, notification.id, json=False - ) - - -def test_preview_letter_template_precompiled_png_template_preview_500_error( - notify_api, - client, - admin_request, - sample_service, - mocker -): - - template = create_template(sample_service, - template_type='letter', - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - hidden=True) - - notification = create_notification(template) - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - with requests_mock.Mocker() as request_mock: - - pdf_content = b'\x00\x01' - png_content = b'\x00\x02' - - mocker.patch('app.template.rest.get_letter_pdf_and_metadata', return_value=(pdf_content, { - "message": "", - "invalid_pages": "", - "page_count": "1" - })) - - mocker.patch('app.template.rest.extract_page_from_pdf', return_value=pdf_content) - - mock_post = request_mock.post( - 'http://localhost/notifications-template-preview/precompiled-preview.png', - content=png_content, - headers={'X-pdf-page-count': '1'}, - status_code=500 - ) - - admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=notification.service_id, - notification_id=notification.id, - file_type='png', - _expected_status=500 - - ) - - with pytest.raises(ValueError): - mock_post.last_request.json() - - -def test_preview_letter_template_precompiled_png_template_preview_400_error( - notify_api, - client, - admin_request, - sample_service, - mocker -): - - template = create_template(sample_service, - template_type='letter', - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - hidden=True) - - notification = create_notification(template) - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - with requests_mock.Mocker() as request_mock: - - pdf_content = b'\x00\x01' - png_content = b'\x00\x02' - - mocker.patch('app.template.rest.get_letter_pdf_and_metadata', return_value=(pdf_content, { - "message": "", - "invalid_pages": "", - "page_count": "1" - })) - - mocker.patch('app.template.rest.extract_page_from_pdf', return_value=pdf_content) - - mock_post = request_mock.post( - 'http://localhost/notifications-template-preview/precompiled-preview.png', - content=png_content, - headers={'X-pdf-page-count': '1'}, - status_code=404 - ) - - admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=notification.service_id, - notification_id=notification.id, - file_type='png', - _expected_status=500 - ) - - with pytest.raises(ValueError): - mock_post.last_request.json() - - -def test_preview_letter_template_precompiled_png_template_preview_pdf_error( - notify_api, - client, - admin_request, - sample_service, - mocker -): - - template = create_template(sample_service, - template_type='letter', - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - hidden=True) - - notification = create_notification(template) - - with set_config_values(notify_api, { - 'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview', - 'TEMPLATE_PREVIEW_API_KEY': 'test-key' - }): - with requests_mock.Mocker() as request_mock: - - pdf_content = b'\x00\x01' - png_content = b'\x00\x02' - - mocker.patch('app.template.rest.get_letter_pdf_and_metadata', return_value=(pdf_content, { - "message": "", - "invalid_pages": "", - "page_count": "1" - })) - - error_message = "PDF Error message" - mocker.patch('app.template.rest.extract_page_from_pdf', side_effect=PdfReadError(error_message)) - - request_mock.post( - 'http://localhost/notifications-template-preview/precompiled-preview.png', - content=png_content, - headers={'X-pdf-page-count': '1'}, - status_code=404 - ) - - request = admin_request.get( - 'template.preview_letter_template_by_notification_id', - service_id=notification.service_id, - notification_id=notification.id, - file_type='png', - _expected_status=500 - ) - - assert request['message'] == "Error extracting requested page from PDF file for notification_id {} type " \ - "{} {}".format(notification.id, type(PdfReadError()), error_message) diff --git a/tests/app/template/test_rest_history.py b/tests/app/template/test_rest_history.py index 47d7d5ac5..3e6f400d2 100644 --- a/tests/app/template/test_rest_history.py +++ b/tests/app/template/test_rest_history.py @@ -5,7 +5,6 @@ from flask import url_for from app.dao.templates_dao import dao_update_template from tests import create_admin_authorization_header -from tests.app.db import create_letter_contact def test_template_history_version(notify_api, sample_user, sample_template): @@ -99,22 +98,3 @@ def test_all_versions_of_template(notify_api, sample_template): assert json_resp['data'][1]['content'] == newer_content assert json_resp['data'][1]['updated_at'] assert json_resp['data'][2]['content'] == old_content - - -def test_update_template_reply_to_updates_history(client, sample_letter_template): - auth_header = create_admin_authorization_header() - letter_contact = create_letter_contact(sample_letter_template.service, "Edinburgh, ED1 1AA") - - sample_letter_template.reply_to = letter_contact.id - dao_update_template(sample_letter_template) - - resp = client.get( - '/service/{}/template/{}/version/2'.format(sample_letter_template.service_id, sample_letter_template.id), - headers=[auth_header] - ) - assert resp.status_code == 200 - - hist_json_resp = json.loads(resp.get_data(as_text=True)) - assert 'service_letter_contact_id' not in hist_json_resp['data'] - assert hist_json_resp['data']['reply_to'] == str(letter_contact.id) - assert hist_json_resp['data']['reply_to_text'] == letter_contact.contact_block diff --git a/tests/app/template_statistics/test_rest.py b/tests/app/template_statistics/test_rest.py index efc063af4..2f124dbe7 100644 --- a/tests/app/template_statistics/test_rest.py +++ b/tests/app/template_statistics/test_rest.py @@ -42,7 +42,6 @@ def test_get_template_statistics_for_service_by_day_returns_template_info(admin_ assert json_resp['data'][0]['template_id'] == str(sample_notification.template_id) assert json_resp['data'][0]['template_name'] == 'sms Template Name' assert json_resp['data'][0]['template_type'] == 'sms' - assert json_resp['data'][0]['is_precompiled_letter'] is False @pytest.mark.parametrize('var_name', ['limit_days', 'whole_days']) @@ -77,8 +76,7 @@ def test_get_template_statistics_for_service_by_day_goes_to_db( count=3, template_name=sample_template.name, notification_type=sample_template.template_type, - status='created', - is_precompiled_letter=False + status='created' ) ] ) @@ -93,8 +91,7 @@ def test_get_template_statistics_for_service_by_day_goes_to_db( "count": 3, "template_name": sample_template.name, "template_type": sample_template.template_type, - "status": "created", - "is_precompiled_letter": False + "status": "created" }] # dao only called for 2nd, since redis returned values for first call diff --git a/tests/app/test_config.py b/tests/app/test_config.py index 1f77278d1..fe2fef296 100644 --- a/tests/app/test_config.py +++ b/tests/app/test_config.py @@ -4,7 +4,7 @@ from app.config import QueueNames def test_queue_names_all_queues_correct(): # Need to ensure that all_queues() only returns queue names used in API queues = QueueNames.all_queues() - assert len(queues) == 17 + assert len(queues) == 15 assert set([ QueueNames.PRIORITY, QueueNames.PERIODIC, @@ -16,10 +16,8 @@ def test_queue_names_all_queues_correct(): QueueNames.JOBS, QueueNames.RETRY, QueueNames.NOTIFY, - QueueNames.CREATE_LETTERS_PDF, QueueNames.CALLBACKS, QueueNames.CALLBACKS_RETRY, - QueueNames.LETTERS, QueueNames.SMS_CALLBACKS, QueueNames.SAVE_API_EMAIL, QueueNames.SAVE_API_SMS, diff --git a/tests/app/test_model.py b/tests/app/test_model.py index 8e42900d0..e68a35c27 100644 --- a/tests/app/test_model.py +++ b/tests/app/test_model.py @@ -7,22 +7,16 @@ from app.models import ( EMAIL_TYPE, MOBILE_TYPE, NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, NOTIFICATION_FAILED, NOTIFICATION_PENDING, - NOTIFICATION_SENDING, - NOTIFICATION_STATUS_LETTER_ACCEPTED, - NOTIFICATION_STATUS_LETTER_RECEIVED, NOTIFICATION_STATUS_TYPES_FAILED, NOTIFICATION_TECHNICAL_FAILURE, - PRECOMPILED_TEMPLATE_NAME, SMS_TYPE, Notification, ServiceGuestList, ) from tests.app.db import ( create_inbound_number, - create_letter_contact, create_notification, create_reply_to_email, create_service, @@ -63,22 +57,16 @@ def test_should_not_build_service_guest_list_from_invalid_contact(recipient_type @pytest.mark.parametrize('initial_statuses, expected_statuses', [ # passing in single statuses as strings (NOTIFICATION_FAILED, NOTIFICATION_STATUS_TYPES_FAILED), - (NOTIFICATION_STATUS_LETTER_ACCEPTED, [NOTIFICATION_SENDING, NOTIFICATION_CREATED]), (NOTIFICATION_CREATED, [NOTIFICATION_CREATED]), (NOTIFICATION_TECHNICAL_FAILURE, [NOTIFICATION_TECHNICAL_FAILURE]), # passing in lists containing single statuses ([NOTIFICATION_FAILED], NOTIFICATION_STATUS_TYPES_FAILED), ([NOTIFICATION_CREATED], [NOTIFICATION_CREATED]), ([NOTIFICATION_TECHNICAL_FAILURE], [NOTIFICATION_TECHNICAL_FAILURE]), - (NOTIFICATION_STATUS_LETTER_RECEIVED, NOTIFICATION_DELIVERED), # passing in lists containing multiple statuses ([NOTIFICATION_FAILED, NOTIFICATION_CREATED], NOTIFICATION_STATUS_TYPES_FAILED + [NOTIFICATION_CREATED]), ([NOTIFICATION_CREATED, NOTIFICATION_PENDING], [NOTIFICATION_CREATED, NOTIFICATION_PENDING]), ([NOTIFICATION_CREATED, NOTIFICATION_TECHNICAL_FAILURE], [NOTIFICATION_CREATED, NOTIFICATION_TECHNICAL_FAILURE]), - ( - [NOTIFICATION_FAILED, NOTIFICATION_STATUS_LETTER_ACCEPTED], - NOTIFICATION_STATUS_TYPES_FAILED + [NOTIFICATION_SENDING, NOTIFICATION_CREATED] - ), # checking we don't end up with duplicates ( [NOTIFICATION_FAILED, NOTIFICATION_CREATED, NOTIFICATION_TECHNICAL_FAILURE], @@ -121,11 +109,6 @@ def test_notification_for_csv_returns_correct_job_row_number(sample_job): ('sms', 'temporary-failure', 'Phone not accepting messages right now'), ('sms', 'permanent-failure', 'Phone number doesn’t exist'), ('sms', 'sent', 'Sent internationally'), - ('letter', 'created', 'Accepted'), - ('letter', 'sending', 'Accepted'), - ('letter', 'technical-failure', 'Technical failure'), - ('letter', 'permanent-failure', 'Permanent failure'), - ('letter', 'delivered', 'Received') ]) def test_notification_for_csv_returns_formatted_status( sample_service, @@ -184,30 +167,13 @@ def test_notification_subject_is_none_for_sms(sample_service): assert notification.subject is None -@pytest.mark.parametrize('template_type', ['email', 'letter']) +@pytest.mark.parametrize('template_type', ['email']) def test_notification_subject_fills_in_placeholders(sample_service, template_type): template = create_template(service=sample_service, template_type=template_type, subject='((name))') notification = create_notification(template=template, personalisation={'name': 'hello'}) assert notification.subject == 'hello' -def test_letter_notification_serializes_with_address(client, sample_letter_notification): - sample_letter_notification.personalisation = { - 'address_line_1': 'foo', - 'address_line_3': 'bar', - 'address_line_5': None, - 'postcode': 'SW1 1AA' - } - res = sample_letter_notification.serialize() - assert res['line_1'] == 'foo' - assert res['line_2'] is None - assert res['line_3'] == 'bar' - assert res['line_4'] is None - assert res['line_5'] is None - assert res['line_6'] is None - assert res['postcode'] == 'SW1 1AA' - - def test_notification_serializes_created_by_name_with_no_created_by_id(client, sample_notification): res = sample_notification.serialize() assert res['created_by_name'] is None @@ -229,11 +195,6 @@ def test_email_notification_serializes_with_subject(client, sample_email_templat assert res['subject'] == 'Email Subject' -def test_letter_notification_serializes_with_subject(client, sample_letter_template): - res = sample_letter_template.serialize_for_v2() - assert res['subject'] == 'Template subject' - - def test_notification_references_template_history(client, sample_template): noti = create_notification(sample_template) sample_template.version = 3 @@ -280,62 +241,11 @@ def test_service_get_default_reply_to_email_address(sample_service): assert sample_service.get_default_reply_to_email_address() == 'default@email.com' -def test_service_get_default_contact_letter(sample_service): - create_letter_contact(service=sample_service, contact_block='London,\nNW1A 1AA') - - assert sample_service.get_default_letter_contact() == 'London,\nNW1A 1AA' - - def test_service_get_default_sms_sender(notify_db_session): service = create_service() assert service.get_default_sms_sender() == 'testing' -def test_letter_notification_serializes_correctly(client, sample_letter_notification): - sample_letter_notification.personalisation = { - 'addressline1': 'test', - 'addressline2': 'London', - 'postcode': 'N1', - } - - json = sample_letter_notification.serialize() - assert json['line_1'] == 'test' - assert json['line_2'] == 'London' - assert json['postcode'] == 'N1' - - -def test_letter_notification_postcode_can_be_null_for_precompiled_letters(client, sample_letter_notification): - sample_letter_notification.personalisation = { - 'address_line_1': 'test', - 'address_line_2': 'London', - } - - json = sample_letter_notification.serialize() - assert json['line_1'] == 'test' - assert json['line_2'] == 'London' - assert json['postcode'] is None - - -def test_is_precompiled_letter_false(sample_letter_template): - assert not sample_letter_template.is_precompiled_letter - - -def test_is_precompiled_letter_true(sample_letter_template): - sample_letter_template.hidden = True - sample_letter_template.name = PRECOMPILED_TEMPLATE_NAME - assert sample_letter_template.is_precompiled_letter - - -def test_is_precompiled_letter_hidden_true_not_name(sample_letter_template): - sample_letter_template.hidden = True - assert not sample_letter_template.is_precompiled_letter - - -def test_is_precompiled_letter_name_correct_not_hidden(sample_letter_template): - sample_letter_template.name = PRECOMPILED_TEMPLATE_NAME - assert not sample_letter_template.is_precompiled_letter - - def test_template_folder_is_parent(sample_service): x = None folders = [] diff --git a/tests/app/upload/__init__.py b/tests/app/upload/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/app/upload/test_rest.py b/tests/app/upload/test_rest.py deleted file mode 100644 index 53195e151..000000000 --- a/tests/app/upload/test_rest.py +++ /dev/null @@ -1,254 +0,0 @@ -from datetime import datetime, timedelta - -from freezegun import freeze_time - -from app.models import JOB_STATUS_FINISHED, JOB_STATUS_PENDING, LETTER_TYPE -from tests.app.db import ( - create_ft_notification_status, - create_job, - create_notification, - create_template, -) -from tests.conftest import set_config - - -def create_uploaded_letter(letter_template, service, status='created', created_at=None): - return create_notification( - template=letter_template, - to_field="742 Evergreen Terrace", - status=status, - reference="dvla-reference", - client_reference="file-name", - one_off=True, - created_by_id=service.users[0].id, - created_at=created_at - ) - - -def create_precompiled_template(service): - return create_template( - service, - template_type=LETTER_TYPE, - template_name='Pre-compiled PDF', - subject='Pre-compiled PDF', - content="", - hidden=True, - postage="second", - ) - - -@freeze_time('2020-02-02 14:00') -def test_get_uploads(admin_request, sample_template): - letter_template = create_precompiled_template(sample_template.service) - - create_uploaded_letter(letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow() - timedelta(minutes=4)) - upload_2 = create_job(template=sample_template, - processing_started=datetime.utcnow() - timedelta(minutes=3), - job_status=JOB_STATUS_FINISHED) - create_uploaded_letter(letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow() - timedelta(minutes=2)) - upload_4 = create_job(template=sample_template, - processing_started=datetime.utcnow() - timedelta(minutes=1), - job_status=JOB_STATUS_FINISHED) - upload_5 = create_job(template=sample_template, processing_started=None, - job_status=JOB_STATUS_PENDING, notification_count=10) - - service_id = sample_template.service.id - - resp_json = admin_request.get('upload.get_uploads_by_service', service_id=service_id) - data = resp_json['data'] - assert len(data) == 4 - assert data[0] == {'id': str(upload_5.id), - 'original_file_name': 'some.csv', - 'recipient': None, - 'notification_count': 10, - 'template_type': 'sms', - 'created_at': upload_5.created_at.strftime("%Y-%m-%d %H:%M:%S"), - 'statistics': [], - 'upload_type': 'job'} - assert data[1] == {'id': None, - 'original_file_name': 'Uploaded letters', - 'recipient': None, - 'notification_count': 2, - 'template_type': 'letter', - 'created_at': upload_4.created_at.replace(hour=22, minute=30).strftime( - "%Y-%m-%d %H:%M:%S"), - 'statistics': [], - 'upload_type': 'letter_day'} - assert data[3] == {'id': str(upload_2.id), - 'original_file_name': "some.csv", - 'recipient': None, - 'notification_count': 1, - 'template_type': 'sms', - 'created_at': upload_2.created_at.strftime( - "%Y-%m-%d %H:%M:%S"), - 'statistics': [], - 'upload_type': 'job'} - - -def test_get_uploads_should_return_statistics(admin_request, sample_template): - now = datetime.utcnow() - earlier = datetime.utcnow() - timedelta(days=1) - job_1 = create_job(template=sample_template, job_status='pending') - job_2 = create_job(sample_template, processing_started=earlier) - for _ in range(3): - create_notification(template=sample_template, job=job_2, status='created') - - job_3 = create_job(sample_template, processing_started=now) - for _ in range(4): - create_notification(template=sample_template, job=job_3, status='sending') - - letter_template = create_precompiled_template(sample_template.service) - create_uploaded_letter(letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow() - timedelta(days=3)) - - resp_json = admin_request.get('upload.get_uploads_by_service', service_id=sample_template.service_id)['data'] - assert len(resp_json) == 4 - assert resp_json[0]['id'] == str(job_1.id) - assert resp_json[0]['statistics'] == [] - assert resp_json[1]['id'] == str(job_3.id) - assert resp_json[1]['statistics'] == [{'status': 'sending', 'count': 4}] - assert resp_json[2]['id'] == str(job_2.id) - assert resp_json[2]['statistics'] == [{'status': 'created', 'count': 3}] - assert resp_json[3]['id'] is None - assert resp_json[3]['statistics'] == [] - - -def test_get_uploads_should_paginate(admin_request, sample_template): - for _ in range(10): - create_job(sample_template) - - with set_config(admin_request.app, 'PAGE_SIZE', 2): - resp_json = admin_request.get('upload.get_uploads_by_service', service_id=sample_template.service_id) - - assert len(resp_json['data']) == 2 - assert resp_json['page_size'] == 2 - assert resp_json['total'] == 10 - assert 'links' in resp_json - assert set(resp_json['links'].keys()) == {'next', 'last'} - - -def test_get_uploads_accepts_page_parameter(admin_request, sample_template): - for _ in range(10): - create_job(sample_template) - - with set_config(admin_request.app, 'PAGE_SIZE', 2): - resp_json = admin_request.get('upload.get_uploads_by_service', service_id=sample_template.service_id, page=2) - - assert len(resp_json['data']) == 2 - assert resp_json['page_size'] == 2 - assert resp_json['total'] == 10 - assert 'links' in resp_json - assert set(resp_json['links'].keys()) == {'prev', 'next', 'last'} - - -@freeze_time('2017-06-10 12:00') -def test_get_uploads_should_retrieve_from_ft_notification_status_for_old_jobs(admin_request, sample_template): - # it's the 10th today, so 3 days should include all of 7th, 8th, 9th, and some of 10th. - just_three_days_ago = datetime(2017, 6, 7, 3, 59, 59) - not_quite_three_days_ago = just_three_days_ago + timedelta(seconds=1) - - job_1 = create_job(sample_template, created_at=just_three_days_ago, processing_started=just_three_days_ago) - job_2 = create_job(sample_template, created_at=just_three_days_ago, processing_started=not_quite_three_days_ago) - # is old but hasn't started yet (probably a scheduled job). We don't have any stats for this job yet. - job_3 = create_job(sample_template, created_at=just_three_days_ago, processing_started=None) - - # some notifications created more than three days ago, some created after the midnight cutoff - create_ft_notification_status(datetime(2017, 6, 6, 12), job=job_1, notification_status='delivered', count=2) - create_ft_notification_status(datetime(2017, 6, 7, 12), job=job_1, notification_status='delivered', count=4) - # job2's new enough - create_notification(job=job_2, status='created', created_at=not_quite_three_days_ago) - - # this isn't picked up because the job is too new - create_ft_notification_status(datetime(2017, 6, 7, 12), job=job_2, notification_status='delivered', count=8) - # this isn't picked up - while the job is old, it started in last 3 days so we look at notification table instead - create_ft_notification_status(datetime(2017, 6, 7, 12), job=job_3, notification_status='delivered', count=16) - - # this isn't picked up because we're using the ft status table for job_1 as it's old - create_notification(job=job_1, status='created', created_at=not_quite_three_days_ago) - - resp_json = admin_request.get('upload.get_uploads_by_service', service_id=sample_template.service_id)['data'] - - assert resp_json[0]['id'] == str(job_3.id) - assert resp_json[0]['statistics'] == [] - assert resp_json[1]['id'] == str(job_2.id) - assert resp_json[1]['statistics'] == [{'status': 'created', 'count': 1}] - assert resp_json[2]['id'] == str(job_1.id) - assert resp_json[2]['statistics'] == [{'status': 'delivered', 'count': 6}] - - -@freeze_time('2020-02-02 14:00') -def test_get_uploaded_letters_by_print_date(admin_request, sample_template): - letter_template = create_precompiled_template(sample_template.service) - - letter_1 = create_uploaded_letter( - letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow() - timedelta(minutes=1) - ) - letter_2 = create_uploaded_letter( - letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow() - timedelta(minutes=2) - ) - - service_id = sample_template.service.id - - resp_json = admin_request.get( - 'upload.get_uploaded_letter_by_service_and_print_day', - service_id=service_id, - letter_print_date='2020-02-02', - ) - assert resp_json['total'] == 2 - assert resp_json['page_size'] == 50 - assert len(resp_json['notifications']) == 2 - - assert resp_json['notifications'][0]['id'] == str(letter_1.id) - assert resp_json['notifications'][0]['created_at'] == letter_1.created_at.strftime( - '%Y-%m-%dT%H:%M:%S+00:00' - ) - - assert resp_json['notifications'][1]['id'] == str(letter_2.id) - assert resp_json['notifications'][1]['created_at'] == letter_2.created_at.strftime( - '%Y-%m-%dT%H:%M:%S+00:00' - ) - - -@freeze_time('2020-02-02 14:00') -def test_get_uploaded_letters_by_print_date_paginates(admin_request, sample_template): - letter_template = create_precompiled_template(sample_template.service) - - for _ in range(101): - create_uploaded_letter( - letter_template, sample_template.service, status='delivered', - created_at=datetime.utcnow() - timedelta(minutes=1) - ) - - service_id = sample_template.service.id - - resp_json = admin_request.get( - 'upload.get_uploaded_letter_by_service_and_print_day', - service_id=service_id, - letter_print_date='2020-02-02', - page=2, - ) - assert resp_json['total'] == 101 - assert resp_json['page_size'] == 50 - assert len(resp_json['notifications']) == 50 - assert resp_json['links']['prev'] == ( - f'/service/{service_id}/upload/uploaded-letters/2020-02-02?page=1' - ) - assert resp_json['links']['next'] == ( - f'/service/{service_id}/upload/uploaded-letters/2020-02-02?page=3' - ) - - -def test_get_uploaded_letters_by_print_date_404s_for_bad_date( - admin_request, - sample_service, -): - admin_request.get( - 'upload.get_uploaded_letter_by_service_and_print_day', - service_id=sample_service.id, - letter_print_date='foo', - _expected_status=400, - ) diff --git a/tests/app/v2/notifications/test_get_notifications.py b/tests/app/v2/notifications/test_get_notifications.py index 2129b558a..1a17b5eb0 100644 --- a/tests/app/v2/notifications/test_get_notifications.py +++ b/tests/app/v2/notifications/test_get_notifications.py @@ -1,5 +1,3 @@ -import datetime - import pytest from flask import json, url_for @@ -67,7 +65,6 @@ def test_get_notification_by_id_returns_200( 'sent_at': sample_notification.sent_at, 'completed_at': sample_notification.completed_at(), 'scheduled_for': None, - 'postage': None, 'provider_response': None } @@ -120,7 +117,6 @@ def test_get_notification_by_id_with_placeholders_returns_200( 'sent_at': sample_notification.sent_at, 'completed_at': sample_notification.completed_at(), 'scheduled_for': None, - 'postage': None, 'provider_response': None } @@ -216,35 +212,6 @@ def test_get_notification_by_id_invalid_id(client, sample_notification, id): "status_code": 400} -@pytest.mark.parametrize('created_at_month, postage, estimated_delivery', [ - (12, 'second', '2000-12-06T16:00:00.000000Z'), # 4pm GMT in winter - (6, 'second', '2000-06-05T15:00:00.000000Z'), # 4pm BST in summer - - (12, 'first', '2000-12-05T16:00:00.000000Z'), # 4pm GMT in winter - (6, 'first', '2000-06-03T15:00:00.000000Z'), # 4pm BST in summer (two days before 2nd class due to weekends) -]) -def test_get_notification_adds_delivery_estimate_for_letters( - client, - sample_letter_notification, - created_at_month, - postage, - estimated_delivery, -): - sample_letter_notification.created_at = datetime.date(2000, created_at_month, 1) - sample_letter_notification.postage = postage - - auth_header = create_service_authorization_header(service_id=sample_letter_notification.service_id) - response = client.get( - path='/v2/notifications/{}'.format(sample_letter_notification.id), - headers=[('Content-Type', 'application/json'), auth_header] - ) - - json_response = json.loads(response.get_data(as_text=True)) - assert response.status_code == 200 - assert json_response['postage'] == postage - assert json_response['estimated_delivery'] == estimated_delivery - - @pytest.mark.parametrize('template_type', ['sms', 'email']) def test_get_notification_doesnt_have_delivery_estimate_for_non_letters(client, sample_service, template_type): template = create_template(service=sample_service, template_type=template_type) @@ -376,7 +343,7 @@ def test_get_all_notifications_filter_by_template_type_invalid_template_type(cli assert json_response['status_code'] == 400 assert len(json_response['errors']) == 1 - assert json_response['errors'][0]['message'] == "template_type orange is not one of [sms, email, letter]" + assert json_response['errors'][0]['message'] == "template_type orange is not one of [sms, email]" def test_get_all_notifications_filter_by_single_status(client, sample_template): @@ -415,7 +382,7 @@ def test_get_all_notifications_filter_by_status_invalid_status(client, sample_no assert len(json_response['errors']) == 1 assert json_response['errors'][0]['message'] == "status elephant is not one of [cancelled, created, sending, " \ "sent, delivered, pending, failed, technical-failure, temporary-failure, permanent-failure, " \ - "pending-virus-check, validation-failed, virus-scan-failed, returned-letter, accepted, received]" + "pending-virus-check, validation-failed, virus-scan-failed]" def test_get_all_notifications_filter_by_multiple_statuses(client, sample_template): @@ -583,11 +550,10 @@ def test_get_all_notifications_filter_multiple_query_parameters(client, sample_e def test_get_all_notifications_renames_letter_statuses( client, - sample_letter_notification, sample_notification, sample_email_notification, ): - auth_header = create_service_authorization_header(service_id=sample_letter_notification.service_id) + auth_header = create_service_authorization_header(service_id=sample_email_notification.service_id) response = client.get( path=url_for('v2_notifications.get_notifications'), headers=[('Content-Type', 'application/json'), auth_header] @@ -599,127 +565,5 @@ def test_get_all_notifications_renames_letter_statuses( for noti in json_response['notifications']: if noti['type'] == 'sms' or noti['type'] == 'email': assert noti['status'] == 'created' - elif noti['type'] == 'letter': - assert noti['status'] == 'accepted' else: pytest.fail() - - -@pytest.mark.parametrize('db_status,expected_status', [ - ('created', 'accepted'), - ('sending', 'accepted'), - ('delivered', 'received'), - ('pending', 'pending'), - ('technical-failure', 'technical-failure') -]) -def test_get_notifications_renames_letter_statuses(client, sample_letter_template, db_status, expected_status): - letter_noti = create_notification( - sample_letter_template, - status=db_status, - personalisation={'address_line_1': 'Mr Foo', 'address_line_2': '1 Bar Street', 'postcode': 'N1'} - ) - auth_header = create_service_authorization_header(service_id=letter_noti.service_id) - response = client.get( - path=url_for('v2_notifications.get_notification_by_id', notification_id=letter_noti.id), - headers=[('Content-Type', 'application/json'), auth_header] - ) - - json_response = json.loads(response.get_data(as_text=True)) - assert response.status_code == 200 - assert json_response['status'] == expected_status - - -def test_get_pdf_for_notification_returns_pdf_content( - client, - sample_letter_notification, - mocker, -): - mock_get_letter_pdf = mocker.patch( - 'app.v2.notifications.get_notifications.get_letter_pdf_and_metadata', return_value=(b'foo', { - "message": "", - "invalid_pages": "", - "page_count": "1" - }) - ) - sample_letter_notification.status = 'created' - - auth_header = create_service_authorization_header(service_id=sample_letter_notification.service_id) - response = client.get( - path=url_for('v2_notifications.get_pdf_for_notification', notification_id=sample_letter_notification.id), - headers=[('Content-Type', 'application/json'), auth_header] - ) - - assert response.status_code == 200 - assert response.get_data() == b'foo' - mock_get_letter_pdf.assert_called_once_with(sample_letter_notification) - - -def test_get_pdf_for_notification_returns_400_if_pdf_not_found( - client, - sample_letter_notification, - mocker, -): - # if no files are returned get_letter_pdf throws StopIteration as the iterator runs out - mock_get_letter_pdf = mocker.patch( - 'app.v2.notifications.get_notifications.get_letter_pdf_and_metadata', - side_effect=(StopIteration, {}) - ) - sample_letter_notification.status = 'created' - - auth_header = create_service_authorization_header(service_id=sample_letter_notification.service_id) - response = client.get( - path=url_for('v2_notifications.get_pdf_for_notification', notification_id=sample_letter_notification.id), - headers=[('Content-Type', 'application/json'), auth_header] - ) - - assert response.status_code == 400 - assert response.json['errors'] == [{ - 'error': 'PDFNotReadyError', - 'message': 'PDF not available yet, try again later' - }] - mock_get_letter_pdf.assert_called_once_with(sample_letter_notification) - - -@pytest.mark.parametrize('status, expected_message', [ - ('virus-scan-failed', 'File did not pass the virus scan'), - ('technical-failure', 'PDF not available for letters in status technical-failure'), -]) -def test_get_pdf_for_notification_only_returns_pdf_content_if_right_status( - client, - sample_letter_notification, - mocker, - status, - expected_message -): - mock_get_letter_pdf = mocker.patch( - 'app.v2.notifications.get_notifications.get_letter_pdf_and_metadata', return_value=(b'foo', { - "message": "", - "invalid_pages": "", - "page_count": "1" - }) - ) - sample_letter_notification.status = status - - auth_header = create_service_authorization_header(service_id=sample_letter_notification.service_id) - response = client.get( - path=url_for('v2_notifications.get_pdf_for_notification', notification_id=sample_letter_notification.id), - headers=[('Content-Type', 'application/json'), auth_header] - ) - - assert response.status_code == 400 - assert response.json['errors'] == [{ - 'error': 'BadRequestError', - 'message': expected_message - }] - assert mock_get_letter_pdf.called is False - - -def test_get_pdf_for_notification_fails_for_non_letters(client, sample_notification): - auth_header = create_service_authorization_header(service_id=sample_notification.service_id) - response = client.get( - path=url_for('v2_notifications.get_pdf_for_notification', notification_id=sample_notification.id), - headers=[('Content-Type', 'application/json'), auth_header] - ) - - assert response.status_code == 400 - assert response.json['errors'] == [{'error': 'BadRequestError', 'message': 'Notification is not a letter'}] diff --git a/tests/app/v2/notifications/test_notification_schemas.py b/tests/app/v2/notifications/test_notification_schemas.py index 33a5b2030..dd95aaf61 100644 --- a/tests/app/v2/notifications/test_notification_schemas.py +++ b/tests/app/v2/notifications/test_notification_schemas.py @@ -45,7 +45,7 @@ def test_get_notifications_request_invalid_statuses( partial_error_status = "is not one of " \ "[cancelled, created, sending, sent, delivered, pending, failed, " \ "technical-failure, temporary-failure, permanent-failure, pending-virus-check, " \ - "validation-failed, virus-scan-failed, returned-letter, accepted, received]" + "validation-failed, virus-scan-failed]" with pytest.raises(ValidationError) as e: validate({'status': invalid_statuses + valid_statuses}, get_notifications_request) @@ -67,7 +67,7 @@ def test_get_notifications_request_invalid_statuses( def test_get_notifications_request_invalid_template_types( invalid_template_types, valid_template_types ): - partial_error_template_type = "is not one of [sms, email, letter]" + partial_error_template_type = "is not one of [sms, email]" with pytest.raises(ValidationError) as e: validate({'template_type': invalid_template_types + valid_template_types}, get_notifications_request) @@ -93,12 +93,12 @@ def test_get_notifications_request_invalid_statuses_and_template_types(): for invalid_status in ["elephant", "giraffe"]: assert "status {} is not one of [cancelled, created, sending, sent, delivered, " \ "pending, failed, technical-failure, temporary-failure, permanent-failure, " \ - "pending-virus-check, validation-failed, virus-scan-failed, returned-letter, accepted, received]".format( + "pending-virus-check, validation-failed, virus-scan-failed]".format( invalid_status ) in error_messages for invalid_template_type in ["orange", "avocado"]: - assert "template_type {} is not one of [sms, email, letter]" \ + assert "template_type {} is not one of [sms, email]" \ .format(invalid_template_type) in error_messages diff --git a/tests/app/v2/notifications/test_post_letter_notifications.py b/tests/app/v2/notifications/test_post_letter_notifications.py deleted file mode 100644 index 4687f5ae2..000000000 --- a/tests/app/v2/notifications/test_post_letter_notifications.py +++ /dev/null @@ -1,740 +0,0 @@ -import uuid -from unittest.mock import ANY - -import pytest -from flask import json, url_for - -from app.config import QueueNames -from app.models import ( - EMAIL_TYPE, - INTERNATIONAL_LETTERS, - KEY_TYPE_NORMAL, - KEY_TYPE_TEAM, - KEY_TYPE_TEST, - LETTER_TYPE, - NOTIFICATION_CREATED, - NOTIFICATION_DELIVERED, - NOTIFICATION_PENDING_VIRUS_CHECK, - NOTIFICATION_SENDING, - SMS_TYPE, - Job, - Notification, -) -from app.notifications.process_letter_notifications import ( - create_letter_notification, -) -from app.schema_validation import validate -from app.v2.errors import RateLimitError -from app.v2.notifications.notification_schemas import post_letter_response -from tests import create_service_authorization_header -from tests.app.db import create_letter_contact, create_service, create_template -from tests.conftest import set_config_values - -test_address = { - 'address_line_1': 'test 1', - 'address_line_2': 'test 2', - 'postcode': 'test pc' -} - - -def letter_request(client, data, service_id, key_type=KEY_TYPE_NORMAL, _expected_status=201, precompiled=False): - if precompiled: - url = url_for('v2_notifications.post_precompiled_letter_notification') - else: - url = url_for('v2_notifications.post_notification', notification_type=LETTER_TYPE) - resp = client.post( - url, - data=json.dumps(data), - headers=[ - ('Content-Type', 'application/json'), - create_service_authorization_header(service_id=service_id, key_type=key_type) - ] - ) - json_resp = json.loads(resp.get_data(as_text=True)) - assert resp.status_code == _expected_status, json_resp - return json_resp - - -@pytest.mark.parametrize('reference', [None, 'reference_from_client']) -def test_post_letter_notification_returns_201(client, sample_letter_template, mocker, reference): - mock = mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - data = { - 'template_id': str(sample_letter_template.id), - 'personalisation': { - 'address_line_1': 'Her Royal Highness Queen Elizabeth II', - 'address_line_2': 'Buckingham Palace', - 'address_line_3': 'London', - 'postcode': 'SW1 1AA', - 'name': 'Lizzie' - } - } - - if reference: - data.update({'reference': reference}) - - resp_json = letter_request(client, data, service_id=sample_letter_template.service_id) - - assert validate(resp_json, post_letter_response) == resp_json - assert Job.query.count() == 0 - notification = Notification.query.one() - assert notification.status == NOTIFICATION_CREATED - assert resp_json['id'] == str(notification.id) - assert resp_json['reference'] == reference - assert resp_json['content']['subject'] == sample_letter_template.subject - assert resp_json['content']['body'] == sample_letter_template.content - assert 'v2/notifications/{}'.format(notification.id) in resp_json['uri'] - assert resp_json['template']['id'] == str(sample_letter_template.id) - assert resp_json['template']['version'] == sample_letter_template.version - assert ( - 'services/{}/templates/{}'.format( - sample_letter_template.service_id, - sample_letter_template.id - ) in resp_json['template']['uri'] - ) - assert not resp_json['scheduled_for'] - assert not notification.reply_to_text - mock.assert_called_once_with([str(notification.id)], queue=QueueNames.CREATE_LETTERS_PDF) - - -def test_post_letter_notification_sets_postage( - client, notify_db_session, mocker -): - service = create_service(service_permissions=[LETTER_TYPE]) - template = create_template(service, template_type="letter", postage="first") - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - data = { - 'template_id': str(template.id), - 'personalisation': { - 'address_line_1': 'Her Royal Highness Queen Elizabeth II', - 'address_line_2': 'Buckingham Palace', - 'address_line_3': 'London', - 'postcode': 'SW1 1AA', - 'name': 'Lizzie' - } - } - - resp_json = letter_request(client, data, service_id=service.id) - - assert validate(resp_json, post_letter_response) == resp_json - notification = Notification.query.one() - assert notification.postage == "first" - - -def test_post_letter_notification_formats_postcode( - client, notify_db_session, mocker -): - service = create_service(service_permissions=[LETTER_TYPE]) - template = create_template(service, template_type="letter") - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - data = { - 'template_id': str(template.id), - 'personalisation': { - 'address_line_1': 'Her Royal Highness Queen Elizabeth II', - 'address_line_2': 'Buckingham Palace', - 'address_line_3': 'London', - 'postcode': ' Sw1 1aa ', - 'name': 'Lizzie' - } - } - - resp_json = letter_request(client, data, service_id=service.id) - - assert validate(resp_json, post_letter_response) == resp_json - notification = Notification.query.one() - # We store what the client gives us, and only reformat it when - # generating the PDF - assert notification.personalisation["postcode"] == ' Sw1 1aa ' - - -def test_post_letter_notification_stores_country( - client, notify_db_session, mocker -): - service = create_service(service_permissions=[LETTER_TYPE, INTERNATIONAL_LETTERS]) - template = create_template(service, template_type="letter") - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - data = { - 'template_id': str(template.id), - 'personalisation': { - 'address_line_1': 'Kaiser Wilhelm II', - 'address_line_2': 'Kronprinzenpalais', - 'address_line_5': ' deutschland ', - } - } - - resp_json = letter_request(client, data, service_id=service.id) - - assert validate(resp_json, post_letter_response) == resp_json - notification = Notification.query.one() - # In the personalisation we store what the client gives us - assert notification.personalisation["address_line_1"] == 'Kaiser Wilhelm II' - assert notification.personalisation["address_line_2"] == 'Kronprinzenpalais' - assert notification.personalisation["address_line_5"] == ' deutschland ' - # In the to field we store the whole address with the canonical country - assert notification.to == ( - 'Kaiser Wilhelm II\n' - 'Kronprinzenpalais\n' - 'Germany' - ) - assert notification.postage == 'europe' - assert notification.international - - -def test_post_letter_notification_international_sets_rest_of_world( - client, notify_db_session, mocker -): - service = create_service(service_permissions=[LETTER_TYPE, INTERNATIONAL_LETTERS]) - template = create_template(service, template_type="letter") - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - data = { - 'template_id': str(template.id), - 'personalisation': { - 'address_line_1': 'Prince Harry', - 'address_line_2': 'Toronto', - 'address_line_5': 'Canada', - } - } - - resp_json = letter_request(client, data, service_id=service.id) - - assert validate(resp_json, post_letter_response) == resp_json - notification = Notification.query.one() - - assert notification.postage == 'rest-of-world' - - -@pytest.mark.parametrize('permissions, personalisation, expected_error', ( - ( - [LETTER_TYPE], - { - 'address_line_1': 'Her Royal Highness Queen Elizabeth II', - 'address_line_2': 'Buckingham Palace', - 'address_line_3': 'London', - 'postcode': 'not a real postcode', - 'name': 'Lizzie' - }, - 'Must be a real UK postcode', - ), - ( - [LETTER_TYPE], - { - 'address_line_1': 'Her Royal Highness Queen Elizabeth II', - 'address_line_2': ']Buckingham Palace', - 'postcode': 'SW1A 1AA', - 'name': 'Lizzie' - }, - 'Address lines must not start with any of the following characters: @ ( ) = [ ] ” \\ / , < >', - ), - ( - [LETTER_TYPE, INTERNATIONAL_LETTERS], - { - 'address_line_1': 'Her Royal Highness Queen Elizabeth II', - 'address_line_2': 'Buckingham Palace', - 'address_line_3': 'London', - 'postcode': 'not a real postcode', - 'name': 'Lizzie' - }, - 'Last line of address must be a real UK postcode or another country', - ), -)) -def test_post_letter_notification_throws_error_for_bad_address( - client, notify_db_session, mocker, permissions, personalisation, expected_error -): - service = create_service(service_permissions=permissions) - template = create_template(service, template_type="letter", postage="first") - mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - data = { - 'template_id': str(template.id), - 'personalisation': personalisation - } - - error_json = letter_request(client, data, service_id=service.id, _expected_status=400) - - assert error_json['status_code'] == 400 - assert error_json['errors'] == [{ - 'error': 'ValidationError', - 'message': expected_error - }] - - -@pytest.mark.parametrize('env', [ - 'staging', - 'live', -]) -def test_post_letter_notification_with_test_key_creates_pdf_and_sets_status_to_delivered( - notify_api, client, sample_letter_template, mocker, env): - - data = { - 'template_id': str(sample_letter_template.id), - 'personalisation': { - 'address_line_1': 'Her Royal Highness Queen Elizabeth II', - 'address_line_2': 'Buckingham Palace', - 'address_line_3': 'London', - 'postcode': 'SW1 1AA', - 'name': 'Lizzie' - }, - 'reference': 'foo' - } - - fake_create_letter_task = mocker.patch('app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - fake_create_dvla_response_task = mocker.patch( - 'app.celery.research_mode_tasks.create_fake_letter_response_file.apply_async') - - with set_config_values(notify_api, { - 'NOTIFY_ENVIRONMENT': env - }): - letter_request(client, data, service_id=sample_letter_template.service_id, key_type=KEY_TYPE_TEST) - - notification = Notification.query.one() - - fake_create_letter_task.assert_called_once_with([str(notification.id)], queue='research-mode-tasks') - assert not fake_create_dvla_response_task.called - assert notification.status == NOTIFICATION_DELIVERED - assert notification.updated_at is not None - - -@pytest.mark.parametrize('env', [ - 'development', - 'preview', -]) -def test_post_letter_notification_with_test_key_creates_pdf_and_sets_status_to_sending_and_sends_fake_response_file( - notify_api, client, sample_letter_template, mocker, env): - - data = { - 'template_id': str(sample_letter_template.id), - 'personalisation': { - 'address_line_1': 'Her Royal Highness Queen Elizabeth II', - 'address_line_2': 'Buckingham Palace', - 'address_line_3': 'London', - 'postcode': 'SW1 1AA', - 'name': 'Lizzie' - }, - 'reference': 'foo' - } - - fake_create_letter_task = mocker.patch('app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - fake_create_dvla_response_task = mocker.patch( - 'app.celery.research_mode_tasks.create_fake_letter_response_file.apply_async') - with set_config_values(notify_api, { - 'NOTIFY_ENVIRONMENT': env - }): - letter_request(client, data, service_id=sample_letter_template.service_id, key_type=KEY_TYPE_TEST) - - notification = Notification.query.one() - - fake_create_letter_task.assert_called_once_with([str(notification.id)], queue='research-mode-tasks') - assert fake_create_dvla_response_task.called - assert notification.status == NOTIFICATION_SENDING - - -def test_post_letter_notification_returns_400_and_missing_template( - client, - sample_service_full_permissions -): - data = { - 'template_id': str(uuid.uuid4()), - 'personalisation': test_address - } - - error_json = letter_request(client, data, service_id=sample_service_full_permissions.id, _expected_status=400) - - assert error_json['status_code'] == 400 - assert error_json['errors'] == [{'error': 'BadRequestError', 'message': 'Template not found'}] - - -def test_post_letter_notification_returns_400_for_empty_personalisation( - client, - sample_service_full_permissions, - sample_letter_template -): - data = { - 'template_id': str(sample_letter_template.id), - 'personalisation': {'address_line_1': '', 'address_line_2': '', 'postcode': ''} - } - - error_json = letter_request(client, data, service_id=sample_service_full_permissions.id, _expected_status=400) - - assert error_json['status_code'] == 400 - assert all([e['error'] == 'ValidationError' for e in error_json['errors']]) - assert set([e['message'] for e in error_json['errors']]) == { - 'Address must be at least 3 lines', - } - - -def test_post_notification_returns_400_for_missing_letter_contact_block_personalisation( - client, - sample_service, -): - letter_contact_block = create_letter_contact( - service=sample_service, contact_block='((contact block))', is_default=True - ) - template = create_template( - service=sample_service, - template_type='letter', - reply_to=letter_contact_block.id, - ) - data = { - 'template_id': str(template.id), - 'personalisation': { - 'address_line_1': 'Line 1', - 'address_line_2': 'Line 2', - 'postcode': 'SW1A 1AA', - }, - } - - error_json = letter_request( - client, - data, - service_id=sample_service.id, - _expected_status=400, - ) - - assert error_json['status_code'] == 400 - assert error_json['errors'] == [{ - 'error': 'BadRequestError', - 'message': 'Missing personalisation: contact block' - }] - - -def test_notification_returns_400_for_missing_template_field( - client, - sample_service_full_permissions -): - data = { - 'personalisation': test_address - } - - error_json = letter_request(client, data, service_id=sample_service_full_permissions.id, _expected_status=400) - - assert error_json['status_code'] == 400 - assert error_json['errors'] == [{ - 'error': 'ValidationError', - 'message': 'template_id is a required property' - }] - - -def test_notification_returns_400_if_address_doesnt_have_underscores( - client, - sample_letter_template -): - data = { - 'template_id': str(sample_letter_template.id), - 'personalisation': { - 'address line 1': 'Her Royal Highness Queen Elizabeth II', - 'address-line-2': 'Buckingham Palace', - 'postcode': 'SW1 1AA', - } - } - - error_json = letter_request(client, data, service_id=sample_letter_template.service_id, _expected_status=400) - - assert error_json['status_code'] == 400 - assert error_json['errors'] == [ - { - 'error': 'ValidationError', - 'message': 'Address must be at least 3 lines' - } - ] - - -def test_returns_a_429_limit_exceeded_if_rate_limit_exceeded( - client, - sample_letter_template, - mocker -): - persist_mock = mocker.patch('app.v2.notifications.post_notifications.persist_notification') - mocker.patch( - 'app.v2.notifications.post_notifications.check_rate_limiting', - side_effect=RateLimitError('LIMIT', 'INTERVAL', 'TYPE') - ) - - data = { - 'template_id': str(sample_letter_template.id), - 'personalisation': test_address - } - - error_json = letter_request(client, data, service_id=sample_letter_template.service_id, _expected_status=429) - - assert error_json['status_code'] == 429 - assert error_json['errors'] == [{ - 'error': 'RateLimitError', - 'message': 'Exceeded rate limit for key type TYPE of LIMIT requests per INTERVAL seconds' - }] - - assert not persist_mock.called - - -@pytest.mark.parametrize('service_args, expected_status, expected_message', [ - ( - {'service_permissions': [EMAIL_TYPE, SMS_TYPE]}, - 400, - 'Service is not allowed to send letters', - ), - ( - {'restricted': True}, - 403, - 'Cannot send letters when service is in trial mode', - ) -]) -def test_post_letter_notification_returns_403_if_not_allowed_to_send_notification( - client, - notify_db_session, - service_args, - expected_status, - expected_message, -): - service = create_service(**service_args) - template = create_template(service, template_type=LETTER_TYPE) - - data = { - 'template_id': str(template.id), - 'personalisation': test_address - } - - error_json = letter_request(client, data, service_id=service.id, _expected_status=expected_status) - assert error_json['status_code'] == expected_status - assert error_json['errors'] == [ - {'error': 'BadRequestError', 'message': expected_message} - ] - - -def test_post_letter_notification_doesnt_accept_team_key(client, sample_letter_template, mocker): - mocker.patch('app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - data = { - 'template_id': str(sample_letter_template.id), - 'personalisation': {'address_line_1': 'Foo', 'address_line_2': 'Bar', 'postcode': 'Baz'} - } - - error_json = letter_request( - client, - data, - sample_letter_template.service_id, - key_type=KEY_TYPE_TEAM, - _expected_status=403 - ) - - assert error_json['status_code'] == 403 - assert error_json['errors'] == [{'error': 'BadRequestError', 'message': 'Cannot send letters with a team api key'}] - - -def test_post_letter_notification_doesnt_send_in_trial(client, sample_trial_letter_template, mocker): - mocker.patch('app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - data = { - 'template_id': str(sample_trial_letter_template.id), - 'personalisation': {'address_line_1': 'Foo', 'address_line_2': 'Bar', 'postcode': 'Baz'} - } - - error_json = letter_request( - client, - data, - sample_trial_letter_template.service_id, - _expected_status=403 - ) - - assert error_json['status_code'] == 403 - assert error_json['errors'] == [ - {'error': 'BadRequestError', 'message': 'Cannot send letters when service is in trial mode'}] - - -def test_post_letter_notification_is_delivered_but_still_creates_pdf_if_in_trial_mode_and_using_test_key( - client, - sample_trial_letter_template, - mocker -): - fake_create_letter_task = mocker.patch('app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - - data = { - "template_id": sample_trial_letter_template.id, - "personalisation": {'address_line_1': 'Foo', 'address_line_2': 'Bar', 'postcode': 'BA5 5AB'} - } - - letter_request(client, data=data, service_id=sample_trial_letter_template.service_id, key_type=KEY_TYPE_TEST) - - notification = Notification.query.one() - assert notification.status == NOTIFICATION_DELIVERED - fake_create_letter_task.assert_called_once_with([str(notification.id)], queue='research-mode-tasks') - - -def test_post_letter_notification_is_delivered_and_has_pdf_uploaded_to_test_letters_bucket_using_test_key( - client, - notify_user, - mocker -): - sample_letter_service = create_service(service_permissions=['letter']) - mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task') - s3mock = mocker.patch('app.v2.notifications.post_notifications.upload_letter_pdf', return_value='test.pdf') - data = { - "reference": "letter-reference", - "content": "bGV0dGVyLWNvbnRlbnQ=" - } - letter_request( - client, - data=data, - service_id=str(sample_letter_service.id), - key_type=KEY_TYPE_TEST, - precompiled=True) - - notification = Notification.query.one() - assert notification.status == NOTIFICATION_PENDING_VIRUS_CHECK - s3mock.assert_called_once_with(ANY, b'letter-content', precompiled=True) - - -def test_post_letter_notification_ignores_reply_to_text_for_service( - client, notify_db_session, mocker -): - mocker.patch('app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - - service = create_service(service_permissions=[LETTER_TYPE]) - create_letter_contact(service=service, contact_block='ignored', is_default=True) - template = create_template(service=service, template_type='letter') - data = { - "template_id": template.id, - "personalisation": {'address_line_1': 'Foo', 'address_line_2': 'Bar', 'postcode': 'BA5 5AB'} - } - letter_request(client, data=data, service_id=service.id, key_type=KEY_TYPE_NORMAL) - - notifications = Notification.query.all() - assert len(notifications) == 1 - assert notifications[0].reply_to_text is None - - -def test_post_letter_notification_persists_notification_reply_to_text_for_template( - client, notify_db_session, mocker -): - mocker.patch('app.celery.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - - service = create_service(service_permissions=[LETTER_TYPE]) - create_letter_contact(service=service, contact_block='the default', is_default=True) - template_letter_contact = create_letter_contact(service=service, contact_block='not the default', is_default=False) - template = create_template(service=service, template_type='letter', reply_to=template_letter_contact.id) - data = { - "template_id": template.id, - "personalisation": {'address_line_1': 'Foo', 'address_line_2': 'Bar', 'postcode': 'BA5 5AB'} - } - letter_request(client, data=data, service_id=service.id, key_type=KEY_TYPE_NORMAL) - - notifications = Notification.query.all() - assert len(notifications) == 1 - assert notifications[0].reply_to_text == 'not the default' - - -def test_post_precompiled_letter_with_invalid_base64(client, notify_user, mocker): - sample_service = create_service(service_permissions=['letter']) - mocker.patch('app.v2.notifications.post_notifications.upload_letter_pdf') - - data = { - "reference": "letter-reference", - "content": "hi" - } - auth_header = create_service_authorization_header(service_id=sample_service.id) - response = client.post( - path="v2/notifications/letter", - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - assert response.status_code == 400, response.get_data(as_text=True) - resp_json = json.loads(response.get_data(as_text=True)) - assert resp_json['errors'][0]['message'] == 'Cannot decode letter content (invalid base64 encoding)' - - assert not Notification.query.first() - - -@pytest.mark.parametrize('notification_postage, expected_postage', [ - ('second', 'second'), - ('first', 'first'), - (None, 'second') -]) -def test_post_precompiled_letter_notification_returns_201( - client, notify_user, mocker, notification_postage, expected_postage -): - sample_service = create_service(service_permissions=['letter']) - s3mock = mocker.patch('app.v2.notifications.post_notifications.upload_letter_pdf') - mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task') - data = { - "reference": "letter-reference", - "content": "bGV0dGVyLWNvbnRlbnQ=" - } - if notification_postage: - data["postage"] = notification_postage - auth_header = create_service_authorization_header(service_id=sample_service.id) - response = client.post( - path="v2/notifications/letter", - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - assert response.status_code == 201, response.get_data(as_text=True) - - s3mock.assert_called_once_with(ANY, b'letter-content', precompiled=True) - - notification = Notification.query.one() - - assert notification.billable_units == 0 - assert notification.status == NOTIFICATION_PENDING_VIRUS_CHECK - assert notification.postage == expected_postage - - resp_json = json.loads(response.get_data(as_text=True)) - assert resp_json == {'id': str(notification.id), 'reference': 'letter-reference', 'postage': expected_postage} - - -def test_post_precompiled_letter_notification_if_s3_upload_fails_notification_is_not_persisted( - client, notify_user, mocker -): - sample_service = create_service(service_permissions=['letter']) - persist_letter_mock = mocker.patch('app.v2.notifications.post_notifications.create_letter_notification', - side_effect=create_letter_notification) - s3mock = mocker.patch('app.v2.notifications.post_notifications.upload_letter_pdf', side_effect=Exception()) - mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task') - data = { - "reference": "letter-reference", - "content": "bGV0dGVyLWNvbnRlbnQ=" - } - - auth_header = create_service_authorization_header(service_id=sample_service.id) - with pytest.raises(expected_exception=Exception): - client.post( - path="v2/notifications/letter", - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - assert s3mock.called - assert persist_letter_mock.called - assert Notification.query.count() == 0 - - -def test_post_letter_notification_throws_error_for_invalid_postage(client, notify_user, mocker): - sample_service = create_service(service_permissions=['letter']) - data = { - "reference": "letter-reference", - "content": "bGV0dGVyLWNvbnRlbnQ=", - "postage": "space unicorn" - } - auth_header = create_service_authorization_header(service_id=sample_service.id) - response = client.post( - path="v2/notifications/letter", - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), auth_header]) - - assert response.status_code == 400, response.get_data(as_text=True) - resp_json = json.loads(response.get_data(as_text=True)) - assert resp_json['errors'][0]['message'] == "postage invalid. It must be first, second, europe or rest-of-world." - - assert not Notification.query.first() - - -@pytest.mark.parametrize('content_type', - ['application/json', 'application/text']) -def test_post_letter_notification_when_payload_is_invalid_json_returns_400( - client, sample_service, content_type): - auth_header = create_service_authorization_header(service_id=sample_service.id) - payload_not_json = { - "template_id": "dont-convert-to-json", - } - response = client.post( - path='/v2/notifications/letter', - data=payload_not_json, - headers=[('Content-Type', content_type), auth_header], - ) - - assert response.status_code == 400 - error_msg = json.loads(response.get_data(as_text=True))["errors"][0]["message"] - - assert error_msg == 'Invalid JSON supplied in POST data' diff --git a/tests/app/v2/notifications/test_post_notifications.py b/tests/app/v2/notifications/test_post_notifications.py index 50e098b89..91189a3a1 100644 --- a/tests/app/v2/notifications/test_post_notifications.py +++ b/tests/app/v2/notifications/test_post_notifications.py @@ -57,7 +57,6 @@ def test_post_sms_notification_returns_201(client, sample_template_with_placehol assert len(notifications) == 1 assert notifications[0].status == NOTIFICATION_CREATED notification_id = notifications[0].id - assert notifications[0].postage is None assert notifications[0].document_download_count is None assert resp_json['id'] == str(notification_id) assert resp_json['reference'] == reference @@ -358,7 +357,6 @@ def test_post_notification_returns_400_and_missing_template(client, sample_servi @pytest.mark.parametrize("notification_type, key_send_to, send_to", [ ("sms", "phone_number", "+447700900855"), ("email", "email_address", "sample@email.com"), - ("letter", "personalisation", {"address_line_1": "The queen", "postcode": "SW1 1AA"}) ]) def test_post_notification_returns_401_and_well_formed_auth_error(client, sample_template, notification_type, key_send_to, send_to): @@ -429,7 +427,6 @@ def test_post_email_notification_returns_201(client, sample_email_template_with_ assert validate(resp_json, post_email_response) == resp_json notification = Notification.query.one() assert notification.status == NOTIFICATION_CREATED - assert notification.postage is None assert resp_json['id'] == str(notification.id) assert resp_json['reference'] == reference assert notification.reference is None @@ -1156,32 +1153,3 @@ def test_post_notifications_doesnt_use_save_queue_for_test_notifications( assert mock_send_task.called assert not save_task.called assert len(Notification.query.all()) == 1 - - -def test_post_notification_does_not_use_save_queue_for_letters(client, sample_letter_template, mocker): - mock_save = mocker.patch("app.v2.notifications.post_notifications.save_email_or_sms_to_queue") - mock_create_pdf_task = mocker.patch('app.celery.tasks.letters_pdf_tasks.get_pdf_for_templated_letter.apply_async') - - with set_config_values(current_app, { - 'HIGH_VOLUME_SERVICE': [str(sample_letter_template.service_id)], - - }): - data = { - 'template_id': str(sample_letter_template.id), - 'personalisation': { - 'address_line_1': 'Her Royal Highness Queen Elizabeth II', - 'address_line_2': 'Buckingham Palace', - 'address_line_3': 'London', - 'postcode': 'SW1 1AA', - } - } - response = client.post( - path='/v2/notifications/letter', - data=json.dumps(data), - headers=[('Content-Type', 'application/json'), - create_service_authorization_header(service_id=sample_letter_template.service_id)] - ) - assert response.status_code == 201 - json_resp = response.get_json() - assert not mock_save.called - mock_create_pdf_task.assert_called_once_with([str(json_resp['id'])], queue='create-letters-pdf-tasks') diff --git a/tests/app/v2/template/test_get_template.py b/tests/app/v2/template/test_get_template.py index 4dbee4e81..4503c753f 100644 --- a/tests/app/v2/template/test_get_template.py +++ b/tests/app/v2/template/test_get_template.py @@ -1,29 +1,23 @@ import pytest from flask import json -from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE, TEMPLATE_TYPES +from app.models import EMAIL_TYPE, SMS_TYPE, TEMPLATE_TYPES from app.utils import DATETIME_FORMAT from tests import create_service_authorization_header -from tests.app.db import create_letter_contact, create_template +from tests.app.db import create_template valid_version_params = [None, 1] -@pytest.mark.parametrize("tmp_type, expected_name, expected_subject,postage", [ - (SMS_TYPE, 'sms Template Name', None, None), - (EMAIL_TYPE, 'email Template Name', 'Template subject', None), - (LETTER_TYPE, 'letter Template Name', 'Template subject', "second") +@pytest.mark.parametrize("tmp_type, expected_name, expected_subject", [ + (SMS_TYPE, 'sms Template Name', None), + (EMAIL_TYPE, 'email Template Name', 'Template subject'), ]) @pytest.mark.parametrize("version", valid_version_params) def test_get_template_by_id_returns_200( - client, sample_service, tmp_type, expected_name, expected_subject, version, postage + client, sample_service, tmp_type, expected_name, expected_subject, version ): - letter_contact_block_id = None - if tmp_type == 'letter': - letter_contact_block = create_letter_contact(sample_service, "Buckingham Palace, London, SW1A 1AA") - letter_contact_block_id = letter_contact_block.id - - template = create_template(sample_service, template_type=tmp_type, contact_block_id=(letter_contact_block_id)) + template = create_template(sample_service, template_type=tmp_type) auth_header = create_service_authorization_header(service_id=sample_service.id) version_path = '/version/{}'.format(version) if version else '' @@ -47,8 +41,6 @@ def test_get_template_by_id_returns_200( "subject": expected_subject, 'name': expected_name, 'personalisation': {}, - 'postage': postage, - 'letter_contact_block': letter_contact_block.contact_block if letter_contact_block_id else None, } assert json_response == expected_response @@ -105,44 +97,6 @@ def test_get_template_by_id_returns_placeholders( assert json_response['personalisation'] == expected_personalisation -@pytest.mark.parametrize("version", valid_version_params) -def test_get_letter_template_by_id_returns_placeholders( - client, - sample_service, - version, -): - contact_block = create_letter_contact( - service=sample_service, - contact_block='((contact block))', - ) - template = create_template( - sample_service, - template_type=LETTER_TYPE, - subject="((letterSubject))", - content="((letter_content))", - reply_to=contact_block.id, - ) - auth_header = create_service_authorization_header(service_id=sample_service.id) - - version_path = '/version/{}'.format(version) if version else '' - - response = client.get(path='/v2/template/{}{}'.format(template.id, version_path), - headers=[('Content-Type', 'application/json'), auth_header]) - - json_response = json.loads(response.get_data(as_text=True)) - assert json_response['personalisation'] == { - "letterSubject": { - "required": True, - }, - "letter_content": { - "required": True, - }, - "contact block": { - "required": True, - }, - } - - def test_get_template_with_non_existent_template_id_returns_404(client, fake_uuid, sample_service): auth_header = create_service_authorization_header(service_id=sample_service.id) diff --git a/tests/app/v2/template/test_post_template.py b/tests/app/v2/template/test_post_template.py index 271ec9792..223b24dc6 100644 --- a/tests/app/v2/template/test_post_template.py +++ b/tests/app/v2/template/test_post_template.py @@ -1,7 +1,7 @@ import pytest from flask import json -from app.models import EMAIL_TYPE, LETTER_TYPE, TEMPLATE_TYPES +from app.models import EMAIL_TYPE, TEMPLATE_TYPES from tests import create_service_authorization_header from tests.app.db import create_template @@ -96,10 +96,8 @@ def test_valid_post_template_returns_200( assert resp_json['id'] == str(template.id) - if tmp_type in {EMAIL_TYPE, LETTER_TYPE}: - assert expected_subject in resp_json['subject'] - if tmp_type == EMAIL_TYPE: + assert expected_subject in resp_json['subject'] assert resp_json['html'] == expected_html else: assert resp_json['html'] is None @@ -107,15 +105,13 @@ def test_valid_post_template_returns_200( assert expected_content in resp_json['body'] -@pytest.mark.parametrize("template_type", (EMAIL_TYPE, LETTER_TYPE)) -def test_email_and_letter_templates_not_rendered_into_content( +def test_email_templates_not_rendered_into_content( client, - sample_service, - template_type, + sample_service ): template = create_template( sample_service, - template_type=template_type, + template_type=EMAIL_TYPE, subject='Test', content=( 'Hello\n' diff --git a/tests/app/v2/template/test_template_schemas.py b/tests/app/v2/template/test_template_schemas.py index f5b804cb6..36ac2926b 100644 --- a/tests/app/v2/template/test_template_schemas.py +++ b/tests/app/v2/template/test_template_schemas.py @@ -34,7 +34,6 @@ valid_json_get_response_with_optionals = { 'body': 'some body', 'subject': "some subject", 'name': 'some name', - 'postage': 'first', } valid_request_args = [{"id": str(uuid.uuid4()), "version": 1}, {"id": str(uuid.uuid4())}] @@ -80,7 +79,6 @@ valid_json_post_response_with_optionals = { 'version': 1, 'body': "some body", 'subject': 'some subject', - 'postage': 'second', 'html': '

some body

', } diff --git a/tests/app/v2/templates/test_get_templates.py b/tests/app/v2/templates/test_get_templates.py index 29eacc6dc..67e8d705c 100644 --- a/tests/app/v2/templates/test_get_templates.py +++ b/tests/app/v2/templates/test_get_templates.py @@ -112,7 +112,7 @@ def test_get_all_templates_for_invalid_type_returns_400(client, sample_service): 'status_code': 400, 'errors': [ { - 'message': 'type coconut is not one of [sms, email, letter]', + 'message': 'type coconut is not one of [sms, email]', 'error': 'ValidationError' } ] diff --git a/tests/app/v2/templates/test_templates_schemas.py b/tests/app/v2/templates/test_templates_schemas.py index edcf9d629..d5f49f881 100644 --- a/tests/app/v2/templates/test_templates_schemas.py +++ b/tests/app/v2/templates/test_templates_schemas.py @@ -241,7 +241,7 @@ def test_get_all_template_request_schema_against_invalid_args_is_invalid(templat assert errors['status_code'] == 400 assert len(errors['errors']) == 1 - assert errors['errors'][0]['message'] == 'type unknown is not one of [sms, email, letter]' + assert errors['errors'][0]['message'] == 'type unknown is not one of [sms, email]' @pytest.mark.parametrize("response", valid_json_get_all_response)