From 36da78d7b78ded2b4567f7fab4fb825b40315782 Mon Sep 17 00:00:00 2001 From: codingfabi Date: Mon, 25 Aug 2025 09:17:24 +0200 Subject: [PATCH 1/5] initial test setup --- Pipfile | 16 ++ Pipfile.lock | 316 ++++++++++++++++++++++++++++++++++- README.md | 73 +++++++- __init__.py | 1 - retraction_check/__init__.py | 4 + setup.cfg | 31 ++++ tests/__init__.py | 1 + tests/run_tests.py | 31 ++++ tests/test_check_bib.py | 251 ++++++++++++++++++++++++++++ 9 files changed, 716 insertions(+), 8 deletions(-) delete mode 100644 __init__.py create mode 100644 retraction_check/__init__.py create mode 100644 setup.cfg create mode 100644 tests/__init__.py create mode 100644 tests/run_tests.py create mode 100644 tests/test_check_bib.py diff --git a/Pipfile b/Pipfile index 978d86f..2487143 100644 --- a/Pipfile +++ b/Pipfile @@ -8,6 +8,22 @@ bibtexparser = "*" requests = "*" [dev-packages] +pytest = "*" +pytest-cov = "*" +pytest-mock = "*" +black = "*" +flake8 = "*" +mypy = "*" [requires] python_version = "3.12" + +[scripts] +test = "pytest tests/ -v" +test-cov = "pytest tests/ --cov=retraction_check --cov-report=html --cov-report=term" +test-watch = "pytest tests/ -v --tb=short -x" +lint = "flake8 retraction_check/ tests/" +format = "black retraction_check/ tests/" +format-check = "black --check retraction_check/ tests/" +type-check = "mypy retraction_check/" +check-all = "bash -c 'pipenv run format-check && pipenv run lint && pipenv run type-check && pipenv run test-cov'" diff --git a/Pipfile.lock b/Pipfile.lock index ee7400e..a229be9 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a16172284bd5c1da580e9ff63f6e5d7d20dfa50854155f56ef89d719b0c78e0c" + "sha256": "61bad98d9fc46c1d11b0fc39b603ca39300a9b840337f256a5e6d1e782aecd3a" }, "pipfile-spec": 6, "requires": { @@ -134,12 +134,12 @@ }, "requests": { "hashes": [ - "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", - "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422" + "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", + "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==2.32.4" + "markers": "python_version >= '3.9'", + "version": "==2.32.5" }, "urllib3": { "hashes": [ @@ -150,5 +150,309 @@ "version": "==2.5.0" } }, - "develop": {} + "develop": { + "black": { + "hashes": [ + "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", + "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", + "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", + "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", + "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", + "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", + "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", + "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", + "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", + "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", + "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", + "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", + "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0", + "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", + 
"sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", + "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", + "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355", + "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", + "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e", + "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", + "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", + "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==25.1.0" + }, + "click": { + "hashes": [ + "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", + "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b" + ], + "markers": "python_version >= '3.10'", + "version": "==8.2.1" + }, + "coverage": { + "extras": [ + "toml" + ], + "hashes": [ + "sha256:02252dc1216e512a9311f596b3169fad54abcb13827a8d76d5630c798a50a754", + "sha256:02650a11324b80057b8c9c29487020073d5e98a498f1857f37e3f9b6ea1b2426", + "sha256:03f47dc870eec0367fcdd603ca6a01517d2504e83dc18dbfafae37faec66129a", + "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7", + "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f", + "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae", + "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a", + "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4", + "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2", + "sha256:14d6071c51ad0f703d6440827eaa46386169b5fdced42631d5a5ac419616046f", + "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760", + "sha256:1d043a8a06987cc0c98516e57c4d3fc2c1591364831e9deb59c9e1b4937e8caf", + "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b", + "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5", + "sha256:1f8a81b0614642f91c9effd53eec284f965577591f51f547a1cbeb32035b4c2f", + "sha256:2285c04ee8676f7938b02b4936d9b9b672064daab3187c20f73a55f3d70e6b4a", + "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2", + "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44", + "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee", + "sha256:2d488d7d42b6ded7ea0704884f89dcabd2619505457de8fc9a6011c62106f6e5", + "sha256:32ddaa3b2c509778ed5373b177eb2bf5662405493baeff52278a0b4f9415188b", + "sha256:343a023193f04d46edc46b2616cdbee68c94dd10208ecd3adc56fcc54ef2baa1", + "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235", + "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262", + "sha256:3876385722e335d6e991c430302c24251ef9c2a9701b2b390f5473199b1b8ebf", + "sha256:38a9109c4ee8135d5df5505384fc2f20287a47ccbe0b3f04c53c9a1989c2bbaf", + "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c", + "sha256:4028e7558e268dd8bcf4d9484aad393cafa654c24b4885f6f9474bf53183a82a", + "sha256:414a568cd545f9dc75f0686a0049393de8098414b58ea071e03395505b73d7a8", + "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db", + "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4", + "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99", + 
"sha256:5255b3bbcc1d32a4069d6403820ac8e6dbcc1d68cb28a60a1ebf17e47028e898", + "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14", + "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde", + "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508", + "sha256:585ffe93ae5894d1ebdee69fc0b0d4b7c75d8007983692fb300ac98eed146f78", + "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9", + "sha256:5f1dc8f1980a272ad4a6c84cba7981792344dad33bf5869361576b7aef42733a", + "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762", + "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df", + "sha256:61f78c7c3bc272a410c5ae3fde7792b4ffb4acc03d35a7df73ca8978826bb7ab", + "sha256:62835c1b00c4a4ace24c1a88561a5a59b612fbb83a525d1c70ff5720c97c0610", + "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f", + "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc", + "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e", + "sha256:672a6c1da5aea6c629819a0e1461e89d244f78d7b60c424ecf4f1f2556c041d8", + "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031", + "sha256:6a29f8e0adb7f8c2b95fa2d4566a1d6e6722e0a637634c6563cb1ab844427dd9", + "sha256:6b87f1ad60b30bc3c43c66afa7db6b22a3109902e28c5094957626a0143a001f", + "sha256:73269df37883e02d460bee0cc16be90509faea1e3bd105d77360b512d5bb9c33", + "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff", + "sha256:7e78b767da8b5fc5b2faa69bb001edafcd6f3995b42a331c53ef9572c55ceb82", + "sha256:7fa22800f3908df31cea6fb230f20ac49e343515d968cc3a42b30d5c3ebf9b5a", + "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869", + "sha256:8048ce4b149c93447a55d279078c8ae98b08a6951a3c4d2d7e87f4efc7bfe100", + "sha256:90dc3d6fb222b194a5de60af8d190bedeeddcbc7add317e4a3cd333ee6b7c879", + "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b", + "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357", + "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba", + "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031", + "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874", + "sha256:b0ef4e66f006ed181df29b59921bd8fc7ed7cd6a9289295cd8b2824b49b570df", + "sha256:b3dcf2ead47fa8be14224ee817dfc1df98043af568fe120a22f81c0eb3c34ad2", + "sha256:b45264dd450a10f9e03237b41a9a24e85cbb1e278e5a32adb1a303f58f0017f3", + "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5", + "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2", + "sha256:bc8e4d99ce82f1710cc3c125adc30fd1487d3cf6c2cd4994d78d68a47b16989a", + "sha256:c177e6ffe2ebc7c410785307758ee21258aa8e8092b44d09a2da767834f075f2", + "sha256:c2492e4dd9daab63f5f56286f8a04c51323d237631eb98505d87e4c4ff19ec34", + "sha256:c2d05c7e73c60a4cecc7d9b60dbfd603b4ebc0adafaef371445b47d0f805c8a9", + "sha256:c6a5c3414bfc7451b879141ce772c546985163cf553f08e0f135f0699a911801", + "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84", + "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3", + "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b", + "sha256:dd382410039fe062097aa0292ab6335a3f1e7af7bba2ef8d27dcda484918f20c", + "sha256:e551f9d03347196271935fd3c0c165f0e8c049220280c1120de0084d65e9c7ff", + "sha256:eb7b0bbf7cc1d0453b843eca7b5fa017874735bef9bfdfa4121373d2cc885ed6", + 
"sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae", + "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732", + "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c", + "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6", + "sha256:f366a57ac81f5e12797136552f5b7502fa053c861a009b91b80ed51f2ce651c6", + "sha256:f39071caa126f69d63f99b324fb08c7b1da2ec28cbb1fe7b5b1799926492f65c", + "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13", + "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842", + "sha256:fcf6ab569436b4a647d4e91accba12509ad9f2554bc93d3aee23cc596e7f99c3", + "sha256:fefafcca09c3ac56372ef64a40f5fe17c5592fab906e0fdffd09543f3012ba50" + ], + "markers": "python_version >= '3.9'", + "version": "==7.10.5" + }, + "flake8": { + "hashes": [ + "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e", + "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==7.3.0" + }, + "iniconfig": { + "hashes": [ + "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", + "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" + ], + "markers": "python_version >= '3.8'", + "version": "==2.1.0" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "mypy": { + "hashes": [ + "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", + "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", + "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849", + "sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733", + "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81", + "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", + "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6", + "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", + "sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91", + "sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972", + "sha256:43808d9476c36b927fbcd0b0255ce75efe1b68a080154a38ae68a7e62de8f0f8", + "sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd", + "sha256:5d1092694f166a7e56c805caaf794e0585cabdbf1df36911c414e4e9abb62ae9", + "sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0", + "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", + "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", + "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", + "sha256:79d44f9bfb004941ebb0abe8eff6504223a9c1ac51ef967d1263c6572bbebc99", + "sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7", + "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", + "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7", + "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a", + "sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed", + "sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94", + 
"sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", + "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", + "sha256:b01586eed696ec905e61bd2568f48740f7ac4a45b3a468e6423a03d3788a51a8", + "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", + "sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a", + "sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df", + "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", + "sha256:d7598cf74c3e16539d4e2f0b8d8c318e00041553d83d4861f87c7a72e95ac24d", + "sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390", + "sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b", + "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", + "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14", + "sha256:feb8cc32d319edd5859da2cc084493b3e2ce5e49a946377663cc90f6c15fb259", + "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==1.17.1" + }, + "mypy-extensions": { + "hashes": [ + "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", + "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558" + ], + "markers": "python_version >= '3.8'", + "version": "==1.1.0" + }, + "packaging": { + "hashes": [ + "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", + "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f" + ], + "markers": "python_version >= '3.8'", + "version": "==25.0" + }, + "pathspec": { + "hashes": [ + "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", + "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712" + ], + "markers": "python_version >= '3.8'", + "version": "==0.12.1" + }, + "platformdirs": { + "hashes": [ + "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", + "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4" + ], + "markers": "python_version >= '3.9'", + "version": "==4.3.8" + }, + "pluggy": { + "hashes": [ + "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", + "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746" + ], + "markers": "python_version >= '3.9'", + "version": "==1.6.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783", + "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d" + ], + "markers": "python_version >= '3.9'", + "version": "==2.14.0" + }, + "pyflakes": { + "hashes": [ + "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58", + "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f" + ], + "markers": "python_version >= '3.9'", + "version": "==3.4.0" + }, + "pygments": { + "hashes": [ + "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", + "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b" + ], + "markers": "python_version >= '3.8'", + "version": "==2.19.2" + }, + "pytest": { + "hashes": [ + "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", + "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==8.4.1" + }, + "pytest-cov": { + "hashes": [ + 
"sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", + "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==6.2.1" + }, + "pytest-mock": { + "hashes": [ + "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", + "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==3.14.1" + }, + "typing-extensions": { + "hashes": [ + "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", + "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76" + ], + "markers": "python_version >= '3.9'", + "version": "==4.14.1" + } + } } diff --git a/README.md b/README.md index 7f3d4fe..589eac7 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,73 @@ # retraction_check -A python package to check whether a paper in your .bib file has been listed on retraction watch + +A Python package to check whether papers in your .bib file or a specific bibtex entry have been listed on [Retraction Watch](https://retractionwatch.com/). + +## Features +- Parse .bib files and extract paper information +- Query the Retraction Watch dataset for retracted papers +- Support both exact DOI matching and fuzzy title matching +- Robust error handling for various edge cases + +## Installation + +```bash +# Install dependencies +pipenv install + +# Install development dependencies +pipenv install --dev +``` + +## Usage + +### Command line +```bash +python -m retraction_check.check_bib yourfile.bib +``` + +## Development + +### Running tests +```bash +# Run all tests +pipenv run test + +# Run tests with coverage +pipenv run test-cov + +# Run tests in watch mode +pipenv run test-watch + +# Run tests directly with Python +python tests/run_tests.py +``` + +### Code quality +```bash +# Format code +pipenv run format + +# Lint code +pipenv run lint + +# Type checking +pipenv run type-check + +# Run all checks +pipenv run check-all +``` + +### Available test commands +- `test` - Run all tests with verbose output +- `test-cov` - Run tests with coverage report (HTML and terminal) +- `test-watch` - Run tests in watch mode with short traceback +- `lint` - Run flake8 linting +- `format` - Format code with black +- `format-check` - Check if code is properly formatted +- `type-check` - Run mypy type checking +- `check-all` - Run all quality checks (format, lint, type-check, test-cov) + +## Requirements +- Python 3.8+ +- bibtexparser +- requests diff --git a/__init__.py b/__init__.py deleted file mode 100644 index 06f6a8a..0000000 --- a/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .check_bib import check_bib_file diff --git a/retraction_check/__init__.py b/retraction_check/__init__.py new file mode 100644 index 0000000..517d2fd --- /dev/null +++ b/retraction_check/__init__.py @@ -0,0 +1,4 @@ +from .check_bib import check_bib_file, check_entry, parse_bib_file, BibEntry, MATCH_TYPE + +__version__ = "0.1.0" +__all__ = ["check_bib_file", "check_entry", "parse_bib_file", "BibEntry", "MATCH_TYPE"] diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..5fdf1e5 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,31 @@ +[tool:pytest] +testpaths = tests +python_files = test_*.py +python_classes = Test* +python_functions = test_* +addopts = + --strict-markers + --strict-config + --verbose + -ra + --tb=short + +[coverage:run] +source = retraction_check +omit = + */tests/* + */venv/* + 
*/__pycache__/* + +[coverage:report] +exclude_lines = + pragma: no cover + def __repr__ + if self.debug: + if settings.DEBUG + raise AssertionError + raise NotImplementedError + if 0: + if __name__ == .__main__.: + class .*\bProtocol\): + @(abc\.)?abstractmethod diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..b5bdf3b --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# Test package for retraction_check diff --git a/tests/run_tests.py b/tests/run_tests.py new file mode 100644 index 0000000..f629b83 --- /dev/null +++ b/tests/run_tests.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +""" +Simple test runner for the retraction_check package. +Run this script to execute all tests. +""" + +import sys +import os +import unittest + +# Add the package directory to the path +package_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.insert(0, package_dir) + +def run_tests(): + """Discover and run all tests in the tests directory""" + # Discover tests + loader = unittest.TestLoader() + tests_dir = os.path.dirname(os.path.abspath(__file__)) + suite = loader.discover(tests_dir, pattern='test_*.py') + + # Run tests + runner = unittest.TextTestRunner(verbosity=2) + result = runner.run(suite) + + # Return exit code based on test results + return 0 if result.wasSuccessful() else 1 + +if __name__ == '__main__': + exit_code = run_tests() + sys.exit(exit_code) diff --git a/tests/test_check_bib.py b/tests/test_check_bib.py new file mode 100644 index 0000000..1a35dcf --- /dev/null +++ b/tests/test_check_bib.py @@ -0,0 +1,251 @@ +import unittest +from unittest.mock import patch, mock_open, MagicMock +import sys +import os + +# Add the parent directory to the path so we can import the module +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +from retraction_check.check_bib import ( + parse_bib_file, + download_retraction_watch_csv, + build_retraction_lookup, + fuzzy_title_match, + is_retracted, + check_entry, + BibEntry +) + + +class TestParseBibFile(unittest.TestCase): + + @patch('builtins.open', new_callable=mock_open, read_data='@article{test,\ntitle={Test Title},\nauthor={Test Author}\n}') + @patch('bibtexparser.load') + def test_parse_valid_bib_file(self, mock_load, mock_file): + mock_db = MagicMock() + mock_db.entries = [{'title': 'Test Title', 'author': 'Test Author'}] + mock_load.return_value = mock_db + + result = parse_bib_file('test.bib') + self.assertEqual(len(result), 1) + self.assertEqual(result[0]['title'], 'Test Title') + + @patch('builtins.open', side_effect=FileNotFoundError) + def test_parse_file_not_found(self, mock_file): + result = parse_bib_file('nonexistent.bib') + self.assertEqual(result, []) + + @patch('builtins.open', side_effect=UnicodeDecodeError('utf-8', b'', 0, 1, 'test')) + def test_parse_unicode_decode_error(self, mock_file): + result = parse_bib_file('invalid_encoding.bib') + self.assertEqual(result, []) + + @patch('builtins.open', new_callable=mock_open) + @patch('bibtexparser.load') + def test_parse_empty_bib_file(self, mock_load, mock_file): + mock_db = MagicMock() + mock_db.entries = [] + mock_load.return_value = mock_db + + result = parse_bib_file('empty.bib') + self.assertEqual(result, []) + + +class TestDownloadRetractionWatchCSV(unittest.TestCase): + + @patch('requests.get') + def test_download_success(self, mock_get): + mock_response = MagicMock() + mock_response.text = 'Title,OriginalPaperDOI\nTest Paper,10.1234/test' + mock_response.raise_for_status.return_value = None + 
mock_get.return_value = mock_response + + result = download_retraction_watch_csv() + self.assertEqual(len(result), 1) + self.assertEqual(result[0]['Title'], 'Test Paper') + + @patch('requests.get') + def test_download_network_error(self, mock_get): + mock_get.side_effect = Exception('Network error') + + result = download_retraction_watch_csv() + self.assertEqual(result, []) + + @patch('requests.get') + def test_download_missing_columns(self, mock_get): + mock_response = MagicMock() + mock_response.text = 'WrongColumn\nTest Value' + mock_response.raise_for_status.return_value = None + mock_get.return_value = mock_response + + result = download_retraction_watch_csv() + self.assertEqual(result, []) + + +class TestBuildRetractionLookup(unittest.TestCase): + + def test_build_lookup_with_valid_data(self): + csv_rows = [ + {'Title': 'Test Paper 1', 'OriginalPaperDOI': '10.1234/test1'}, + {'Title': 'Test Paper 2', 'OriginalPaperDOI': '10.1234/test2'}, + {'Title': '', 'OriginalPaperDOI': ''} # Empty row + ] + + titles, dois = build_retraction_lookup(csv_rows) + + self.assertEqual(len(titles), 2) + self.assertEqual(len(dois), 2) + self.assertIn('Test Paper 1', titles) + self.assertIn('10.1234/test1', dois) + + def test_build_lookup_with_empty_data(self): + csv_rows = [] + + titles, dois = build_retraction_lookup(csv_rows) + + self.assertEqual(len(titles), 0) + self.assertEqual(len(dois), 0) + + +class TestFuzzyTitleMatch(unittest.TestCase): + + def test_fuzzy_match_exact(self): + titles = {'Test Paper Title'} + result = fuzzy_title_match('Test Paper Title', titles) + self.assertTrue(result) + + def test_fuzzy_match_similar(self): + titles = {'Test Paper Title'} + result = fuzzy_title_match('Test Paper Titl', titles) + self.assertTrue(result) + + def test_fuzzy_match_no_match(self): + titles = {'Machine Learning Applications in Healthcare'} + result = fuzzy_title_match('Quantum Computing Algorithms', titles) + self.assertFalse(result) + + def test_fuzzy_match_empty_title(self): + titles = {'Test Paper Title'} + result = fuzzy_title_match('', titles) + self.assertFalse(result) + + +class TestIsRetracted(unittest.TestCase): + + def setUp(self): + self.titles = {'Retracted Paper Title'} + self.dois = {'10.1234/retracted'} + + def test_doi_match(self): + entry: BibEntry = {'title': 'Some Title', 'doi': '10.1234/retracted'} + result = is_retracted(entry, self.titles, self.dois) + self.assertEqual(result, 'doi') + + def test_fuzzy_title_match(self): + entry: BibEntry = {'title': 'Retracted Paper Title', 'doi': ''} + result = is_retracted(entry, self.titles, self.dois) + self.assertEqual(result, 'fuzzy') + + def test_no_match(self): + entry: BibEntry = {'title': 'Completely Different Research Topic', 'doi': '10.1234/clean'} + result = is_retracted(entry, self.titles, self.dois) + self.assertIsNone(result) + + def test_invalid_entry(self): + # Test with a malformed entry that could cause exceptions + entry = None + result = is_retracted(entry, self.titles, self.dois) + self.assertIsNone(result) + + +class TestCheckEntry(unittest.TestCase): + + @patch('retraction_check.check_bib.download_retraction_watch_csv') + @patch('retraction_check.check_bib.build_retraction_lookup') + def test_check_entry_with_provided_lookup(self, mock_build, mock_download): + titles = {'Retracted Paper'} + dois = {'10.1234/retracted'} + entry: BibEntry = {'title': 'Retracted Paper', 'doi': ''} + + result = check_entry(entry, titles, dois) + + # Should not call download or build since lookup is provided + 
mock_download.assert_not_called() + mock_build.assert_not_called() + self.assertEqual(result, 'fuzzy') + + @patch('retraction_check.check_bib.download_retraction_watch_csv') + @patch('retraction_check.check_bib.build_retraction_lookup') + def test_check_entry_without_lookup(self, mock_build, mock_download): + mock_download.return_value = [] + mock_build.return_value = (set(), set()) + entry: BibEntry = {'title': 'Clean Paper', 'doi': ''} + + result = check_entry(entry) + + # Should call download and build since lookup is not provided + mock_download.assert_called_once() + mock_build.assert_called_once() + self.assertIsNone(result) + + +class TestCheckBibFileEndToEnd(unittest.TestCase): + """End-to-end tests for the complete workflow""" + + @patch('retraction_check.check_bib.download_retraction_watch_csv') + @patch('builtins.open', new_callable=mock_open, read_data='@article{test,\ntitle={Test Paper},\ndoi={10.1234/test}\n}') + @patch('bibtexparser.load') + def test_check_bib_file_no_retractions(self, mock_load, mock_file, mock_download): + # Mock bib file parsing + mock_db = MagicMock() + mock_db.entries = [{'title': 'Test Paper', 'doi': '10.1234/test'}] + mock_load.return_value = mock_db + + # Mock CSV download with no matching retractions + mock_download.return_value = [ + {'Title': 'Different Paper', 'OriginalPaperDOI': '10.1234/different'} + ] + + # This should run without errors + from retraction_check.check_bib import check_bib_file + try: + check_bib_file('test.bib') + except Exception as e: + self.fail(f"check_bib_file raised an exception: {e}") + + @patch('retraction_check.check_bib.download_retraction_watch_csv') + @patch('builtins.open', new_callable=mock_open, read_data='@article{test,\ntitle={Retracted Paper},\ndoi={10.1234/retracted}\n}') + @patch('bibtexparser.load') + def test_check_bib_file_with_retractions(self, mock_load, mock_file, mock_download): + # Mock bib file parsing + mock_db = MagicMock() + mock_db.entries = [{'title': 'Retracted Paper', 'doi': '10.1234/retracted'}] + mock_load.return_value = mock_db + + # Mock CSV download with matching retraction + mock_download.return_value = [ + {'Title': 'Retracted Paper', 'OriginalPaperDOI': '10.1234/retracted'} + ] + + # This should run without errors and find the retraction + from retraction_check.check_bib import check_bib_file + try: + check_bib_file('test.bib') + except Exception as e: + self.fail(f"check_bib_file raised an exception: {e}") + + @patch('retraction_check.check_bib.download_retraction_watch_csv') + def test_check_bib_file_nonexistent_file(self, mock_download): + # Mock CSV download + mock_download.return_value = [] + + # Test with a file that doesn't exist + from retraction_check.check_bib import check_bib_file + try: + check_bib_file('nonexistent_file.bib') + except Exception as e: + self.fail(f"check_bib_file should handle nonexistent files gracefully: {e}") + + +if __name__ == '__main__': + unittest.main() From fec5160a6844ea443e2dbb6024c45b344b5cbef6 Mon Sep 17 00:00:00 2001 From: codingfabi Date: Mon, 25 Aug 2025 09:38:10 +0200 Subject: [PATCH 2/5] add publishing and pipeline steps --- .github/workflows/build.yml | 95 +++++++++++++++++++++++++ .github/workflows/code-quality.yml | 45 ++++++++++++ .github/workflows/test.yml | 51 ++++++++++++++ Pipfile | 2 +- README.md | 15 +++- pyproject.toml | 108 +++++++++++++++++++++++++++++ retraction_check/check_bib.py | 7 ++ 7 files changed, 321 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/build.yml create mode 100644 
.github/workflows/code-quality.yml create mode 100644 .github/workflows/test.yml create mode 100644 pyproject.toml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..304805d --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,95 @@ +name: Build and Package + +on: + push: + branches: [ main ] + tags: [ 'v*' ] + pull_request: + branches: [ main ] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + pip install build twine + + - name: Install pipenv + run: | + pip install pipenv + + - name: Install dependencies + run: | + pipenv install --dev --deploy + + - name: Run tests + run: | + pipenv run test + + - name: Build package + run: | + python -m build + + - name: Check package + run: | + twine check dist/* + + - name: Upload build artifacts + uses: actions/upload-artifact@v3 + with: + name: python-package-distributions + path: dist/ + + publish-to-testpypi: + name: Publish to TestPyPI + if: startsWith(github.ref, 'refs/tags/v') + needs: build + runs-on: ubuntu-latest + environment: + name: testpypi + url: https://test.pypi.org/p/retraction-check + permissions: + id-token: write + + steps: + - name: Download build artifacts + uses: actions/download-artifact@v3 + with: + name: python-package-distributions + path: dist/ + + - name: Publish to TestPyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + + publish-to-pypi: + name: Publish to PyPI + if: startsWith(github.ref, 'refs/tags/v') + needs: build + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/retraction-check + permissions: + id-token: write + + steps: + - name: Download build artifacts + uses: actions/download-artifact@v3 + with: + name: python-package-distributions + path: dist/ + + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml new file mode 100644 index 0000000..249b6b5 --- /dev/null +++ b/.github/workflows/code-quality.yml @@ -0,0 +1,45 @@ +name: Code Quality + +on: + push: + branches: [ main, develop ] + pull_request: + branches: [ main, develop ] + +jobs: + code-quality: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install pipenv + run: | + python -m pip install --upgrade pip + pip install pipenv + + - name: Install dependencies + run: | + pipenv install --dev --deploy + + - name: Check code formatting + run: | + pipenv run format-check + + - name: Run linting + run: | + pipenv run lint + + - name: Run type checking + run: | + pipenv run type-check + + - name: Run security check (if bandit is available) + run: | + pipenv run python -m pip list | grep bandit || echo "Bandit not installed, skipping security check" + continue-on-error: true diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..0e276f0 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,51 @@ +name: Tests + +on: + push: + branches: [ main, develop ] + pull_request: + branches: [ main, develop ] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.8, 3.9, '3.10', '3.11', '3.12'] + + steps: + - uses: 
actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install pipenv + run: | + python -m pip install --upgrade pip + pip install pipenv + + - name: Install dependencies + run: | + pipenv install --dev --deploy + + - name: Run linting + run: | + pipenv run lint + + - name: Run type checking + run: | + pipenv run type-check + + - name: Run tests with coverage + run: | + pipenv run test-cov + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + file: ./coverage.xml + flags: unittests + name: codecov-umbrella + fail_ci_if_error: false diff --git a/Pipfile b/Pipfile index 2487143..cbdf0fd 100644 --- a/Pipfile +++ b/Pipfile @@ -20,7 +20,7 @@ python_version = "3.12" [scripts] test = "pytest tests/ -v" -test-cov = "pytest tests/ --cov=retraction_check --cov-report=html --cov-report=term" +test-cov = "pytest tests/ --cov=retraction_check --cov-report=html --cov-report=term --cov-report=xml" test-watch = "pytest tests/ -v --tb=short -x" lint = "flake8 retraction_check/ tests/" format = "black retraction_check/ tests/" diff --git a/README.md b/README.md index 589eac7..e1d710e 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,9 @@ # retraction_check +[![Tests](https://github.com/codingfabi/retraction_check/workflows/Tests/badge.svg)](https://github.com/codingfabi/retraction_check/actions/workflows/test.yml) +[![Code Quality](https://github.com/codingfabi/retraction_check/workflows/Code%20Quality/badge.svg)](https://github.com/codingfabi/retraction_check/actions/workflows/code-quality.yml) +[![Build](https://github.com/codingfabi/retraction_check/workflows/Build%20and%20Package/badge.svg)](https://github.com/codingfabi/retraction_check/actions/workflows/build.yml) + A Python package to check whether papers in your .bib file or a specific bibtex entry have been listed on [Retraction Watch](https://retractionwatch.com/). 
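Beyond the command-line entry points, the package also exposes a small Python API (exported from `retraction_check/__init__.py` in the first patch). A minimal sketch of calling it directly, with placeholder entry values:

```python
# Illustrative only: programmatic use of the exported helpers; the title and
# DOI below are placeholders, not a real citation.
from retraction_check import check_entry, BibEntry

entry: BibEntry = {"title": "Some Paper Title", "doi": "10.1234/example"}

# check_entry downloads the Retraction Watch CSV and builds the title/DOI
# lookup itself when no pre-built sets are passed in.
match = check_entry(entry)
if match:  # match is "doi", "fuzzy", or None
    print(f"Possible retraction ({match} match)")
```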
## Features @@ -11,7 +15,12 @@ A Python package to check whether papers in your .bib file or a specific bibtex ## Installation ```bash -# Install dependencies +# Install from PyPI (when published) +pip install retraction-check + +# Or install from source +git clone https://github.com/codingfabi/retraction_check.git +cd retraction_check pipenv install # Install development dependencies @@ -22,6 +31,10 @@ pipenv install --dev ### Command line ```bash +# Using the installed command +retraction-check yourfile.bib + +# Or using the module python -m retraction_check.check_bib yourfile.bib ``` diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..c2efd2a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,108 @@ +[build-system] +requires = ["setuptools>=61.0", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "retraction-check" +version = "0.1.0" +authors = [ + {name = "Fabian Kneissl", email = "fknssl@gmail.com"}, +] +description = "A Python package to check whether papers in your .bib file have been listed on Retraction Watch" +readme = "README.md" +license = {text = "MIT"} +requires-python = ">=3.8" +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Scientific/Engineering", + "Topic :: Text Processing", +] +keywords = ["bibtex", "retraction", "research", "bibliography"] +dependencies = [ + "bibtexparser", + "requests", +] + +[project.optional-dependencies] +dev = [ + "pytest", + "pytest-cov", + "pytest-mock", + "black", + "flake8", + "mypy", +] + +[project.urls] +Homepage = "https://github.com/codingfabi/retraction_check" +Repository = "https://github.com/codingfabi/retraction_check" +"Bug Tracker" = "https://github.com/codingfabi/retraction_check/issues" + +[project.scripts] +retraction-check = "retraction_check.check_bib:main" + +[tool.setuptools.packages.find] +where = ["."] +include = ["retraction_check*"] + +[tool.black] +line-length = 88 +target-version = ['py38'] +include = '\.pyi?$' +extend-exclude = ''' +/( + # directories + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | build + | dist +)/ +''' + +[tool.pytest.ini_options] +testpaths = ["tests"] +python_files = ["test_*.py"] +python_classes = ["Test*"] +python_functions = ["test_*"] +addopts = [ + "--strict-markers", + "--strict-config", + "--verbose", + "-ra", + "--tb=short", +] + +[tool.coverage.run] +source = ["retraction_check"] +omit = [ + "*/tests/*", + "*/venv/*", + "*/__pycache__/*", +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + "if __name__ == .__main__.:", + "class .*\\bProtocol\\):", + "@(abc\\.)?abstractmethod", +] diff --git a/retraction_check/check_bib.py b/retraction_check/check_bib.py index a48e6b3..8fa0572 100644 --- a/retraction_check/check_bib.py +++ b/retraction_check/check_bib.py @@ -117,6 +117,13 @@ def check_bib_file(bib_path: str): if not matches['doi'] and not matches['fuzzy']: print("No retracted papers found.") +def main(): + """CLI entry point for the retraction-check command.""" + if len(sys.argv) < 
2: + print("Usage: retraction-check yourfile.bib") + sys.exit(1) + check_bib_file(sys.argv[1]) + if __name__ == "__main__": if len(sys.argv) < 2: print("Usage: python -m retraction_check.check_bib yourfile.bib") From a24ec8a303dd69b459781b4282e581f9f7322ce8 Mon Sep 17 00:00:00 2001 From: codingfabi Date: Mon, 25 Aug 2025 09:39:44 +0200 Subject: [PATCH 3/5] add latest version of upload and download artifact --- .github/workflows/build.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 304805d..35630c0 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -45,7 +45,7 @@ jobs: twine check dist/* - name: Upload build artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: python-package-distributions path: dist/ @@ -63,7 +63,7 @@ jobs: steps: - name: Download build artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: python-package-distributions path: dist/ @@ -86,7 +86,7 @@ jobs: steps: - name: Download build artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: python-package-distributions path: dist/ From fb1b7ba628f9a96b5a8e3c4e7095614ec46f8c32 Mon Sep 17 00:00:00 2001 From: codingfabi Date: Mon, 25 Aug 2025 09:53:59 +0200 Subject: [PATCH 4/5] change workflows --- .github/workflows/build.yml | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 35630c0..52a2a59 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,8 +4,18 @@ on: push: branches: [ main ] tags: [ 'v*' ] - pull_request: - branches: [ main ] + workflow_dispatch: + inputs: + publish_to_testpypi: + description: 'Publish to TestPyPI' + required: false + default: false + type: boolean + publish_to_pypi: + description: 'Publish to PyPI' + required: false + default: false + type: boolean jobs: build: @@ -52,7 +62,7 @@ jobs: publish-to-testpypi: name: Publish to TestPyPI - if: startsWith(github.ref, 'refs/tags/v') + if: startsWith(github.ref, 'refs/tags/v') || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish_to_testpypi == 'true') needs: build runs-on: ubuntu-latest environment: @@ -75,7 +85,7 @@ jobs: publish-to-pypi: name: Publish to PyPI - if: startsWith(github.ref, 'refs/tags/v') + if: startsWith(github.ref, 'refs/tags/v') || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish_to_pypi == 'true') needs: build runs-on: ubuntu-latest environment: From 3e471cca96ebbaeb83d8fa4c151269e898b9545f Mon Sep 17 00:00:00 2001 From: codingfabi Date: Mon, 25 Aug 2025 10:26:31 +0200 Subject: [PATCH 5/5] fix pipelines --- Pipfile | 3 +- Pipfile.lock | 19 ++- pyproject.toml | 7 ++ retraction_check/check_bib.py | 90 ++++++++----- setup.cfg | 13 ++ tests/run_tests.py | 10 +- tests/test_check_bib.py | 231 ++++++++++++++++++---------------- 7 files changed, 233 insertions(+), 140 deletions(-) diff --git a/Pipfile b/Pipfile index cbdf0fd..965f28d 100644 --- a/Pipfile +++ b/Pipfile @@ -14,6 +14,7 @@ pytest-mock = "*" black = "*" flake8 = "*" mypy = "*" +types-requests = "*" [requires] python_version = "3.12" @@ -22,7 +23,7 @@ python_version = "3.12" test = "pytest tests/ -v" test-cov = "pytest tests/ --cov=retraction_check --cov-report=html --cov-report=term --cov-report=xml" test-watch = "pytest tests/ -v --tb=short -x" -lint = "flake8 retraction_check/ 
tests/" +lint = "flake8 retraction_check/ tests/ --max-line-length=88 --extend-ignore=E203,W503" format = "black retraction_check/ tests/" format-check = "black --check retraction_check/ tests/" type-check = "mypy retraction_check/" diff --git a/Pipfile.lock b/Pipfile.lock index a229be9..222b32a 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "61bad98d9fc46c1d11b0fc39b603ca39300a9b840337f256a5e6d1e782aecd3a" + "sha256": "fbf1a270936ba5619579756b3d4a7fd40703ccb08547464334905435f5f6583d" }, "pipfile-spec": 6, "requires": { @@ -446,6 +446,15 @@ "markers": "python_version >= '3.8'", "version": "==3.14.1" }, + "types-requests": { + "hashes": [ + "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", + "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163" + ], + "index": "pypi", + "markers": "python_version >= '3.9'", + "version": "==2.32.4.20250809" + }, "typing-extensions": { "hashes": [ "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", @@ -453,6 +462,14 @@ ], "markers": "python_version >= '3.9'", "version": "==4.14.1" + }, + "urllib3": { + "hashes": [ + "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", + "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc" + ], + "markers": "python_version >= '3.9'", + "version": "==2.5.0" } } } diff --git a/pyproject.toml b/pyproject.toml index c2efd2a..5ab1e67 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -106,3 +106,10 @@ exclude_lines = [ "class .*\\bProtocol\\):", "@(abc\\.)?abstractmethod", ] + +[tool.mypy] +python_version = "3.9" +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = true +ignore_missing_imports = true diff --git a/retraction_check/check_bib.py b/retraction_check/check_bib.py index 8fa0572..3f0e93a 100644 --- a/retraction_check/check_bib.py +++ b/retraction_check/check_bib.py @@ -6,8 +6,11 @@ import difflib from typing import List, Literal, TypedDict, Set, Optional -RETRACTION_WATCH_CSV = "https://gitlab.com/crossref/retraction-watch-data/-/raw/main/retraction_watch.csv" -MATCH_TYPE = Literal['doi', 'fuzzy'] +RETRACTION_WATCH_CSV = ( + "https://gitlab.com/crossref/retraction-watch-data/-/raw/main/retraction_watch.csv" +) +MATCH_TYPE = Literal["doi", "fuzzy"] + class BibEntry(TypedDict, total=False): title: str @@ -16,14 +19,18 @@ class BibEntry(TypedDict, total=False): year: str doi: str + def parse_bib_file(bib_path: str) -> List[BibEntry]: try: - with open(bib_path, 'r', encoding='utf-8') as bibtex_file: + with open(bib_path, "r", encoding="utf-8") as bibtex_file: bib_database = bibtexparser.load(bibtex_file) if not bib_database.entries: - print(f"Error: The .bib file '{bib_path}' is empty or contains no valid entries.") + print( + f"Error: The .bib file '{bib_path}' is empty or contains no " + f"valid entries." 
+ ) return [] - return bib_database.entries # type: ignore[return-value] + return bib_database.entries # type: ignore[no-any-return] except FileNotFoundError: print(f"Error: The .bib file '{bib_path}' was not found.") return [] @@ -34,6 +41,7 @@ def parse_bib_file(bib_path: str) -> List[BibEntry]: print(f"Error parsing .bib file '{bib_path}': {e}") return [] + def download_retraction_watch_csv() -> list: try: response = requests.get(RETRACTION_WATCH_CSV) @@ -42,12 +50,23 @@ def download_retraction_watch_csv() -> list: reader = csv.DictReader(csvfile) rows = list(reader) # Check for required columns - if not rows or 'Title' not in reader.fieldnames or 'OriginalPaperDOI' not in reader.fieldnames: - print("Error: Retraction Watch CSV is missing required columns or is corrupted.") + fieldnames = reader.fieldnames or [] + if ( + not rows + or "Title" not in fieldnames + or "OriginalPaperDOI" not in fieldnames + ): + print( + "Error: Retraction Watch CSV is missing required columns or " + "is corrupted." + ) return [] return rows except requests.RequestException as e: - print(f"Error: Could not fetch Retraction Watch CSV file. Connectivity issue or URL unreachable. Details: {e}") + print( + f"Error: Could not fetch Retraction Watch CSV file. " + f"Connectivity issue or URL unreachable. Details: {e}" + ) return [] except UnicodeDecodeError: print("Error: The Retraction Watch CSV file could not be decoded with utf-8.") @@ -56,37 +75,49 @@ def download_retraction_watch_csv() -> list: print(f"Error reading Retraction Watch CSV: {e}") return [] -def build_retraction_lookup(csv_rows): + +def build_retraction_lookup( + csv_rows: list[dict[str, str]], +) -> tuple[set[str], set[str]]: titles = set() dois = set() for row in csv_rows: - title = row.get('Title', '').strip() + title = row.get("Title", "").strip() if title: titles.add(title) - if row.get('OriginalPaperDOI'): - dois.add(row['OriginalPaperDOI'].strip()) + if row.get("OriginalPaperDOI"): + dois.add(row["OriginalPaperDOI"].strip()) return titles, dois -def fuzzy_title_match(title, titles): + +def fuzzy_title_match(title: str, titles: set[str]) -> bool: if not title: return False matches = difflib.get_close_matches(title.strip(), titles, n=1) return bool(matches) -def is_retracted(entry: BibEntry, titles: Set[str], dois: Set[str]) -> Optional[MATCH_TYPE]: + +def is_retracted( + entry: BibEntry, titles: Set[str], dois: Set[str] +) -> Optional[MATCH_TYPE]: try: - title = entry.get('title', '').strip() - doi = entry.get('doi', '').strip() + title = entry.get("title", "").strip() + doi = entry.get("doi", "").strip() except Exception as e: print(f"Invalid entry encountered: {entry}. Error: {e}") return None if doi and doi in dois: - return 'doi' + return "doi" if fuzzy_title_match(title, titles): - return 'fuzzy' + return "fuzzy" return None -def check_entry(entry, titles=None, dois=None): + +def check_entry( + entry: BibEntry, + titles: Optional[set[str]] = None, + dois: Optional[set[str]] = None, +) -> Optional[MATCH_TYPE]: """ Standalone function to check a single bibtex entry dict for retraction status. Downloads and builds lookup if titles/dois are not provided. 
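For reference, a minimal sketch of the `difflib` behaviour that `fuzzy_title_match` relies on: `get_close_matches` applies its default similarity cutoff of 0.6, so near-identical titles match while unrelated ones do not (the sample titles reused here come from the test suite):

```python
# Illustrative sketch of the stdlib call backing fuzzy_title_match.
import difflib

retracted_titles = {"Test Paper Title"}

# A near-identical title clears the default 0.6 similarity cutoff ...
print(difflib.get_close_matches("Test Paper Titl", retracted_titles, n=1))
# ['Test Paper Title']

# ... while an unrelated title returns no candidates.
print(difflib.get_close_matches("Quantum Computing Algorithms", retracted_titles, n=1))
# []
```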
@@ -97,33 +128,36 @@ def check_entry(entry, titles=None, dois=None): titles, dois = build_retraction_lookup(csv_rows) return is_retracted(entry, titles, dois) -def check_bib_file(bib_path: str): + +def check_bib_file(bib_path: str) -> None: entries = parse_bib_file(bib_path) csv_rows = download_retraction_watch_csv() titles, dois = build_retraction_lookup(csv_rows) - matches = {'doi': [], 'fuzzy': []} + matches: dict[str, list[str]] = {"doi": [], "fuzzy": []} for entry in entries: match_type = is_retracted(entry, titles, dois) if match_type: - matches[match_type].append(entry.get('title', 'Unknown Title')) - if matches['doi']: + matches[match_type].append(entry.get("title", "Unknown Title")) + if matches["doi"]: print("Retracted papers found (DOI match):") - for t in matches['doi']: + for t in matches["doi"]: print(f"- {t}") - if matches['fuzzy']: + if matches["fuzzy"]: print("\nRetracted papers found (fuzzy title match):") - for t in matches['fuzzy']: + for t in matches["fuzzy"]: print(f"- {t}") - if not matches['doi'] and not matches['fuzzy']: + if not matches["doi"] and not matches["fuzzy"]: print("No retracted papers found.") -def main(): + +def main() -> None: """CLI entry point for the retraction-check command.""" if len(sys.argv) < 2: print("Usage: retraction-check yourfile.bib") sys.exit(1) check_bib_file(sys.argv[1]) + if __name__ == "__main__": if len(sys.argv) < 2: print("Usage: python -m retraction_check.check_bib yourfile.bib") diff --git a/setup.cfg b/setup.cfg index 5fdf1e5..11d2969 100644 --- a/setup.cfg +++ b/setup.cfg @@ -29,3 +29,16 @@ exclude_lines = if __name__ == .__main__.: class .*\bProtocol\): @(abc\.)?abstractmethod + +[flake8] +max-line-length = 88 +extend-ignore = E203, W503 +exclude = + .git, + __pycache__, + .pytest_cache, + .venv, + venv, + build, + dist, + *.egg-info diff --git a/tests/run_tests.py b/tests/run_tests.py index f629b83..bb09eb1 100644 --- a/tests/run_tests.py +++ b/tests/run_tests.py @@ -12,20 +12,22 @@ package_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, package_dir) + def run_tests(): """Discover and run all tests in the tests directory""" # Discover tests loader = unittest.TestLoader() tests_dir = os.path.dirname(os.path.abspath(__file__)) - suite = loader.discover(tests_dir, pattern='test_*.py') - + suite = loader.discover(tests_dir, pattern="test_*.py") + # Run tests runner = unittest.TextTestRunner(verbosity=2) result = runner.run(suite) - + # Return exit code based on test results return 0 if result.wasSuccessful() else 1 -if __name__ == '__main__': + +if __name__ == "__main__": exit_code = run_tests() sys.exit(exit_code) diff --git a/tests/test_check_bib.py b/tests/test_check_bib.py index 1a35dcf..2f98d80 100644 --- a/tests/test_check_bib.py +++ b/tests/test_check_bib.py @@ -6,151 +6,158 @@ # Add the parent directory to the path so we can import the module sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -from retraction_check.check_bib import ( +from retraction_check.check_bib import ( # noqa: E402 parse_bib_file, download_retraction_watch_csv, build_retraction_lookup, fuzzy_title_match, is_retracted, check_entry, - BibEntry + BibEntry, ) class TestParseBibFile(unittest.TestCase): - - @patch('builtins.open', new_callable=mock_open, read_data='@article{test,\ntitle={Test Title},\nauthor={Test Author}\n}') - @patch('bibtexparser.load') + + @patch( + "builtins.open", + new_callable=mock_open, + read_data="@article{test,\ntitle={Test Title},\nauthor={Test Author}\n}", + 
) + @patch("bibtexparser.load") def test_parse_valid_bib_file(self, mock_load, mock_file): mock_db = MagicMock() - mock_db.entries = [{'title': 'Test Title', 'author': 'Test Author'}] + mock_db.entries = [{"title": "Test Title", "author": "Test Author"}] mock_load.return_value = mock_db - - result = parse_bib_file('test.bib') + + result = parse_bib_file("test.bib") self.assertEqual(len(result), 1) - self.assertEqual(result[0]['title'], 'Test Title') - - @patch('builtins.open', side_effect=FileNotFoundError) + self.assertEqual(result[0]["title"], "Test Title") + + @patch("builtins.open", side_effect=FileNotFoundError) def test_parse_file_not_found(self, mock_file): - result = parse_bib_file('nonexistent.bib') + result = parse_bib_file("nonexistent.bib") self.assertEqual(result, []) - - @patch('builtins.open', side_effect=UnicodeDecodeError('utf-8', b'', 0, 1, 'test')) + + @patch("builtins.open", side_effect=UnicodeDecodeError("utf-8", b"", 0, 1, "test")) def test_parse_unicode_decode_error(self, mock_file): - result = parse_bib_file('invalid_encoding.bib') + result = parse_bib_file("invalid_encoding.bib") self.assertEqual(result, []) - - @patch('builtins.open', new_callable=mock_open) - @patch('bibtexparser.load') + + @patch("builtins.open", new_callable=mock_open) + @patch("bibtexparser.load") def test_parse_empty_bib_file(self, mock_load, mock_file): mock_db = MagicMock() mock_db.entries = [] mock_load.return_value = mock_db - - result = parse_bib_file('empty.bib') + + result = parse_bib_file("empty.bib") self.assertEqual(result, []) class TestDownloadRetractionWatchCSV(unittest.TestCase): - - @patch('requests.get') + + @patch("requests.get") def test_download_success(self, mock_get): mock_response = MagicMock() - mock_response.text = 'Title,OriginalPaperDOI\nTest Paper,10.1234/test' + mock_response.text = "Title,OriginalPaperDOI\nTest Paper,10.1234/test" mock_response.raise_for_status.return_value = None mock_get.return_value = mock_response - + result = download_retraction_watch_csv() self.assertEqual(len(result), 1) - self.assertEqual(result[0]['Title'], 'Test Paper') - - @patch('requests.get') + self.assertEqual(result[0]["Title"], "Test Paper") + + @patch("requests.get") def test_download_network_error(self, mock_get): - mock_get.side_effect = Exception('Network error') - + mock_get.side_effect = Exception("Network error") + result = download_retraction_watch_csv() self.assertEqual(result, []) - - @patch('requests.get') + + @patch("requests.get") def test_download_missing_columns(self, mock_get): mock_response = MagicMock() - mock_response.text = 'WrongColumn\nTest Value' + mock_response.text = "WrongColumn\nTest Value" mock_response.raise_for_status.return_value = None mock_get.return_value = mock_response - + result = download_retraction_watch_csv() self.assertEqual(result, []) class TestBuildRetractionLookup(unittest.TestCase): - + def test_build_lookup_with_valid_data(self): csv_rows = [ - {'Title': 'Test Paper 1', 'OriginalPaperDOI': '10.1234/test1'}, - {'Title': 'Test Paper 2', 'OriginalPaperDOI': '10.1234/test2'}, - {'Title': '', 'OriginalPaperDOI': ''} # Empty row + {"Title": "Test Paper 1", "OriginalPaperDOI": "10.1234/test1"}, + {"Title": "Test Paper 2", "OriginalPaperDOI": "10.1234/test2"}, + {"Title": "", "OriginalPaperDOI": ""}, # Empty row ] - + titles, dois = build_retraction_lookup(csv_rows) - + self.assertEqual(len(titles), 2) self.assertEqual(len(dois), 2) - self.assertIn('Test Paper 1', titles) - self.assertIn('10.1234/test1', dois) - + self.assertIn("Test Paper 
1", titles) + self.assertIn("10.1234/test1", dois) + def test_build_lookup_with_empty_data(self): csv_rows = [] - + titles, dois = build_retraction_lookup(csv_rows) - + self.assertEqual(len(titles), 0) self.assertEqual(len(dois), 0) class TestFuzzyTitleMatch(unittest.TestCase): - + def test_fuzzy_match_exact(self): - titles = {'Test Paper Title'} - result = fuzzy_title_match('Test Paper Title', titles) + titles = {"Test Paper Title"} + result = fuzzy_title_match("Test Paper Title", titles) self.assertTrue(result) - + def test_fuzzy_match_similar(self): - titles = {'Test Paper Title'} - result = fuzzy_title_match('Test Paper Titl', titles) + titles = {"Test Paper Title"} + result = fuzzy_title_match("Test Paper Titl", titles) self.assertTrue(result) - + def test_fuzzy_match_no_match(self): - titles = {'Machine Learning Applications in Healthcare'} - result = fuzzy_title_match('Quantum Computing Algorithms', titles) + titles = {"Machine Learning Applications in Healthcare"} + result = fuzzy_title_match("Quantum Computing Algorithms", titles) self.assertFalse(result) - + def test_fuzzy_match_empty_title(self): - titles = {'Test Paper Title'} - result = fuzzy_title_match('', titles) + titles = {"Test Paper Title"} + result = fuzzy_title_match("", titles) self.assertFalse(result) class TestIsRetracted(unittest.TestCase): - + def setUp(self): - self.titles = {'Retracted Paper Title'} - self.dois = {'10.1234/retracted'} - + self.titles = {"Retracted Paper Title"} + self.dois = {"10.1234/retracted"} + def test_doi_match(self): - entry: BibEntry = {'title': 'Some Title', 'doi': '10.1234/retracted'} + entry: BibEntry = {"title": "Some Title", "doi": "10.1234/retracted"} result = is_retracted(entry, self.titles, self.dois) - self.assertEqual(result, 'doi') - + self.assertEqual(result, "doi") + def test_fuzzy_title_match(self): - entry: BibEntry = {'title': 'Retracted Paper Title', 'doi': ''} + entry: BibEntry = {"title": "Retracted Paper Title", "doi": ""} result = is_retracted(entry, self.titles, self.dois) - self.assertEqual(result, 'fuzzy') - + self.assertEqual(result, "fuzzy") + def test_no_match(self): - entry: BibEntry = {'title': 'Completely Different Research Topic', 'doi': '10.1234/clean'} + entry: BibEntry = { + "title": "Completely Different Research Topic", + "doi": "10.1234/clean", + } result = is_retracted(entry, self.titles, self.dois) self.assertIsNone(result) - + def test_invalid_entry(self): # Test with a malformed entry that could cause exceptions entry = None @@ -159,30 +166,30 @@ def test_invalid_entry(self): class TestCheckEntry(unittest.TestCase): - - @patch('retraction_check.check_bib.download_retraction_watch_csv') - @patch('retraction_check.check_bib.build_retraction_lookup') + + @patch("retraction_check.check_bib.download_retraction_watch_csv") + @patch("retraction_check.check_bib.build_retraction_lookup") def test_check_entry_with_provided_lookup(self, mock_build, mock_download): - titles = {'Retracted Paper'} - dois = {'10.1234/retracted'} - entry: BibEntry = {'title': 'Retracted Paper', 'doi': ''} - + titles = {"Retracted Paper"} + dois = {"10.1234/retracted"} + entry: BibEntry = {"title": "Retracted Paper", "doi": ""} + result = check_entry(entry, titles, dois) - + # Should not call download or build since lookup is provided mock_download.assert_not_called() mock_build.assert_not_called() - self.assertEqual(result, 'fuzzy') - - @patch('retraction_check.check_bib.download_retraction_watch_csv') - @patch('retraction_check.check_bib.build_retraction_lookup') + 
+
+    @patch("retraction_check.check_bib.download_retraction_watch_csv")
+    @patch("retraction_check.check_bib.build_retraction_lookup")
     def test_check_entry_without_lookup(self, mock_build, mock_download):
         mock_download.return_value = []
         mock_build.return_value = (set(), set())
-        entry: BibEntry = {'title': 'Clean Paper', 'doi': ''}
-        
+        entry: BibEntry = {"title": "Clean Paper", "doi": ""}
+
         result = check_entry(entry)
-        
+
         # Should call download and build since lookup is not provided
         mock_download.assert_called_once()
         mock_build.assert_called_once()
@@ -191,61 +198,73 @@ def test_check_entry_without_lookup(self, mock_build, mock_download):
 
 class TestCheckBibFileEndToEnd(unittest.TestCase):
     """End-to-end tests for the complete workflow"""
-    
-    @patch('retraction_check.check_bib.download_retraction_watch_csv')
-    @patch('builtins.open', new_callable=mock_open, read_data='@article{test,\ntitle={Test Paper},\ndoi={10.1234/test}\n}')
-    @patch('bibtexparser.load')
+
+    @patch("retraction_check.check_bib.download_retraction_watch_csv")
+    @patch(
+        "builtins.open",
+        new_callable=mock_open,
+        read_data="@article{test,\ntitle={Test Paper},\ndoi={10.1234/test}\n}",
+    )
+    @patch("bibtexparser.load")
     def test_check_bib_file_no_retractions(self, mock_load, mock_file, mock_download):
         # Mock bib file parsing
         mock_db = MagicMock()
-        mock_db.entries = [{'title': 'Test Paper', 'doi': '10.1234/test'}]
+        mock_db.entries = [{"title": "Test Paper", "doi": "10.1234/test"}]
         mock_load.return_value = mock_db
-        
+
         # Mock CSV download with no matching retractions
         mock_download.return_value = [
-            {'Title': 'Different Paper', 'OriginalPaperDOI': '10.1234/different'}
+            {"Title": "Different Paper", "OriginalPaperDOI": "10.1234/different"}
         ]
-        
+
         # This should run without errors
         from retraction_check.check_bib import check_bib_file
+
         try:
-            check_bib_file('test.bib')
+            check_bib_file("test.bib")
         except Exception as e:
             self.fail(f"check_bib_file raised an exception: {e}")
-    
-    @patch('retraction_check.check_bib.download_retraction_watch_csv')
-    @patch('builtins.open', new_callable=mock_open, read_data='@article{test,\ntitle={Retracted Paper},\ndoi={10.1234/retracted}\n}')
-    @patch('bibtexparser.load')
+
+    @patch("retraction_check.check_bib.download_retraction_watch_csv")
+    @patch(
+        "builtins.open",
+        new_callable=mock_open,
+        read_data="@article{test,\ntitle={Retracted Paper},"
+        "\ndoi={10.1234/retracted}\n}",
+    )
+    @patch("bibtexparser.load")
     def test_check_bib_file_with_retractions(self, mock_load, mock_file, mock_download):
         # Mock bib file parsing
         mock_db = MagicMock()
-        mock_db.entries = [{'title': 'Retracted Paper', 'doi': '10.1234/retracted'}]
+        mock_db.entries = [{"title": "Retracted Paper", "doi": "10.1234/retracted"}]
         mock_load.return_value = mock_db
-        
+
         # Mock CSV download with matching retraction
         mock_download.return_value = [
-            {'Title': 'Retracted Paper', 'OriginalPaperDOI': '10.1234/retracted'}
+            {"Title": "Retracted Paper", "OriginalPaperDOI": "10.1234/retracted"}
         ]
-        
+
         # This should run without errors and find the retraction
         from retraction_check.check_bib import check_bib_file
+
         try:
-            check_bib_file('test.bib')
+            check_bib_file("test.bib")
         except Exception as e:
             self.fail(f"check_bib_file raised an exception: {e}")
-    
-    @patch('retraction_check.check_bib.download_retraction_watch_csv')
+
+    @patch("retraction_check.check_bib.download_retraction_watch_csv")
     def test_check_bib_file_nonexistent_file(self, mock_download):
         # Mock CSV download
         mock_download.return_value = []
-        
+
         # Test with a file that doesn't exist
         from retraction_check.check_bib import check_bib_file
+
         try:
-            check_bib_file('nonexistent_file.bib')
+            check_bib_file("nonexistent_file.bib")
         except Exception as e:
             self.fail(f"check_bib_file should handle nonexistent files gracefully: {e}")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()