From f11a2c62840875e1c30a2c80a6c8f14de9483833 Mon Sep 17 00:00:00 2001 From: ayumi-nishida Date: Thu, 20 Nov 2025 18:28:51 +0900 Subject: [PATCH 1/9] =?UTF-8?q?56380=E4=BF=AE=E6=AD=A3=E5=88=86=E3=82=92?= =?UTF-8?q?=E5=AF=BE=E5=BF=9C/=E5=8D=98=E4=BD=93=E3=83=86=E3=82=B9?= =?UTF-8?q?=E3=83=88=E3=82=B3=E3=83=BC=E3=83=89=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/invenio-indexer/tests/test_api.py | 133 ++-- modules/weko-authors/tests/test_utils.py | 367 +--------- modules/weko-authors/weko_authors/utils.py | 45 -- modules/weko-deposit/tests/test_api.py | 35 +- modules/weko-deposit/tests/test_tasks.py | 49 +- modules/weko-deposit/weko_deposit/api.py | 21 +- modules/weko-deposit/weko_deposit/tasks.py | 54 +- .../weko-items-ui/tests/data/temp_data.json | 663 +++++++++++++++++- modules/weko-items-ui/tests/test_api.py | 1 - modules/weko-items-ui/tests/test_utils.py | 80 --- modules/weko-items-ui/weko_items_ui/api.py | 4 - modules/weko-items-ui/weko_items_ui/utils.py | 50 -- modules/weko-items-ui/weko_items_ui/views.py | 6 +- modules/weko-records/tests/conftest.py | 52 +- modules/weko-records/tests/test_utils.py | 79 +-- modules/weko-records/weko_records/utils.py | 22 +- scripts/demo/update_W2025-29.py | 3 - tools/update_weko_links.py | 582 --------------- 18 files changed, 888 insertions(+), 1358 deletions(-) delete mode 100644 tools/update_weko_links.py diff --git a/modules/invenio-indexer/tests/test_api.py b/modules/invenio-indexer/tests/test_api.py index ae174d7c24..9fd2f7afd9 100644 --- a/modules/invenio-indexer/tests/test_api.py +++ b/modules/invenio-indexer/tests/test_api.py @@ -149,7 +149,7 @@ def _mock_bulk(client, actions_iterator, **kwargs): with patch('invenio_indexer.api.bulk', _mock_bulk): # Exceptions are caught - assert RecordIndexer().process_bulk_queue() == 1 + assert RecordIndexer().process_bulk_queue() == (0, 0, 2) assert len(ret['actions']) == 1 assert ret['actions'][0]['_id'] == str(r2.id) @@ -169,8 +169,8 @@ def test_process_bulk_queue(app, queue): # BulkIndexError errors = [ - {"index": {"_id": str(records[4].id), "error": {"type": "version_conflict"}}}, - {"index": {"_id": str(records[5].id), "error": {"type": "version_conflict"}}} + {"index": {"_id": str(records[4].id), "error": {"type": "version_conflict", "reason": "BulkIndexError_reason"}}}, + {"index": {"_id": str(records[5].id), "error": {"type": "version_conflict", "reason": "BulkIndexError_reason"}}} ] app.config['SEARCH_UI_SEARCH_INDEX'] = 'test-index' def mock_reindex_bulk_be(self, client, actions, **kwargs): @@ -179,27 +179,39 @@ def mock_reindex_bulk_be(self, client, actions, **kwargs): with patch('invenio_indexer.api.RecordIndexer.reindex_bulk', new=mock_reindex_bulk_be): with patch('invenio_indexer.api.RecordIndexer._actionsiter', return_value=[{}]*10): - indexer = RecordIndexer() - result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) - assert result[0] == 8 # success数 - assert result[1] == 2 # fail数 - assert errors[0]['index']['error']['type'] == "version_conflict" + with patch('invenio_indexer.api.click.secho') as mock_secho: + indexer = RecordIndexer() + result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) + # エラーログの内容を検証 + assert any( + "type:version_conflict" in str(call) and + "reason:BulkIndexError_reason" in str(call) + for call in mock_secho.call_args_list + ) + assert result[0] == 8 # success数 + assert result[1] == 2 # fail数 # ConnectionError errors = [ - {"index": {"_id": 
str(records[9].id), "error": {"type": "ConnectionError"}}} + {"index": {"_id": str(records[9].id), "error": {"type": "ConnectionError", "reason": "ConnectionError_reason"}}} ] es_conn_error = ConnectionError("ConnectionError!", {}, {}) with patch('invenio_indexer.api.RecordIndexer.reindex_bulk', side_effect=DummyBulkConnectionError(success=8, failed=1, errors=errors, original_exception=es_conn_error)): with patch('invenio_indexer.api.RecordIndexer._actionsiter', return_value=[{}]*10): - indexer = RecordIndexer() - result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) - assert result[1] == 1 # fail数 - assert errors[0]['index']['error']['type'] == 'ConnectionError' + with patch('invenio_indexer.api.click.secho') as mock_secho: + indexer = RecordIndexer() + result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) + # エラーログの内容を検証 + assert any( + "type:ConnectionError" in str(call) and + "reason:ConnectionError_reason" in str(call) + for call in mock_secho.call_args_list + ) + assert result[1] == 1 # fail数 # ConnectionTimeout errors = [ - {"index": {"_id": str(records[9].id), "error": {"type": "ConnectionTimeout"}}} + {"index": {"_id": str(records[9].id), "error": {"type": "ConnectionTimeout", "reason": "ConnectionTimeout_reason"}}} ] es_conn_error = ConnectionTimeout("ConnectionTimeout!", {}, {}) def mock_reindex_bulk_ct(client, actions, **kwargs): @@ -207,14 +219,20 @@ def mock_reindex_bulk_ct(client, actions, **kwargs): raise DummyBulkConnectionTimeout(success=8, failed=1, errors=errors, original_exception=es_conn_error) with patch('invenio_indexer.api.RecordIndexer.reindex_bulk', side_effect=mock_reindex_bulk_ct): with patch('invenio_indexer.api.RecordIndexer._actionsiter', return_value=[{}]*10): - indexer = RecordIndexer() - result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) - assert result[1] == 1 - assert errors[0]['index']['error']['type'] == 'ConnectionTimeout' + with patch('invenio_indexer.api.click.secho') as mock_secho: + indexer = RecordIndexer() + result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) + # エラーログの内容を検証 + assert any( + "type:ConnectionTimeout" in str(call) and + "reason:ConnectionTimeout_reason" in str(call) + for call in mock_secho.call_args_list + ) + assert result[1] == 1 # Exception errors = [ - {"index": {"_id": str(records[9].id), "error": {"type": "Exception"}}} + {"index": {"_id": str(records[9].id), "error": {"type": "Exception", "reason": "Exception_reason"}}} ] es_conn_error = Exception("Exception!", {}, {}) def mock_reindex_bulk_ct(client, actions, **kwargs): @@ -222,10 +240,16 @@ def mock_reindex_bulk_ct(client, actions, **kwargs): raise DummyBulkException(success=8, failed=1, errors=errors, original_exception=es_conn_error) with patch('invenio_indexer.api.RecordIndexer.reindex_bulk', side_effect=mock_reindex_bulk_ct): with patch('invenio_indexer.api.RecordIndexer._actionsiter', return_value=[{}]*10): - indexer = RecordIndexer() - result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) - assert result[1] == 1 # fail数 - assert errors[0]['index']['error']['type'] == 'Exception' + with patch('invenio_indexer.api.click.secho') as mock_secho: + indexer = RecordIndexer() + result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) + # エラーログの内容を検証 + assert any( + "type:Exception" in str(call) and + "reason:Exception_reason" in str(call) + for call in mock_secho.call_args_list + ) + assert result[1] == 1 # fail数 # BulkIndexError (when errors is an empty list) errors = [] @@ -243,7 +267,7 @@ 
def mock_reindex_bulk_be_empty(self, client, actions, **kwargs): assert errors == [] #BulkIndexError (err_info is string) - errors = [{"index": {"_id": "dummy", "error": "Some string error"}}] + errors = [{"index": {"_id": "dummy", "error": {"type": "Some string error"}}}] def mock_reindex_bulk_be_str(self, client, actions, **kwargs): self.count = 10 raise DummyBulkIndexError(errors) @@ -253,7 +277,7 @@ def mock_reindex_bulk_be_str(self, client, actions, **kwargs): result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) assert result[0] == 9 # Number of successes assert result[1] == 1 # Number of failures - assert errors[0]['index']['error'] == "Some string error" + assert errors[0]['index']['error']['type'] == "Some string error" # ConnectionError (when errors is an empty list) errors = [] @@ -295,7 +319,7 @@ def mock_reindex_bulk_exception_empty(client, actions, **kwargs): assert errors == [] # BulkException (err_info is string) - errors = [{"index": {"_id": "dummy", "error": "Some string error"}}] + errors = [{"index": {"_id": "dummy", "error": {"type": "Some string error"}}}] def mock_reindex_bulk_exception_str(self, client, actions, **kwargs): self.count = 10 raise DummyBulkException(success=0, failed=1, errors=errors, original_exception=Exception("Exception!", {}, {})) @@ -305,7 +329,7 @@ def mock_reindex_bulk_exception_str(self, client, actions, **kwargs): result = indexer.process_bulk_queue(es_bulk_kwargs=es_bulk_kwargs) assert result[0] == 0 # Number of successes assert result[1] == 1 # Number of failures - assert errors[0]['index']['error'] == "Some string error" + assert errors[0]['index']['error']['type'] == "Some string error" def test_process_bulk_queue_for_error_loop(app): @@ -690,6 +714,7 @@ def test_bulkrecordindexer_index_delete_by_record(app, queue): assert data1['id'] == str(recid) assert data1['op'] == 'delete' +# .tox/c1/bin/pytest --cov=invenio_indexer tests/test_api.py::test__index_action_cases -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-workflow/.tox/c1/tmp @pytest.mark.parametrize( "body, expected_file, has_content, version_id, es_version, expect_commit, max_body_size, expect_error", [ @@ -706,7 +731,6 @@ def test_bulkrecordindexer_index_delete_by_record(app, queue): def test__index_action_cases(monkeypatch, body, expected_file, has_content, version_id, es_version, expect_commit, max_body_size, expect_error): import sys import sqlalchemy - def setup_indexer_and_env(): # Setup dummy record and monkeypatch Record.get_record dummy_record = DummyRecord('rid', version_id, 2) @@ -714,7 +738,7 @@ def setup_indexer_and_env(): # Setup dummy WekoIndexer class DummyWekoIndexer: def get_es_index(self): return None - def get_metadata_by_item_id(self, rid): return {'_version': es_version} + def get_metadata_by_item_id(self, rid, is_ignore=True): return {'_version': es_version, 'found': True} sys.modules['weko_deposit.api'] = types.SimpleNamespace(WekoIndexer=DummyWekoIndexer) # Setup dummy DB session committed = {'called': False} @@ -740,7 +764,7 @@ def error(self, *a, **k): called['error'] = True indexer = RecordIndexer(search_client=None) indexer.count = 0 indexer.record_to_index = lambda record: ('idx', 'doc') - indexer._prepare_record = lambda record, index, doc_type, arguments: body.copy() + indexer._prepare_record = lambda record, index, doc_type, arguments, with_deleted=None: body.copy() return indexer, committed, called payload = {'id': 'rid'} @@ -748,40 +772,65 @@ def error(self, *a, **k): called['error'] = True if has_content: result 
= indexer._index_action(payload) assert result['_source']['content'][0]['file'] == expected_file + assert result['_version'] == max(es_version, version_id) else: result = indexer._index_action(payload) assert result['_source']['file'] == expected_file assert 'content' not in result['_source'] +# .tox/c1/bin/pytest --cov=invenio_indexer tests/test_api.py::test__actionsiter_exception -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-workflow/.tox/c1/tmp def test__actionsiter_exception(monkeypatch): """Test that reject and logger.error are called when an exception occurs in _actionsiter.""" indexer = RecordIndexer(search_client=None) # Raise exception in index side (could be _delete_action or _index_action) - def raise_exc(payload): - raise Exception('test error') + + error_reason = "Exception_reason" + def raise_exc(payload, with_deleted=False): + raise Exception(error_reason) monkeypatch.setattr(indexer, '_index_action', raise_exc) - # Detect logger.error call - called = {'error': False} + logs = {} + def fake_error(msg, *args, **kwargs): + logs['msg'] = msg + monkeypatch.setattr('invenio_indexer.api.current_app', types.SimpleNamespace( - logger=types.SimpleNamespace(error=lambda *a, **k: called.update({'error': True})) - )) + logger=types.SimpleNamespace(error=fake_error)) + ) msg = DummyMessage({'op': 'index', 'id': 'rid'}) # Execute result = list(indexer._actionsiter([msg])) assert msg.rejected is True assert msg.acked is False - assert called['error'] is True + assert "type:Exception" in logs['msg'] + assert "message:Exception_reason" in logs['msg'] +# .tox/c1/bin/pytest --cov=invenio_indexer tests/test_api.py::test__actionsiter_noresultfound -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-workflow/.tox/c1/tmp def test__actionsiter_noresultfound(monkeypatch): """Test that reject is called when NoResultFound occurs in _actionsiter.""" indexer = RecordIndexer(search_client=None) from sqlalchemy.orm.exc import NoResultFound + error_reason = "NoResultFound_reason" + # Make Record.get_record raise NoResultFound - monkeypatch.setattr('invenio_indexer.api.Record', types.SimpleNamespace(get_record=lambda rid: (_ for _ in ()).throw(NoResultFound('not found')))) + monkeypatch.setattr( + 'invenio_indexer.api.Record', + types.SimpleNamespace(get_record=lambda rid: (_ for _ in ()).throw(NoResultFound(error_reason))) + ) + + logs = {} + def fake_error(msg, *args, **kwargs): + logs['msg'] = msg + + monkeypatch.setattr( + 'invenio_indexer.api.current_app', + types.SimpleNamespace(logger=types.SimpleNamespace(error=fake_error)) + ) + msg = DummyMessage({'op': 'index', 'id': 'rid'}) list(indexer._actionsiter([msg])) assert msg.rejected is True assert msg.acked is False + assert "type:NoResultFound" in logs['msg'] + assert "message:NoResultFound_reason" in logs['msg'] def test__actionsiter_delete(monkeypatch): """Test that _delete_action is called and acked when delete pattern in _actionsiter.""" @@ -798,14 +847,14 @@ def dummy_delete_action(payload): assert msg.acked is True assert msg.rejected is False - +# .tox/c1/bin/pytest --cov=invenio_indexer tests/test_api.py::test__actionsiter_sqlalchemyerror -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-workflow/.tox/c1/tmp def test__actionsiter_sqlalchemyerror(monkeypatch): """Test that rollback, logger.error, and reject are called on SQLAlchemyError in _actionsiter.""" - import sqlalchemy + from invenio_indexer.api import SQLAlchemyError indexer = RecordIndexer(search_client=None) # Raise 
SQLAlchemyError in _index_action - def raise_sqlalchemy(payload): - raise sqlalchemy.exc.SQLAlchemyError('sqlalchemy error') + def raise_sqlalchemy(payload, with_deleted=False): + raise SQLAlchemyError('sqlalchemy error') monkeypatch.setattr(indexer, '_index_action', raise_sqlalchemy) # Detect rollback and logger.error call called = {'rollback': False, 'error': False} diff --git a/modules/weko-authors/tests/test_utils.py b/modules/weko-authors/tests/test_utils.py index 64cff00dc5..593c2ee5b0 100644 --- a/modules/weko-authors/tests/test_utils.py +++ b/modules/weko-authors/tests/test_utils.py @@ -1878,7 +1878,7 @@ def delete(i): assert count_authors()['count'] == 0 from weko_authors.utils import validate_weko_id, check_weko_id_is_exists, check_period_date, delete_export_url,\ - handle_exception, export_prefix,check_file_name, clean_deep, update_data_for_weko_link + handle_exception, export_prefix,check_file_name, clean_deep from redis.exceptions import RedisError from sqlalchemy.exc import SQLAlchemyError # .tox/c1/bin/pytest --cov=weko_authors tests/test_utils.py::TestValidateWekoId -vv -s --cov-branch --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp @@ -3469,371 +3469,6 @@ def test_clean_deep_no_none_or_empty_string(self, app): expected = {'fullname': 'Jane Doe', 'email': {"test2": "test2"}, 'test': [{"test2": "test2"}]} assert clean_deep(data) == expected -# .tox/c1/bin/pytest --cov=weko_authors tests/test_utils.py::TestUpdateDataForWekoLink -vv -s --cov-branch --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp -class TestUpdateDataForWekoLink: - """update_data_for_weko_linkのテストクラス""" - -# .tox/c1/bin/pytest --cov=weko_authors tests/test_utils.py::TestUpdateDataForWekoLink::test_update_data_normal_case -vv -s --cov-branch --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp - def test_update_data_normal_case(self, app): - """ - 正常系 - 条件:weko_linkの内容が更新される場合 - 入力: - - data: nameIdentifiersを含むメタデータ - - weko_link: 更新前のweko_link - 期待結果: - - weko_linkが更新される - - dataのnameIdentifierが更新される - """ - # テスト用データ - data = { - "creators": [ - { - "nameIdentifiers": [ - { - "nameIdentifierScheme": "WEKO", - "nameIdentifier": "old_weko_id_1" - } - ] - } - ] - } - weko_link = {"1": "old_weko_id_1"} - - # AuthorsクラスのMock - author_mock = { - "authorIdInfo": [ - {"idType": "1", "authorId": "new_weko_id_1"} - ] - } - - with patch.object(Authors, 'get_author_by_id', return_value=author_mock): - update_data_for_weko_link(data, weko_link) - result = data["creators"][0]["nameIdentifiers"][0]["nameIdentifier"] - # 検証 - assert result == "new_weko_id_1" - - def test_no_change_in_weko_link(self, app): - """ - 正常系 - 条件:weko_linkの内容が変更されない場合 - 入力: - - data: nameIdentifiersを含むメタデータ - - weko_link: 更新前のweko_link - 期待結果: - - weko_linkは変更されない - - dataは変更されない - """ - data = { - "creators": [ - { - "nameIdentifiers": [ - { - "nameIdentifierScheme": "WEKO", - "nameIdentifier": "weko_id_1" - } - ] - } - ] - } - weko_link = {"1": "weko_id_1"} - data_copy = copy.deepcopy(data) - - author_mock = { - "authorIdInfo": [ - {"idType": "1", "authorId": "weko_id_1"} - ] - } - - with patch.object(Authors, 'get_author_by_id', return_value=author_mock): - update_data_for_weko_link(data, weko_link) - - assert data == data_copy - - def test_author_not_found(self, app): - """ - 正常系 - 条件:Authorsテーブルに該当するauthorが存在しない場合 - 入力: - - data: 任意のメタデータ - - weko_link: 更新前のweko_link - 期待結果: - - weko_linkは変更されない - - dataは変更されない - """ - data = {"creators": [{"name": "test"}]} - weko_link = {"1": "weko_id_1"} 
- data_copy = copy.deepcopy(data) - weko_link_copy = copy.deepcopy(weko_link) - - with patch.object(Authors, 'get_author_by_id', return_value=None): - update_data_for_weko_link(data, weko_link) - - assert data == data_copy - - def test_id_type_not_1(self, app): - """ - 正常系 - 条件:authorIdInfoのidTypeが1でない場合 - 入力: - - data: 任意のメタデータ - - weko_link: 更新前のweko_link - 期待結果: - - weko_linkは変更されない - - dataは変更されない - """ - data = {"creators": [{"name": "test"}]} - weko_link = {"1": "weko_id_1"} - data_copy = copy.deepcopy(data) - weko_link_copy = copy.deepcopy(weko_link) - - author_mock = { - "authorIdInfo": [ - {"idType": "2", "authorId": "other_id"} - ] - } - - with patch.object(Authors, 'get_author_by_id', return_value=author_mock): - update_data_for_weko_link(data, weko_link) - - assert data == data_copy - - def test_multiple_authors_and_identifiers(self, app): - """ - 正常系 - 条件:複数のauthorとnameIdentifiersがある場合 - 入力: - - data: 複数のauthorとnameIdentifiersを含むメタデータ - - weko_link: 複数のエントリを持つweko_link - 期待結果: - - dataの全てのnameIdentifierが更新される - """ - data = { - "creators": [ - { - "nameIdentifiers": [ - { - "nameIdentifierScheme": "WEKO", - "nameIdentifier": "old_weko_id_1" - }, - { - "nameIdentifierScheme": "OTHER", - "nameIdentifier": "other_id" - } - ] - }, - { - "nameIdentifiers": [ - { - "nameIdentifierScheme": "WEKO", - "nameIdentifier": "old_weko_id_2" - } - ] - } - ], - "contributors": [ - { - "nameIdentifiers": [ - { - "nameIdentifierScheme": "WEKO", - "nameIdentifier": "old_weko_id_3" - } - ] - } - ] - } - weko_link = { - "1": "old_weko_id_1", - "2": "old_weko_id_2", - "3": "old_weko_id_3" - } - - def mock_get_author_by_id(pk_id): - if pk_id == "1": - return {"authorIdInfo": [{"idType": "1", "authorId": "new_weko_id_1"}]} - elif pk_id == "2": - return {"authorIdInfo": [{"idType": "1", "authorId": "new_weko_id_2"}]} - elif pk_id == "3": - return {"authorIdInfo": [{"idType": "1", "authorId": "new_weko_id_3"}]} - return None - - with patch.object(Authors, 'get_author_by_id', side_effect=mock_get_author_by_id): - update_data_for_weko_link(data, weko_link) - - assert data["creators"][0]["nameIdentifiers"][0]["nameIdentifier"] == "new_weko_id_1" - assert data["creators"][1]["nameIdentifiers"][0]["nameIdentifier"] == "new_weko_id_2" - assert data["contributors"][0]["nameIdentifiers"][0]["nameIdentifier"] == "new_weko_id_3" - # 他のスキームのIDは変更されないことを確認 - assert data["creators"][0]["nameIdentifiers"][1]["nameIdentifier"] == "other_id" - - def test_non_list_data_fields(self, app): - """ - 正常系 - 条件:dataのフィールドがリストでない場合 - 入力: - - data: リスト以外のデータ型を含むフィールドを持つメタデータ - - weko_link: weko_link - 期待結果: - - dataの該当フィールドはスキップされる - """ - data = { - "creators": [ - { - "nameIdentifiers": [ - { - "nameIdentifierScheme": "WEKO", - "nameIdentifier": "old_weko_id_1" - } - ] - } - ], - "title": "Test Title", # 文字列フィールド - "description": {"text": "Test Description"} # 辞書フィールド - } - weko_link = {"1": "old_weko_id_1"} - - author_mock = { - "authorIdInfo": [ - {"idType": "1", "authorId": "new_weko_id_1"} - ] - } - - with patch.object(Authors, 'get_author_by_id', return_value=author_mock): - update_data_for_weko_link(data, weko_link) - - assert data["creators"][0]["nameIdentifiers"][0]["nameIdentifier"] == "new_weko_id_1" - # 文字列や辞書フィールドは変更されないこと - assert data["title"] == "Test Title" - assert data["description"] == {"text": "Test Description"} - - def test_string_items_in_list(self, app): - """ - 正常系 - 条件:dataのリストフィールド内に文字列アイテムがある場合 - 入力: - - data: リスト内に文字列アイテムを含むメタデータ - - weko_link: weko_link - 期待結果: - - 文字列アイテムはスキップされる - """ - data = { 
- "creators": [ - { - "other_identifiers":[], - "nameIdentifiers": [ - { - "nameIdentifierScheme": "WEKO", - "nameIdentifier": "old_weko_id_1" - } - ] - }, - "Simple String Creator" # 文字列アイテム - ] - } - weko_link = {"1": "old_weko_id_1"} - - author_mock = { - "authorIdInfo": [ - {"idType": "1", "authorId": "new_weko_id_1"} - ] - } - - with patch.object(Authors, 'get_author_by_id', return_value=author_mock): - update_data_for_weko_link(data, weko_link) - - assert data["creators"][0]["nameIdentifiers"][0]["nameIdentifier"] == "new_weko_id_1" - # 文字列アイテムは変更されないこと - assert data["creators"][1] == "Simple String Creator" - - def test_no_matching_nameIdentifier(self, app): - """ - 正常系 - 条件:nameIdentifierがweko_linkの値と一致しない場合 - 入力: - - data: weko_linkと一致しないnameIdentifierを含むメタデータ - - weko_link: weko_link - 期待結果: - - dataのnameIdentifierは更新されない - """ - data = { - "creators": [ - { - "nameIdentifiers": [ - { - "nameIdentifierScheme": "WEKO", - "nameIdentifier": "different_weko_id" - } - ] - } - ] - } - weko_link = {"1": "old_weko_id_1"} - data_copy = copy.deepcopy(data) - - author_mock = { - "authorIdInfo": [ - {"idType": "1", "authorId": "new_weko_id_1"} - ] - } - - with patch.object(Authors, 'get_author_by_id', return_value=author_mock): - update_data_for_weko_link(data, weko_link) - - # 一致するnameIdentifierがないため、データは変更されない - assert data == data_copy - - def test_empty_input_data(self, app): - """ - 正常系 - 条件:空のデータ辞書が入力された場合 - 入力: - - data: 空の辞書 - - weko_link: weko_link - 期待結果: - - dataは変更されない(空のまま) - """ - data = {} - weko_link = {"1": "old_weko_id_1"} - - author_mock = { - "authorIdInfo": [ - {"idType": "1", "authorId": "new_weko_id_1"} - ] - } - - with patch.object(Authors, 'get_author_by_id', return_value=author_mock): - update_data_for_weko_link(data, weko_link) - - assert data == {} - - def test_empty_weko_link(self, app): - """ - 正常系 - 条件:空のweko_linkが入力された場合 - 入力: - - data: 任意のメタデータ - - weko_link: 空の辞書 - 期待結果: - - dataは変更されない - """ - data = { - "creators": [ - { - "nameIdentifiers": [ - { - "nameIdentifierScheme": "WEKO", - "nameIdentifier": "weko_id_1" - } - ] - } - ] - } - weko_link = {} - data_copy = copy.deepcopy(data) - - update_data_for_weko_link(data, weko_link) - - assert data == data_copy # def write_to_tempfile(start, row_header, row_label_en, row_label_jp, row_data): # .tox/c1/bin/pytest --cov=weko_authors tests/test_utils.py::test_write_to_tempfile -vv -s --cov-branch --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp diff --git a/modules/weko-authors/weko_authors/utils.py b/modules/weko-authors/weko_authors/utils.py index f29b15cf22..d0bb809dd7 100644 --- a/modules/weko-authors/weko_authors/utils.py +++ b/modules/weko-authors/weko_authors/utils.py @@ -1755,51 +1755,6 @@ def import_affiliation_id_to_system(affiliation_id): traceback.print_exc(file=sys.stdout) raise -def update_data_for_weko_link(data, weko_link): - """Update weko_link based on authors table and update data if different. - - Args: - data (dict): Metadata, especially from workflowactivity temp_data column. - weko_link (dict): weko_link mapping. - """ - old_weko_link = weko_link - weko_link = copy.deepcopy(old_weko_link) - # Update weko_link with new values. - for pk_id in weko_link.keys(): - author = Authors.get_author_by_id(pk_id) - if author: - # Get weko_id. - author_id_info = author["authorIdInfo"] - for i in author_id_info: - # If idType is 1, get weko_id and update weko_link. 
- if i.get('idType') == '1': - weko_link[pk_id] = i.get('authorId') - break - if weko_link == old_weko_link: - # If weko_link has not changed, do nothing. - return - # If weko_link has changed, update metadata. - for x_key, x_value in data.items(): - if not isinstance(x_value, list): - continue - for y_index, y in enumerate(x_value, start=0): - if not isinstance(y, dict): - continue - for y_key, y_value in y.items(): - if not y_key == "nameIdentifiers": - continue - for z_index, z in enumerate(y_value, start=0): - if ( - z.get("nameIdentifierScheme","") != "WEKO" - or z.get("nameIdentifier") not in old_weko_link.values() - ): - continue - # Get pk_id whose value matches weko_id from weko_link. - pk_id = [ - k for k, v in old_weko_link.items() - if v == z.get("nameIdentifier") - ][0] - z["nameIdentifier"] = weko_link.get(pk_id) def get_check_base_name(): """Get base name for check file. diff --git a/modules/weko-deposit/tests/test_api.py b/modules/weko-deposit/tests/test_api.py index 347bbc8f86..7ac765ca48 100644 --- a/modules/weko-deposit/tests/test_api.py +++ b/modules/weko-deposit/tests/test_api.py @@ -288,8 +288,12 @@ def test_get_metadata_by_item_id(self,es_records): indexer, records = es_records record = records[0]['record'] record_data = records[0]['record_data'] - ret = indexer.get_metadata_by_item_id(record.id) - assert ret['_index']=='test-weko-item-v1.0.0' + ret1 = indexer.get_metadata_by_item_id(record.id) + assert ret1['_index']=='test-weko-item-v1.0.0' + + record.id = None + ret2 = indexer.get_metadata_by_item_id(record.id, is_ignore=True) + assert ret2['found'] is False # def update_feedback_mail_list(self, feedback_mail): # .tox/c1/bin/pytest --cov=weko_deposit tests/test_api.py::TestWekoIndexer::test_update_feedback_mail_list -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-deposit/.tox/c1/tmp @@ -310,17 +314,16 @@ def test_update_request_mail_list(selft,es_records): assert ret['_id'] == '{}'.format(record.id) and ret['result'] == 'updated' and ret['_shards'] == {'total': 2, 'successful': 1, 'failed': 0} - # def update_author_link_and_weko_link(self, author_link): - # .tox/c1/bin/pytest --cov=weko_deposit tests/test_api.py::TestWekoIndexer::test_update_author_link_and_weko_link -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-deposit/.tox/c1/tmp - def test_update_author_link_and_weko_link(self,es_records): + # def update_author_link(self, author_link): + # .tox/c1/bin/pytest --cov=weko_deposit tests/test_api.py::TestWekoIndexer::test_update_author_link -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-deposit/.tox/c1/tmp + def test_update_author_link(self,es_records): indexer, records = es_records record = records[0]['record'] author_link_info = { "id": record.id, - "author_link": ['1'], - "weko_link": {"1":"13"} + "author_link": ['1'] } - ret = indexer.update_author_link_and_weko_link(author_link_info) + ret = indexer.update_author_link(author_link_info) assert ret == {'_index': 'test-weko-item-v1.0.0', '_type': 'item-v1.0.0', '_id': str(record.id), '_version': 2, 'result': 'updated', '_shards': {'total': 2, 'successful': 1, 'failed': 0}, '_seq_no': 12, '_primary_term': 1} # def update_jpcoar_identifier(self, dc, item_id): @@ -1051,7 +1054,7 @@ def test_convert_item_metadata(self, app, db, db_itemtype, es_records, users): ('item_1617258105262', {'attribute_name': 'Resource Type', 'attribute_value_mlt': [{'resourceuri': 'http://purl.org/coar/resource_type/c_5794', 'resourcetype': 'conference paper'}]}), ('item_title', 
'title'), ('item_type_id', '1'), ('control_number', 1), ('author_link', []), ('_oai', {'id': '1'}), ('weko_shared_ids', []), ('owner', 1), ('owners', [1]), ('publish_date', '2022-08-20'), - ('title', ['title']), ('relation_version_is_last', True), ('path', ['1']), ('publish_status','0'), ('weko_link', '')]) + ('title', ['title']), ('relation_version_is_last', True), ('path', ['1']), ('publish_status','0')]) test2 = None with patch("weko_index_tree.api.Indexes.get_path_list", return_value=['1']): @@ -1257,18 +1260,16 @@ def test_delete_es_index_attempt(sel,app,db,location): db.session.commit() deposit.delete_es_index_attempt(deposit.pid) - # def update_author_link_and_weko_link(self, author_link): - # .tox/c1/bin/pytest --cov=weko_deposit tests/test_api.py::TestWekoDeposit::test_update_author_link_and_weko_link -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-deposit/.tox/c1/tmp - def test_update_author_link_and_weko_link(sel,app,db,location,es_records, mocker): - with patch("weko_deposit.api.WekoIndexer.update_author_link_and_weko_link") as mocker_indexer_update: + # def update_author_link(self, author_link): + # .tox/c1/bin/pytest --cov=weko_deposit tests/test_api.py::TestWekoDeposit::test_update_author_link -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-deposit/.tox/c1/tmp + def test_update_author_link(self,es_records): + with patch("weko_deposit.api.WekoIndexer.update_author_link") as mocker_indexer_update: _, records = es_records record = records[0] deposit = record['deposit'] - assert deposit.update_author_link_and_weko_link([], {"1":"123"})==None - mocker_indexer_update.assert_not_called() - assert deposit.update_author_link_and_weko_link(["1"], {})==None + assert deposit.update_author_link([]) == None mocker_indexer_update.assert_not_called() - assert deposit.update_author_link_and_weko_link(["1"], {"1":"123"})==None + assert deposit.update_author_link(["1"]) == None mocker_indexer_update.assert_called() diff --git a/modules/weko-deposit/tests/test_tasks.py b/modules/weko-deposit/tests/test_tasks.py index 7cabd5d35a..8e7cd27775 100644 --- a/modules/weko-deposit/tests/test_tasks.py +++ b/modules/weko-deposit/tests/test_tasks.py @@ -419,7 +419,7 @@ def __init__(self): def get_record(self): return self - def update_author_link_and_weko_link(self): + def update_author_link(self): pass # .tox/c1/bin/pytest --cov=weko_deposit tests/test_tasks.py::TestProcess::test_process_with_data -v -s -vv --cov-branch --cov-report=html --cov-config=tox.ini --basetemp=/code/modules/weko-deposit/.tox/c1/tmp @@ -430,7 +430,7 @@ def test_process_with_data(self, mock_commit, mock_update_author_data, mock_reco mocker.patch('invenio_indexer.api.RecordIndexer.bulk_index') mocker.patch('invenio_indexer.api.RecordIndexer.process_bulk_queue') with patch('weko_deposit.api.WekoDeposit.get_record', return_value = WekoDeposit({})): - mocker.patch('weko_deposit.api.WekoDeposit.update_author_link_and_weko_link') + mocker.patch('weko_deposit.api.WekoDeposit.update_author_link') # 条件 data_size = 10 data_from = 0 @@ -1105,9 +1105,6 @@ def test_update_author_data_success(self, mock_update_item, mock_change_to_meta, ] } ] - }, - "weko_link": { - "1": "12345" } } @@ -1146,7 +1143,7 @@ def test_update_author_data_pid_not_exist(self, mock_get_record_items, mock_get_ result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) # 期待結果 - assert result == (None, set(), {}) + assert result == (None, set()) 
assert process_counter["fail_items"] == [{"record_id": "1", "author_ids": [], "message": "PID 1 does not exist."}] @patch('weko_deposit.tasks.PersistentIdentifier.get') @@ -1171,7 +1168,7 @@ def test_update_author_data_exception(self, mock_get_record_items, mock_get_reco result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) # 期待結果 - assert result == (None, set(), {}) + assert result == (None, set()) assert process_counter["fail_items"] == [{"record_id": "1", "author_ids": [], "message": "Test Exception"}] @@ -1387,7 +1384,7 @@ def test_process_with_record_ids(self, mock_commit, mock_update_author_data, moc mock_update_author_data.return_value = (uuid1, [uuid1], set()) # 実行 - result = _process(data_size, data_from, process_counter, target, origin_pkid_list, prepare_key_map, author_prefix, affiliation_id) + result = _process(data_size, data_from, process_counter, target, origin_pkid_list, prepare_key_map, author_prefix, affiliation_id, force_change) mock_bulk_index.assert_called() mock_process_bulk_queue.assert_called() @@ -1405,6 +1402,7 @@ def test_process_compare_data_size(self, mock_commit, mock_update_author_data, m origin_pkid_list = ["1"] author_prefix = {...} affiliation_id = {...} + force_change = False with patch('weko_deposit.api.WekoDeposit.get_record', return_value = WekoDeposit({})): mock_records_search.return_value.update_from_dict.return_value.execute.return_value.to_dict.return_value = { @@ -1415,7 +1413,7 @@ def test_process_compare_data_size(self, mock_commit, mock_update_author_data, m } uuid1 = uuid.uuid4() mock_update_author_data.return_value = (uuid1, [uuid1], set()) - result = _process(data_size, data_from, process_counter, target, origin_pkid_list, prepare_key_map, author_prefix, affiliation_id) + result = _process(data_size, data_from, process_counter, target, origin_pkid_list, prepare_key_map, author_prefix, affiliation_id, force_change) assert result == (2, True) data_size = 10 @@ -1425,7 +1423,7 @@ def test_process_compare_data_size(self, mock_commit, mock_update_author_data, m 'total': 1 } } - result = _process(data_size, data_from, process_counter, target, origin_pkid_list, prepare_key_map, author_prefix, affiliation_id) + result = _process(data_size, data_from, process_counter, target, origin_pkid_list, prepare_key_map, author_prefix, affiliation_id, force_change) assert result == (1, False) # 54702-24 @@ -1458,7 +1456,7 @@ def test_process_update_author_data_error(self, mock_commit, mock_update_author_ mock_update_author_data.return_value = (None, set(), {}) # 実行 - result = _process(data_size, data_from, process_counter, target, origin_pkid_list, prepare_key_map, author_prefix, affiliation_id) + result = _process(data_size, data_from, process_counter, target, origin_pkid_list, prepare_key_map, author_prefix, affiliation_id, force_change) assert result[0] == 0 # .tox/c1/bin/pytest --cov=weko_deposit tests/test_tasks.py::TestChangeToMeta -v -s -vv --cov-branch --cov-report=html --cov-config=tox.ini --basetemp=/code/modules/weko-deposit/.tox/c1/tmp @@ -1592,6 +1590,7 @@ def test_update_author_data_has_weko_id(self, mock_update_item, mock_change_to_m key_map = prepare_key_map author_prefix = {} affiliation_id = {} + force_change = False dep_items = { "title": "Sample Title", "creator": { @@ -1614,7 +1613,7 @@ def test_update_author_data_has_weko_id(self, mock_update_item, mock_change_to_m mock_get_record_items.return_value = WekoDeposit(dep_items) 
mock_change_to_meta.return_value = ("12345", {}) # 実行 - result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id) + result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) # 期待結果 mock_change_to_meta.assert_called() assert result == ('uuid1', ['uuid1'], {'12345'}) @@ -1639,6 +1638,7 @@ def test_update_author_data_no_weko_id(self, mock_update_item, mock_change_to_me key_map = prepare_key_map author_prefix = {} affiliation_id = {} + force_change = False dep_items = { "title": "Sample Title", "creator": { @@ -1660,7 +1660,7 @@ def test_update_author_data_no_weko_id(self, mock_update_item, mock_change_to_me mock_get_record_items.return_value = WekoDeposit(dep_items) mock_change_to_meta.return_value = ("12345", {}) # 実行 - result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id) + result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) # 期待結果 assert result == ('uuid1', [], {""}) assert process_counter["success_items"] == [{"record_id": "1", "author_ids": [], "message": ""}] @@ -1684,6 +1684,7 @@ def test_update_author_data_contributor_weko_id_not_match(self, mock_update_item key_map = prepare_key_map author_prefix = {} affiliation_id = {} + force_change = False dep_items = { "contributor": { "attribute_value_mlt": [ @@ -1704,7 +1705,7 @@ def test_update_author_data_contributor_weko_id_not_match(self, mock_update_item mock_get_record_items.return_value = WekoDeposit(dep_items) mock_change_to_meta.return_value = ("12345", {}) # 実行 - result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id) + result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) # 期待結果 mock_change_to_meta.assert_called() assert result == ('uuid1', ['uuid1'], {'12345'}) @@ -1729,6 +1730,7 @@ def test_update_author_data_full_name_weko_id_not_match(self, mock_update_item, key_map = prepare_key_map author_prefix = {} affiliation_id = {} + force_change = False dep_items = { "title": "Sample Title", "full_name":{ @@ -1755,7 +1757,7 @@ def test_update_author_data_full_name_weko_id_not_match(self, mock_update_item, mock_get_record.return_value = WekoDeposit({}) mock_get_record_items.return_value = WekoDeposit(dep_items) - result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id) + result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) assert result == ('uuid1', [], {'3'}) assert process_counter["success_items"] == [{"record_id": "3", "author_ids": [], "message": ""}] @@ -1779,6 +1781,7 @@ def test_update_author_data_not_match_key_map(self, mock_update_item, mock_chang key_map = prepare_key_map author_prefix = {} affiliation_id = {} + force_change = False dep_items = { "title": "Sample Title", "test": { @@ -1796,7 +1799,7 @@ def test_update_author_data_not_match_key_map(self, mock_update_item, mock_chang mock_get_record.return_value = WekoDeposit({}) mock_get_record_items.return_value = WekoDeposit(dep_items) - result = _update_author_data(item_id, record_ids, process_counter, target, 
origin_pkid_list, key_map, author_prefix, affiliation_id) + result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) assert result == ('uuid1', [], set()) assert process_counter["success_items"] == [{"record_id": "1", "author_ids": [], "message": ""}] @@ -1820,6 +1823,7 @@ def test_update_author_data_scheme_is_not_WEKO(self, mock_update_item, mock_chan key_map = prepare_key_map author_prefix = {} affiliation_id = {} + force_change = False dep_items = { "title": "Sample Title", "creator": { @@ -1840,7 +1844,7 @@ def test_update_author_data_scheme_is_not_WEKO(self, mock_update_item, mock_chan mock_get_record.return_value = WekoDeposit({}) mock_get_record_items.return_value = WekoDeposit(dep_items) - result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id) + result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) assert result == ('uuid1', [], set()) assert process_counter["success_items"] == [{"record_id": "1", "author_ids": [], "message": ""}] @@ -1860,12 +1864,13 @@ def test_update_author_data_pid_not_exist(mock_get_record_items, mock_get_record key_map = {"creator": {}, "contributor": {}, "full_name": {}} author_prefix = {} affiliation_id = {} + force_change = False # モックの設定 mock_get_pid.side_effect = PIDDoesNotExistError("pid_type", "pid_value") # 実行 - result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id) + result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) # 期待結果 - assert result == (None, set(), {}) + assert result == (None, set()) assert process_counter["fail_items"] == [{"record_id": "1", "author_ids": [], "message": "PID 1 does not exist."}] # 54702-30 @@ -1883,12 +1888,13 @@ def test_update_author_data_exception(mock_get_record_items, mock_get_record, mo key_map = {"creator": {}, "contributor": {}, "full_name": {}} author_prefix = {} affiliation_id = {} + force_change = False # モックの設定 mock_get_pid.side_effect = Exception("Test Exception") # 実行 - result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id) + result = _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) # 期待結果 - assert result == (None, set(), {}) + assert result == (None, set()) assert process_counter["fail_items"] == [{"record_id": "1", "author_ids": [], "message": "Test Exception"}] # .tox/c1/bin/pytest --cov=weko_deposit tests/test_tasks.py::test_extract_pdf_and_update_file_contents -v -s -vv --cov-branch --cov-report=term --cov-config=tox.ini --basetemp=/code/modules/weko-deposit/.tox/c1/tmp @@ -1986,6 +1992,7 @@ def test_extract_pdf_and_update_file_contents(app, db, location, caplog): # .tox/c1/bin/pytest --cov=weko_deposit tests/test_tasks.py::test_extract_pdf_and_update_file_contents_api_cases -v -s -vv --cov-branch --cov-report=term --cov-config=tox.ini --basetemp=/code/modules/weko-deposit/.tox/c1/tmp +@pytest.mark.skip() @pytest.mark.parametrize("tika_path, isfile, storage_exception, subprocess_returncode, update_side_effect, expect_error_attr, expect_content", [ ("/tmp/tika.jar", True, None, 0, None, None, "abc"), # normal (None, 
True, None, 0, None, Exception, None), # tika jar not found diff --git a/modules/weko-deposit/weko_deposit/api.py b/modules/weko-deposit/weko_deposit/api.py index c632348a84..2260c94135 100644 --- a/modules/weko-deposit/weko_deposit/api.py +++ b/modules/weko-deposit/weko_deposit/api.py @@ -441,19 +441,17 @@ def update_request_mail_list(self, request_mail): body=body ) - def update_author_link_and_weko_link(self, link): + def update_author_link(self, author_link): """Update author_link info.""" # current_app.logger.error("author_link:{}".format(author_link)); self.get_es_index() pst = 'author_link' - pst2 = "weko_link" - body = {'doc': {pst: link.get('author_link'), - pst2: link.get('weko_link')}} + body = {'doc': {pst: author_link.get('author_link')}} return self.client.update( index=self.es_index, doc_type=self.es_doc_type, - id=str(link.get('id')), + id=str(author_link.get('id')), body=body ) @@ -1578,7 +1576,6 @@ def convert_item_metadata(self, index_obj, data=None): traceback.print_exc() raise except BaseException: - import traceback traceback.print_exc() abort(500, 'MAPPING_ERROR') @@ -1854,7 +1851,7 @@ def delete_es_index_attempt(self, pid): pass raise PIDResolveRESTError(description='This item has been deleted') - def update_author_link_and_weko_link(self, author_link, weko_link): + def update_author_link(self, author_link): """Summary line. I ndex author_link list. @@ -1867,14 +1864,12 @@ def update_author_link_and_weko_link(self, author_link, weko_link): """ item_id = self.id - if author_link and weko_link: - link_info = { + if author_link: + author_link_info = { "id": item_id, - "author_link": author_link, - "weko_link": weko_link - + "author_link": author_link } - self.indexer.update_author_link_and_weko_link(link_info) + self.indexer.update_author_link(author_link_info) def update_request_mail(self): """ diff --git a/modules/weko-deposit/weko_deposit/tasks.py b/modules/weko-deposit/weko_deposit/tasks.py index 2dc7e9d535..35b5ace595 100644 --- a/modules/weko-deposit/weko_deposit/tasks.py +++ b/modules/weko-deposit/weko_deposit/tasks.py @@ -259,11 +259,11 @@ def _process(data_size, data_from, process_counter, target, origin_pkid_list, ke update_es_authorinfo = [] for item in search['hits']['hits']: item_id = item['_source']['control_number'] - object_uuid, record_ids, author_link, weko_link = \ + object_uuid, record_ids, author_link = \ _update_author_data(item_id, record_ids, process_counter, target, origin_pkid_list, key_map, author_prefix, affiliation_id, force_change) if object_uuid: update_es_authorinfo.append({ - 'id': object_uuid, 'author_link': list(author_link), 'weko_link': weko_link + 'id': object_uuid, 'author_link': list(author_link) }) db.session.commit() # update record to ES @@ -300,10 +300,10 @@ def _process(data_size, data_from, process_counter, target, origin_pkid_list, ke while True: try: dep = WekoDeposit.get_record(d['id']) - dep.update_author_link_and_weko_link(d['author_link'], d["weko_link"]) + dep.update_author_link(d['author_link']) break except Exception as e: - current_app.logger.error("Failed to update record to ES. method:update_author_link_and_weko_link err:{}".format(e)) + current_app.logger.error("Failed to update record to ES. 
method:update_author_link err:{}".format(e)) current_app.logger.error("retrys:{} sleep{}".format(count, sleep_time)) if sleep_time > max_back_off_time: raise e @@ -325,19 +325,6 @@ def _update_author_data(item_id, record_ids, process_counter, target, origin_pki dep = WekoDeposit.get_record(pid.object_uuid) author_link = set() author_data = {} - current_weko_link = dep.get("weko_link", {}) - weko_link = copy.deepcopy(current_weko_link) - - # targetを用いてweko_linkを新しくする。 - if target: - # weko_idを取得する。 - target_pk_id = target["pk_id"] - author_id_info = target["authorIdInfo"] - for i in author_id_info: - # idTypeが1の場合、weko_idを取得し、weko_linkを更新する。 - if i.get('idType') == '1': - weko_link[target_pk_id] = i.get('authorId') - break for k, v in dep.items(): if isinstance(v, dict) \ and v.get('attribute_value_mlt') \ @@ -359,20 +346,12 @@ def _update_author_data(item_id, record_ids, process_counter, target, origin_pki change_flag = False for id in data.get('nameIdentifiers', []): if id.get('nameIdentifierScheme', '') == 'WEKO': - - # author_link.add(id['nameIdentifier']) - # 1.current_weko_linkの値にdataのweko_idが含まれているかを確認する。 - # 2.weko_idが含まれている場合、current_weko_linkでそのweko_id対応するpk_idを取得する。 - pk_ids = [k for k, v in current_weko_link.items() if v == id.get("nameIdentifier")] - if pk_ids: - pk_id = pk_ids[0] - author_link.add(pk_id) - # 3.origin_pkid_listにpk_idが含まれているかを確認する。 - if pk_id in origin_pkid_list: - # 4.含まれている場合change_flagをTrueにする。 - change_flag = True - record_ids.append(pid.object_uuid) - break + author_link.add(id['nameIdentifier']) + if id['nameIdentifier'] in origin_pkid_list: + origin_id = id['nameIdentifier'] + change_flag = True + record_ids.append(pid.object_uuid) + break else: continue if change_flag: @@ -384,24 +363,27 @@ def _update_author_data(item_id, record_ids, process_counter, target, origin_pki new_meta) author_data.update( {k: dep[k]['attribute_value_mlt']}) + if origin_id != target_id: + temp_list.append(origin_id) + author_link.remove(origin_id) + author_link.add(target_id) dep['author_link'] = list(author_link) - dep["weko_link"] = weko_link - + dep.update_item_by_task() obj = ItemsMetadata.get_record(pid.object_uuid) obj.update(author_data) obj.commit() process_counter[SUCCESS_LABEL].append({"record_id": item_id, "author_ids": temp_list, "message": ""}) - return pid.object_uuid, record_ids, author_link, weko_link + return pid.object_uuid, record_ids, author_link except PIDDoesNotExistError as pid_error: current_app.logger.error("PID {} does not exist.".format(item_id)) process_counter[FAIL_LABEL].append({"record_id": item_id, "author_ids": temp_list, "message": "PID {} does not exist.".format(item_id)}) - return None, set(), {}, {} + return None, set() except Exception as ex: current_app.logger.error(ex) process_counter[FAIL_LABEL].append({"record_id": item_id, "author_ids": temp_list, "message": str(ex)}) - return None, set(), {}, {} + return None, set() def _change_to_meta(target, author_prefix, affiliation_id, key_map, item_names_data, force_change=False): target_id = None diff --git a/modules/weko-items-ui/tests/data/temp_data.json b/modules/weko-items-ui/tests/data/temp_data.json index 230434645b..ba6c70685e 100644 --- a/modules/weko-items-ui/tests/data/temp_data.json +++ b/modules/weko-items-ui/tests/data/temp_data.json @@ -1 +1,662 @@ -{"metainfo": {"$schema": "1", "item_1617186331708": [{"subitem_1551255647225": "ja_conference paperITEM00000004(public_open_access_open_access_simple)", "subitem_1551255648112": "ja"}, {"subitem_1551255647225": "en_conference 
paperITEM00000004(public_open_access_simple)", "subitem_1551255648112": "en"}], "item_1617186385884": [{"subitem_1551255720400": "Alternative Title", "subitem_1551255721061": "en"}, {"subitem_1551255720400": "Alternative Title", "subitem_1551255721061": "ja"}], "item_1617186419668": [{"creatorAffiliations": [{"affiliationNameIdentifiers": [{"affiliationNameIdentifier": "0000000121691048", "affiliationNameIdentifierScheme": "ISNI", "affiliationNameIdentifierURI": "http://isni.org/isni/0000000121691048"}], "affiliationNames": [{"affiliationName": "University", "affiliationNameLang": "en"}]}], "creatorMails": [{"creatorMail": "wekosoftware@nii.ac.jp"}], "creatorNames": [{"creatorName": "u60c5u5831, u592au90ce", "creatorNameLang": "ja"}, {"creatorName": "u30b8u30e7u30a6u30dbu30a6, u30bfu30edu30a6", "creatorNameLang": "ja-Kana"}, {"creatorName": "Joho, Taro", "creatorNameLang": "en"}], "familyNames": [{"familyName": "u60c5u5831", "familyNameLang": "ja"}, {"familyName": "u30b8u30e7u30a6u30dbu30a6", "familyNameLang": "ja-Kana"}, {"familyName": "Joho", "familyNameLang": "en"}], "givenNames": [{"givenName": "u592au90ce", "givenNameLang": "ja"}, {"givenName": "u30bfu30edu30a6", "givenNameLang": "ja-Kana"}, {"givenName": "Taro", "givenNameLang": "en"}], "nameIdentifiers": [{"nameIdentifier": "4", "nameIdentifierScheme": "WEKO"}, {"nameIdentifier": "xxxxxxx", "nameIdentifierScheme": "ORCID", "nameIdentifierURI": "https://orcid.org/"}, {"nameIdentifier": "xxxxxxx", "nameIdentifierScheme": "CiNii", "nameIdentifierURI": "https://ci.nii.ac.jp/"}, {"nameIdentifier": "zzzzzzz", "nameIdentifierScheme": "KAKEN2", "nameIdentifierURI": "https://kaken.nii.ac.jp/"}], "creatorAlternatives": [{}]}, {"creatorMails": [{"creatorMail": "wekosoftware@nii.ac.jp"}], "creatorNames": [{"creatorName": "u60c5u5831, u592au90ce", "creatorNameLang": "ja"}, {"creatorName": "u30b8u30e7u30a6u30dbu30a6, u30bfu30edu30a6", "creatorNameLang": "ja-Kana"}, {"creatorName": "Joho, Taro", "creatorNameLang": "en"}], "familyNames": [{"familyName": "u60c5u5831", "familyNameLang": "ja"}, {"familyName": "u30b8u30e7u30a6u30dbu30a6", "familyNameLang": "ja-Kana"}, {"familyName": "Joho", "familyNameLang": "en"}], "givenNames": [{"givenName": "u592au90ce", "givenNameLang": "ja"}, {"givenName": "u30bfu30edu30a6", "givenNameLang": "ja-Kana"}, {"givenName": "Taro", "givenNameLang": "en"}], "nameIdentifiers": [{"nameIdentifier": "xxxxxxx", "nameIdentifierScheme": "ORCID", "nameIdentifierURI": "https://orcid.org/"}, {"nameIdentifier": "xxxxxxx", "nameIdentifierScheme": "CiNii", "nameIdentifierURI": "https://ci.nii.ac.jp/"}, {"nameIdentifier": "zzzzzzz", "nameIdentifierScheme": "KAKEN2", "nameIdentifierURI": "https://kaken.nii.ac.jp/"}], "creatorAlternatives": [{}], "creatorAffiliations": [{"affiliationNameIdentifiers": [{}], "affiliationNames": [{}]}]}, {"creatorMails": [{"creatorMail": "wekosoftware@nii.ac.jp"}], "creatorNames": [{"creatorName": "u60c5u5831, u592au90ce", "creatorNameLang": "ja"}, {"creatorName": "u30b8u30e7u30a6u30dbu30a6, u30bfu30edu30a6", "creatorNameLang": "ja-Kana"}, {"creatorName": "Joho, Taro", "creatorNameLang": "en"}], "familyNames": [{"familyName": "u60c5u5831", "familyNameLang": "ja"}, {"familyName": "u30b8u30e7u30a6u30dbu30a6", "familyNameLang": "ja-Kana"}, {"familyName": "Joho", "familyNameLang": "en"}], "givenNames": [{"givenName": "u592au90ce", "givenNameLang": "ja"}, {"givenName": "u30bfu30edu30a6", "givenNameLang": "ja-Kana"}, {"givenName": "Taro", "givenNameLang": "en"}], "nameIdentifiers": [{"nameIdentifier": "xxxxxxx", 
"nameIdentifierScheme": "ORCID", "nameIdentifierURI": "https://orcid.org/"}, {"nameIdentifier": "xxxxxxx", "nameIdentifierScheme": "CiNii", "nameIdentifierURI": "https://ci.nii.ac.jp/"}, {"nameIdentifier": "zzzzzzz", "nameIdentifierScheme": "KAKEN2", "nameIdentifierURI": "https://kaken.nii.ac.jp/"}], "creatorAlternatives": [{}], "creatorAffiliations": [{"affiliationNameIdentifiers": [{}], "affiliationNames": [{}]}]}], "item_1617186476635": {"subitem_1522299639480": "open access", "subitem_1600958577026": "http://purl.org/coar/access_right/c_abf2"}, "item_1617186499011": [{"subitem_1522650717957": "ja", "subitem_1522650727486": "http://localhost", "subitem_1522651041219": "Rights Information"}], "item_1617186609386": [{"subitem_1522299896455": "ja", "subitem_1522300014469": "Other", "subitem_1522300048512": "http://localhost/", "subitem_1523261968819": "Sibject1"}], "item_1617186626617": [{"subitem_description": "DescriptionnDescription
Description", "subitem_description_language": "en", "subitem_description_type": "Abstract"}, {"subitem_description": "u6982u8981nu6982u8981nu6982u8981nu6982u8981", "subitem_description_language": "ja", "subitem_description_type": "Abstract"}], "item_1617186643794": [{"subitem_1522300295150": "en", "subitem_1522300316516": "Publisher"}], "item_1617186660861": [{"subitem_1522300695726": "Available", "subitem_1522300722591": "2021-06-30"}], "item_1617186702042": [{"subitem_1551255818386": "jpn"}], "item_1617186783814": [{"subitem_identifier_type": "URI", "subitem_identifier_uri": "http://localhost"}], "item_1617186859717": [{"subitem_1522658018441": "en", "subitem_1522658031721": "Temporal"}], "item_1617186882738": [{"subitem_geolocation_place": [{"subitem_geolocation_place_text": "Japan"}]}], "item_1617186901218": [{"subitem_1522399143519": {"subitem_1522399281603": "ISNI", "subitem_1522399333375": "http://xxx"}, "subitem_1522399412622": [{"subitem_1522399416691": "en", "subitem_1522737543681": "Funder Name"}], "subitem_1522399571623": {"subitem_1522399585738": "Award URI", "subitem_1522399628911": "Award Number"}, "subitem_1522399651758": [{"subitem_1522721910626": "en", "subitem_1522721929892": "Award Title"}]}], "item_1617186920753": [{"subitem_1522646500366": "ISSN", "subitem_1522646572813": "xxxx-xxxx-xxxx"}], "item_1617186941041": [{"subitem_1522650068558": "en", "subitem_1522650091861": "Source Title"}], "item_1617186959569": {"subitem_1551256328147": "1"}, "item_1617186981471": {"subitem_1551256294723": "111"}, "item_1617186994930": {"subitem_1551256248092": "12"}, "item_1617187024783": {"subitem_1551256198917": "1"}, "item_1617187045071": {"subitem_1551256185532": "3"}, "item_1617187112279": [{"subitem_1551256126428": "Degree Name", "subitem_1551256129013": "en"}], "item_1617187136212": {"subitem_1551256096004": "2021-06-30"}, "item_1617187187528": [{"subitem_1599711633003": [{"subitem_1599711636923": "Conference Name", "subitem_1599711645590": "ja"}], "subitem_1599711655652": "1", "subitem_1599711660052": [{"subitem_1599711680082": "Sponsor", "subitem_1599711686511": "ja"}], "subitem_1599711699392": {"subitem_1599711704251": "2020/12/11", "subitem_1599711712451": "1", "subitem_1599711727603": "12", "subitem_1599711731891": "2000", "subitem_1599711735410": "1", "subitem_1599711739022": "12", "subitem_1599711743722": "2020", "subitem_1599711745532": "ja"}, "subitem_1599711758470": [{"subitem_1599711769260": "Conference Venue", "subitem_1599711775943": "ja"}], "subitem_1599711788485": [{"subitem_1599711798761": "Conference Place", "subitem_1599711803382": "ja"}], "subitem_1599711813532": "JPN"}], "item_1617258105262": {"resourcetype": "conference paper", "resourceuri": "http://purl.org/coar/resource_type/c_5794"}, "item_1617265215918": {"subitem_1522305645492": "AO", "subitem_1600292170262": "http://purl.org/coar/version/c_b1a7d7d4d402bcce"}, "item_1617349709064": [{"contributorMails": [{"contributorMail": "wekosoftware@nii.ac.jp"}], "contributorNames": [{"contributorName": "u60c5u5831, u592au90ce", "lang": "ja"}, {"contributorName": "u30b8u30e7u30a6u30dbu30a6, u30bfu30edu30a6", "lang": "ja-Kana"}, {"contributorName": "Joho, Taro", "lang": "en"}], "contributorType": "ContactPerson", "familyNames": [{"familyName": "u60c5u5831", "familyNameLang": "ja"}, {"familyName": "u30b8u30e7u30a6u30dbu30a6", "familyNameLang": "ja-Kana"}, {"familyName": "Joho", "familyNameLang": "en"}], "givenNames": [{"givenName": "u592au90ce", "givenNameLang": "ja"}, {"givenName": "u30bfu30edu30a6", 
"givenNameLang": "ja-Kana"}, {"givenName": "Taro", "givenNameLang": "en"}], "nameIdentifiers": [{"nameIdentifier": "xxxxxxx", "nameIdentifierScheme": "ORCID", "nameIdentifierURI": "https://orcid.org/"}, {"nameIdentifier": "xxxxxxx", "nameIdentifierScheme": "CiNii", "nameIdentifierURI": "https://ci.nii.ac.jp/"}, {"nameIdentifier": "xxxxxxx", "nameIdentifierScheme": "KAKEN2", "nameIdentifierURI": "https://kaken.nii.ac.jp/"}], "contributorAlternatives": [{}], "contributorAffiliations": [{"contributorAffiliationNameIdentifiers": [{}], "contributorAffiliationNames": [{}]}]}], "item_1617349808926": {"subitem_1523263171732": "Version"}, "item_1617351524846": {"subitem_1523260933860": "Unknown"}, "item_1617353299429": [{"subitem_1522306207484": "isVersionOf", "subitem_1522306287251": {"subitem_1522306382014": "arXiv", "subitem_1522306436033": "xxxxx"}, "subitem_1523320863692": [{"subitem_1523320867455": "en", "subitem_1523320909613": "Related Title"}]}], "item_1617605131499": [{"accessrole": "open_access", "date": [{"dateType": "Available", "dateValue": "2021-07-12"}], "displaytype": "simple", "filename": "1KB.pdf", "filesize": [{"value": "1 KB"}], "format": "text/plain", "mimetype": "application/pdf", "url": {"url": "https://weko3.example.org/record/10/files/1KB.pdf"}, "version_id": "6842535e-8805-4d6b-837e-60f4476c87b8", "fileDate": [{}], "provide": [{}]}], "item_1617610673286": [{"nameIdentifiers": [{"nameIdentifier": "xxxxxx", "nameIdentifierScheme": "ORCID", "nameIdentifierURI": "https://orcid.org/"}], "rightHolderNames": [{"rightHolderLanguage": "ja", "rightHolderName": "Right Holder Name"}]}], "item_1617620223087": [{"subitem_1565671149650": "ja", "subitem_1565671169640": "Banner Headline", "subitem_1565671178623": "Subheading"}, {"subitem_1565671149650": "en", "subitem_1565671169640": "Banner Headline", "subitem_1565671178623": "Subheding"}], "item_1617944105607": [{"subitem_1551256015892": [{"subitem_1551256027296": "xxxxxx", "subitem_1551256029891": "kakenhi"}], "subitem_1551256037922": [{"subitem_1551256042287": "Degree Grantor Name", "subitem_1551256047619": "en"}]}], "owner": "1", "pubdate": "2021-08-06", "title": "ja_conference paperITEM00000004(public_open_access_open_access_simple)", "weko_shared_ids": [], "item_1617187056579": {"bibliographic_titles": [{}]}, "shared_user_ids": []}, "files": [{"checksum": "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef", "completed": true, "displaytype": "simple", "filename": "1KB.pdf", "is_show": false, "is_thumbnail": false, "key": "1KB.pdf", "licensetype": null, "links": {"self": "/api/files/8a6828be-7242-4510-bb80-de170644145c/1KB.pdf?versionId=6842535e-8805-4d6b-837e-60f4476c87b8"}, "mimetype": "application/pdf", "progress": 100, "size": 1024, "version_id": "6842535e-8805-4d6b-837e-60f4476c87b8"}], "endpoints": {"initialization": "/api/deposits/redirect/1.0"}, "weko_link": {"2": "5678"}} \ No newline at end of file +{ + "metainfo": { + "$schema": "1", + "item_1617186331708": [ + { + "subitem_1551255647225": "ja_conference paperITEM00000004(public_open_access_open_access_simple)", + "subitem_1551255648112": "ja" + }, + { + "subitem_1551255647225": "en_conference paperITEM00000004(public_open_access_simple)", + "subitem_1551255648112": "en" + } + ], + "item_1617186385884": [ + { + "subitem_1551255720400": "Alternative Title", + "subitem_1551255721061": "en" + }, + { + "subitem_1551255720400": "Alternative Title", + "subitem_1551255721061": "ja" + } + ], + "item_1617186419668": [ + { + "creatorAffiliations": [ + { + 
"affiliationNameIdentifiers": [ + { + "affiliationNameIdentifier": "0000000121691048", + "affiliationNameIdentifierScheme": "ISNI", + "affiliationNameIdentifierURI": "http://isni.org/isni/0000000121691048" + } + ], + "affiliationNames": [ + { + "affiliationName": "University", + "affiliationNameLang": "en" + } + ] + } + ], + "creatorMails": [ + { + "creatorMail": "wekosoftware@nii.ac.jp" + } + ], + "creatorNames": [ + { + "creatorName": "u60c5u5831, u592au90ce", + "creatorNameLang": "ja" + }, + { + "creatorName": "u30b8u30e7u30a6u30dbu30a6, u30bfu30edu30a6", + "creatorNameLang": "ja-Kana" + }, + { + "creatorName": "Joho, Taro", + "creatorNameLang": "en" + } + ], + "familyNames": [ + { + "familyName": "u60c5u5831", + "familyNameLang": "ja" + }, + { + "familyName": "u30b8u30e7u30a6u30dbu30a6", + "familyNameLang": "ja-Kana" + }, + { + "familyName": "Joho", + "familyNameLang": "en" + } + ], + "givenNames": [ + { + "givenName": "u592au90ce", + "givenNameLang": "ja" + }, + { + "givenName": "u30bfu30edu30a6", + "givenNameLang": "ja-Kana" + }, + { + "givenName": "Taro", + "givenNameLang": "en" + } + ], + "nameIdentifiers": [ + { + "nameIdentifier": "4", + "nameIdentifierScheme": "WEKO" + }, + { + "nameIdentifier": "xxxxxxx", + "nameIdentifierScheme": "ORCID", + "nameIdentifierURI": "https://orcid.org/" + }, + { + "nameIdentifier": "xxxxxxx", + "nameIdentifierScheme": "CiNii", + "nameIdentifierURI": "https://ci.nii.ac.jp/" + }, + { + "nameIdentifier": "zzzzzzz", + "nameIdentifierScheme": "KAKEN2", + "nameIdentifierURI": "https://kaken.nii.ac.jp/" + } + ], + "creatorAlternatives": [ + {} + ] + }, + { + "creatorMails": [ + { + "creatorMail": "wekosoftware@nii.ac.jp" + } + ], + "creatorNames": [ + { + "creatorName": "u60c5u5831, u592au90ce", + "creatorNameLang": "ja" + }, + { + "creatorName": "u30b8u30e7u30a6u30dbu30a6, u30bfu30edu30a6", + "creatorNameLang": "ja-Kana" + }, + { + "creatorName": "Joho, Taro", + "creatorNameLang": "en" + } + ], + "familyNames": [ + { + "familyName": "u60c5u5831", + "familyNameLang": "ja" + }, + { + "familyName": "u30b8u30e7u30a6u30dbu30a6", + "familyNameLang": "ja-Kana" + }, + { + "familyName": "Joho", + "familyNameLang": "en" + } + ], + "givenNames": [ + { + "givenName": "u592au90ce", + "givenNameLang": "ja" + }, + { + "givenName": "u30bfu30edu30a6", + "givenNameLang": "ja-Kana" + }, + { + "givenName": "Taro", + "givenNameLang": "en" + } + ], + "nameIdentifiers": [ + { + "nameIdentifier": "xxxxxxx", + "nameIdentifierScheme": "ORCID", + "nameIdentifierURI": "https://orcid.org/" + }, + { + "nameIdentifier": "xxxxxxx", + "nameIdentifierScheme": "CiNii", + "nameIdentifierURI": "https://ci.nii.ac.jp/" + }, + { + "nameIdentifier": "zzzzzzz", + "nameIdentifierScheme": "KAKEN2", + "nameIdentifierURI": "https://kaken.nii.ac.jp/" + } + ], + "creatorAlternatives": [ + {} + ], + "creatorAffiliations": [ + { + "affiliationNameIdentifiers": [ + {} + ], + "affiliationNames": [ + {} + ] + } + ] + }, + { + "creatorMails": [ + { + "creatorMail": "wekosoftware@nii.ac.jp" + } + ], + "creatorNames": [ + { + "creatorName": "u60c5u5831, u592au90ce", + "creatorNameLang": "ja" + }, + { + "creatorName": "u30b8u30e7u30a6u30dbu30a6, u30bfu30edu30a6", + "creatorNameLang": "ja-Kana" + }, + { + "creatorName": "Joho, Taro", + "creatorNameLang": "en" + } + ], + "familyNames": [ + { + "familyName": "u60c5u5831", + "familyNameLang": "ja" + }, + { + "familyName": "u30b8u30e7u30a6u30dbu30a6", + "familyNameLang": "ja-Kana" + }, + { + "familyName": "Joho", + "familyNameLang": "en" + } + ], + "givenNames": [ 
+ { + "givenName": "u592au90ce", + "givenNameLang": "ja" + }, + { + "givenName": "u30bfu30edu30a6", + "givenNameLang": "ja-Kana" + }, + { + "givenName": "Taro", + "givenNameLang": "en" + } + ], + "nameIdentifiers": [ + { + "nameIdentifier": "xxxxxxx", + "nameIdentifierScheme": "ORCID", + "nameIdentifierURI": "https://orcid.org/" + }, + { + "nameIdentifier": "xxxxxxx", + "nameIdentifierScheme": "CiNii", + "nameIdentifierURI": "https://ci.nii.ac.jp/" + }, + { + "nameIdentifier": "zzzzzzz", + "nameIdentifierScheme": "KAKEN2", + "nameIdentifierURI": "https://kaken.nii.ac.jp/" + } + ], + "creatorAlternatives": [ + {} + ], + "creatorAffiliations": [ + { + "affiliationNameIdentifiers": [ + {} + ], + "affiliationNames": [ + {} + ] + } + ] + } + ], + "item_1617186476635": { + "subitem_1522299639480": "open access", + "subitem_1600958577026": "http://purl.org/coar/access_right/c_abf2" + }, + "item_1617186499011": [ + { + "subitem_1522650717957": "ja", + "subitem_1522650727486": "http://localhost", + "subitem_1522651041219": "Rights Information" + } + ], + "item_1617186609386": [ + { + "subitem_1522299896455": "ja", + "subitem_1522300014469": "Other", + "subitem_1522300048512": "http://localhost/", + "subitem_1523261968819": "Sibject1" + } + ], + "item_1617186626617": [ + { + "subitem_description": "DescriptionnDescription
Description", + "subitem_description_language": "en", + "subitem_description_type": "Abstract" + }, + { + "subitem_description": "u6982u8981nu6982u8981nu6982u8981nu6982u8981", + "subitem_description_language": "ja", + "subitem_description_type": "Abstract" + } + ], + "item_1617186643794": [ + { + "subitem_1522300295150": "en", + "subitem_1522300316516": "Publisher" + } + ], + "item_1617186660861": [ + { + "subitem_1522300695726": "Available", + "subitem_1522300722591": "2021-06-30" + } + ], + "item_1617186702042": [ + { + "subitem_1551255818386": "jpn" + } + ], + "item_1617186783814": [ + { + "subitem_identifier_type": "URI", + "subitem_identifier_uri": "http://localhost" + } + ], + "item_1617186859717": [ + { + "subitem_1522658018441": "en", + "subitem_1522658031721": "Temporal" + } + ], + "item_1617186882738": [ + { + "subitem_geolocation_place": [ + { + "subitem_geolocation_place_text": "Japan" + } + ] + } + ], + "item_1617186901218": [ + { + "subitem_1522399143519": { + "subitem_1522399281603": "ISNI", + "subitem_1522399333375": "http://xxx" + }, + "subitem_1522399412622": [ + { + "subitem_1522399416691": "en", + "subitem_1522737543681": "Funder Name" + } + ], + "subitem_1522399571623": { + "subitem_1522399585738": "Award URI", + "subitem_1522399628911": "Award Number" + }, + "subitem_1522399651758": [ + { + "subitem_1522721910626": "en", + "subitem_1522721929892": "Award Title" + } + ] + } + ], + "item_1617186920753": [ + { + "subitem_1522646500366": "ISSN", + "subitem_1522646572813": "xxxx-xxxx-xxxx" + } + ], + "item_1617186941041": [ + { + "subitem_1522650068558": "en", + "subitem_1522650091861": "Source Title" + } + ], + "item_1617186959569": { + "subitem_1551256328147": "1" + }, + "item_1617186981471": { + "subitem_1551256294723": "111" + }, + "item_1617186994930": { + "subitem_1551256248092": "12" + }, + "item_1617187024783": { + "subitem_1551256198917": "1" + }, + "item_1617187045071": { + "subitem_1551256185532": "3" + }, + "item_1617187112279": [ + { + "subitem_1551256126428": "Degree Name", + "subitem_1551256129013": "en" + } + ], + "item_1617187136212": { + "subitem_1551256096004": "2021-06-30" + }, + "item_1617187187528": [ + { + "subitem_1599711633003": [ + { + "subitem_1599711636923": "Conference Name", + "subitem_1599711645590": "ja" + } + ], + "subitem_1599711655652": "1", + "subitem_1599711660052": [ + { + "subitem_1599711680082": "Sponsor", + "subitem_1599711686511": "ja" + } + ], + "subitem_1599711699392": { + "subitem_1599711704251": "2020/12/11", + "subitem_1599711712451": "1", + "subitem_1599711727603": "12", + "subitem_1599711731891": "2000", + "subitem_1599711735410": "1", + "subitem_1599711739022": "12", + "subitem_1599711743722": "2020", + "subitem_1599711745532": "ja" + }, + "subitem_1599711758470": [ + { + "subitem_1599711769260": "Conference Venue", + "subitem_1599711775943": "ja" + } + ], + "subitem_1599711788485": [ + { + "subitem_1599711798761": "Conference Place", + "subitem_1599711803382": "ja" + } + ], + "subitem_1599711813532": "JPN" + } + ], + "item_1617258105262": { + "resourcetype": "conference paper", + "resourceuri": "http://purl.org/coar/resource_type/c_5794" + }, + "item_1617265215918": { + "subitem_1522305645492": "AO", + "subitem_1600292170262": "http://purl.org/coar/version/c_b1a7d7d4d402bcce" + }, + "item_1617349709064": [ + { + "contributorMails": [ + { + "contributorMail": "wekosoftware@nii.ac.jp" + } + ], + "contributorNames": [ + { + "contributorName": "u60c5u5831, u592au90ce", + "lang": "ja" + }, + { + "contributorName": 
"u30b8u30e7u30a6u30dbu30a6, u30bfu30edu30a6", + "lang": "ja-Kana" + }, + { + "contributorName": "Joho, Taro", + "lang": "en" + } + ], + "contributorType": "ContactPerson", + "familyNames": [ + { + "familyName": "u60c5u5831", + "familyNameLang": "ja" + }, + { + "familyName": "u30b8u30e7u30a6u30dbu30a6", + "familyNameLang": "ja-Kana" + }, + { + "familyName": "Joho", + "familyNameLang": "en" + } + ], + "givenNames": [ + { + "givenName": "u592au90ce", + "givenNameLang": "ja" + }, + { + "givenName": "u30bfu30edu30a6", + "givenNameLang": "ja-Kana" + }, + { + "givenName": "Taro", + "givenNameLang": "en" + } + ], + "nameIdentifiers": [ + { + "nameIdentifier": "xxxxxxx", + "nameIdentifierScheme": "ORCID", + "nameIdentifierURI": "https://orcid.org/" + }, + { + "nameIdentifier": "xxxxxxx", + "nameIdentifierScheme": "CiNii", + "nameIdentifierURI": "https://ci.nii.ac.jp/" + }, + { + "nameIdentifier": "xxxxxxx", + "nameIdentifierScheme": "KAKEN2", + "nameIdentifierURI": "https://kaken.nii.ac.jp/" + } + ], + "contributorAlternatives": [ + {} + ], + "contributorAffiliations": [ + { + "contributorAffiliationNameIdentifiers": [ + {} + ], + "contributorAffiliationNames": [ + {} + ] + } + ] + } + ], + "item_1617349808926": { + "subitem_1523263171732": "Version" + }, + "item_1617351524846": { + "subitem_1523260933860": "Unknown" + }, + "item_1617353299429": [ + { + "subitem_1522306207484": "isVersionOf", + "subitem_1522306287251": { + "subitem_1522306382014": "arXiv", + "subitem_1522306436033": "xxxxx" + }, + "subitem_1523320863692": [ + { + "subitem_1523320867455": "en", + "subitem_1523320909613": "Related Title" + } + ] + } + ], + "item_1617605131499": [ + { + "accessrole": "open_access", + "date": [ + { + "dateType": "Available", + "dateValue": "2021-07-12" + } + ], + "displaytype": "simple", + "filename": "1KB.pdf", + "filesize": [ + { + "value": "1 KB" + } + ], + "format": "text/plain", + "mimetype": "application/pdf", + "url": { + "url": "https://weko3.example.org/record/10/files/1KB.pdf" + }, + "version_id": "6842535e-8805-4d6b-837e-60f4476c87b8", + "fileDate": [ + {} + ], + "provide": [ + {} + ] + } + ], + "item_1617610673286": [ + { + "nameIdentifiers": [ + { + "nameIdentifier": "xxxxxx", + "nameIdentifierScheme": "ORCID", + "nameIdentifierURI": "https://orcid.org/" + } + ], + "rightHolderNames": [ + { + "rightHolderLanguage": "ja", + "rightHolderName": "Right Holder Name" + } + ] + } + ], + "item_1617620223087": [ + { + "subitem_1565671149650": "ja", + "subitem_1565671169640": "Banner Headline", + "subitem_1565671178623": "Subheading" + }, + { + "subitem_1565671149650": "en", + "subitem_1565671169640": "Banner Headline", + "subitem_1565671178623": "Subheding" + } + ], + "item_1617944105607": [ + { + "subitem_1551256015892": [ + { + "subitem_1551256027296": "xxxxxx", + "subitem_1551256029891": "kakenhi" + } + ], + "subitem_1551256037922": [ + { + "subitem_1551256042287": "Degree Grantor Name", + "subitem_1551256047619": "en" + } + ] + } + ], + "owner": "1", + "pubdate": "2021-08-06", + "title": "ja_conference paperITEM00000004(public_open_access_open_access_simple)", + "weko_shared_ids": [], + "item_1617187056579": { + "bibliographic_titles": [ + {} + ] + }, + "shared_user_ids": [] + }, + "files": [ + { + "checksum": "sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef", + "completed": true, + "displaytype": "simple", + "filename": "1KB.pdf", + "is_show": false, + "is_thumbnail": false, + "key": "1KB.pdf", + "licensetype": null, + "links": { + "self": 
"/api/files/8a6828be-7242-4510-bb80-de170644145c/1KB.pdf?versionId=6842535e-8805-4d6b-837e-60f4476c87b8" + }, + "mimetype": "application/pdf", + "progress": 100, + "size": 1024, + "version_id": "6842535e-8805-4d6b-837e-60f4476c87b8" + } + ], + "endpoints": { + "initialization": "/api/deposits/redirect/1.0" + } +} \ No newline at end of file diff --git a/modules/weko-items-ui/tests/test_api.py b/modules/weko-items-ui/tests/test_api.py index 919cc7049e..6a2a83ef24 100644 --- a/modules/weko-items-ui/tests/test_api.py +++ b/modules/weko-items-ui/tests/test_api.py @@ -117,7 +117,6 @@ def test_view2(): with open('tests/data/temp_data.json', 'r') as f: tmp = json.load(f) - tmp.pop("weko_link") temp_data = json.dumps(tmp) with app.test_request_context(): diff --git a/modules/weko-items-ui/tests/test_utils.py b/modules/weko-items-ui/tests/test_utils.py index 75cf0fff4e..5eb4f865ed 100644 --- a/modules/weko-items-ui/tests/test_utils.py +++ b/modules/weko-items-ui/tests/test_utils.py @@ -127,7 +127,6 @@ write_bibtex_files, write_files, get_file_download_data, - get_weko_link, check_duplicate, create_item_deleted_data, create_direct_registered_data, @@ -11264,85 +11263,6 @@ def test_get_file_download_data(app, client, records): with pytest.raises(AvailableFilesNotFoundRESTError): get_file_download_data(record.id, record, filenames) -# def get_weko_link(metadata): -# .tox/c1/bin/pytest --cov=weko_items_ui tests/test_utils.py::test_get_weko_link -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-items-ui/.tox/c1/tmp -def test_get_weko_link(app, client, users, db_records, mocker): - mocker.patch("weko_items_ui.utils.WekoAuthors.get_pk_id_by_weko_id",side_effect=["2","0"]) - res = get_weko_link( - { - "metainfo": { - "item_30002_creator2": [ - { - "nameIdentifiers": [ - { - "nameIdentifier": "8", - "nameIdentifierScheme": "WEKO", - "nameIdentifierURI": "", - } - ] - } - ], - "item_30003_creator2": [ - { - "nameIdentifiers": [ - { - "nameIdentifier": "8", - "nameIdentifierScheme": "WEKO", - "nameIdentifierURI": "", - } - ] - } - ], - "item_30004_creator2": [ - { - "nameIdentifiers": [ - { - "nameIdentifier": "12", - "nameIdentifierScheme": "WEKO", - "nameIdentifierURI": "", - } - ] - } - ] - }, - "files": [], - "endpoints": {"initialization": "/api/deposits/items"}, - } - ) - assert res == {"2": "8"} - res = get_weko_link( - { - "metainfo": { - "item_30002_creator2": [ - { - "nameIdentifiers": [ - { - "nameIdentifier": "8", - "nameIdentifierScheme": "OTHER", - "nameIdentifierURI": "", - } - ] - } - ] - }, - "files": [], - "endpoints": {"initialization": "/api/deposits/items"}, - } - ) - assert res == {} - - # not isinstance(x, list) is true - res = get_weko_link({"metainfo": {"field1": "string_value"}}) - assert res == {} - - # not isinstance(y, dict) is true - res = get_weko_link({"metainfo": {"field1": ["string_value"]}}) - assert res == {} - - # not key == "nameIdentifiers" is true - res = get_weko_link({"metainfo": {"field1": [{"field2": {}}]}}) - assert res == {} - # .tox/c1/bin/pytest --cov=weko_items_ui tests/test_utils.py::test_get_duplicate_fields -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-items-ui/.tox/c1/tmp def test_get_duplicate_fields(): data_author_list = [ diff --git a/modules/weko-items-ui/weko_items_ui/api.py b/modules/weko-items-ui/weko_items_ui/api.py index 65692401bb..d6f5da2a70 100644 --- a/modules/weko-items-ui/weko_items_ui/api.py +++ b/modules/weko-items-ui/weko_items_ui/api.py @@ -16,7 +16,6 @@ """API for item login.""" from flask import 
current_app, json, session, url_for -from weko_authors.utils import update_data_for_weko_link from weko_records.api import ItemTypes from weko_records.utils import find_items from weko_workflow.api import WorkActivity @@ -78,9 +77,6 @@ def item_login(item_type_id: int = 0): ] if "endpoints" in item_json: endpoints = item_json.get("endpoints") - if "weko_link" in item_json: - weko_link = item_json.get("weko_link") - update_data_for_weko_link(item_json.get("metainfo"), weko_link) if "cris_linkage" in item_json: cris_linkage = item_json.get("cris_linkage") diff --git a/modules/weko-items-ui/weko_items_ui/utils.py b/modules/weko-items-ui/weko_items_ui/utils.py index a5fba0d75e..c39af5da4c 100644 --- a/modules/weko-items-ui/weko_items_ui/utils.py +++ b/modules/weko-items-ui/weko_items_ui/utils.py @@ -3383,56 +3383,6 @@ def get_data_authors_affiliation_settings(): current_app.logger.error(e) return None -def get_weko_link(metadata): - """ - メタデータからweko_idを取得し、weko_idに対応するpk_idと一緒に - weko_linkを作成します。 - args - metadata: dict - 例:{ - "metainfo": { - "item_30002_creator2": [ - { - "nameIdentifiers": [ - { - "nameIdentifier": "8", - "nameIdentifierScheme": "WEKO", - "nameIdentifierURI": "" - } - ] - } - ] - }, - "files": [], - "endpoints": { - "initialization": "/api/deposits/items" - } - } - return - weko_link: dict - 例:{"2": "10002"} - """ - weko_link = {} - weko_id_list=[] - for x in metadata["metainfo"].values(): - if not isinstance(x, list): - continue - for y in x: - if not isinstance(y, dict): - continue - for key, value in y.items(): - if not key == "nameIdentifiers": - continue - for z in value: - if z.get("nameIdentifierScheme","") == "WEKO": - if z.get("nameIdentifier","") not in weko_id_list: - weko_id_list.append(z.get("nameIdentifier")) - weko_link = {} - for weko_id in weko_id_list: - pk_id = WekoAuthors.get_pk_id_by_weko_id(weko_id) - if int(pk_id) > 0: - weko_link[pk_id] = weko_id - return weko_link def hide_meta_data_for_role(record): """ diff --git a/modules/weko-items-ui/weko_items_ui/views.py b/modules/weko-items-ui/weko_items_ui/views.py index b829efb775..ce7ba698bb 100644 --- a/modules/weko-items-ui/weko_items_ui/views.py +++ b/modules/weko-items-ui/weko_items_ui/views.py @@ -81,8 +81,7 @@ translate_schema_form, translate_validation_message, update_index_tree_for_record, update_json_schema_by_activity_id, update_schema_form_by_activity_id, update_sub_items_by_user_role, validate_form_input_data, validate_user, - validate_user_mail_and_index, get_weko_link, is_duplicate_record, - lock_item_will_be_edit + validate_user_mail_and_index, is_duplicate_record, lock_item_will_be_edit ) from .config import WEKO_ITEMS_UI_FORM_TEMPLATE,WEKO_ITEMS_UI_ERROR_TEMPLATE from weko_theme.config import WEKO_THEME_DEFAULT_COMMUNITY @@ -357,9 +356,6 @@ def iframe_save_model(): if activity_id: sanitize_input_data(data) save_title(activity_id, data) - # メタデータからweko_linkを作成します。 - weko_link = get_weko_link(data) - data["weko_link"] = weko_link work_activity.upt_activity_metadata(activity_id, json.dumps(data)) db.session.commit() except Exception as ex: diff --git a/modules/weko-records/tests/conftest.py b/modules/weko-records/tests/conftest.py index edbec19499..edc2a9dec3 100644 --- a/modules/weko-records/tests/conftest.py +++ b/modules/weko-records/tests/conftest.py @@ -418,34 +418,38 @@ def db_register(app, db, users, records, action_data, item_type): \"givenNameLang\": \"ja\"}], \"creatorAlternatives\": [{}], \"creatorAffiliations\":\ [{\"affiliationNames\": [], \"affiliationNameIdentifiers\": []}], 
\"creatorMails\": [{}]},\ {\"nameIdentifiers\": [{\"nameIdentifierScheme\": \"WEKO\", \"nameIdentifier\": \"111\",\ - \"nameIdentifierURI\": \"\"}, {\"nameIdentifierScheme\": \"ORCID\", \"nameIdentifier\": \"111\", \"nameIdentifierURI\": \"https://orcid.org/111\"}], \"creatorNames\": [{}], \"familyNames\": [{}], \"givenNames\": [{}], \"creatorAlternatives\": [{}], \"creatorAffiliations\": [{\"affiliationNameIdentifiers\": [{}], \"affiliationNames\": [{}]}], \"creatorMails\": [{}]}], \"item_30002_contributor3\": [{\"nameIdentifiers\": [{}], \"contributorNames\": [{}], \"familyNames\": [{}], \"givenNames\": [{}], \"contributorAlternatives\": [{}], \"contributorAffiliations\": [{\"contributorAffiliationNameIdentifiers\": [{}], \"contributorAffiliationNames\": [{}]}], \"contributorMails\": [{}]}], \"item_30002_rights6\": [{}], \"item_30002_rights_holder7\": [{\"nameIdentifiers\": [{}], \"rightHolderNames\": [{}]}], \"item_30002_subject8\": [{}], \"item_30002_description9\": [{}], \"item_30002_publisher10\": [{}], \"item_30002_date11\": [{}], \"item_30002_language12\": [{}], \"item_30002_identifier16\": [{}], \"item_30002_relation18\": [{\"subitem_relation_name\": [{}]}], \"item_30002_temporal19\": [{}], \"item_30002_geolocation20\": [{\"subitem_geolocation_place\": [{}]}], \"item_30002_funding_reference21\": [{\"subitem_funder_names\": [{}], \"subitem_funding_streams\": [{}], \"subitem_award_titles\": [{}]}], \"item_30002_source_identifier22\": [{}], \"item_30002_source_title23\": [{}], \"item_30002_degree_name31\": [{}], \"item_30002_degree_grantor33\": [{\"subitem_degreegrantor_identifier\": [{}], \"subitem_degreegrantor\": [{}]}], \"item_30002_conference34\": [{\"subitem_conference_names\": [{}], \"subitem_conference_sponsors\": [{}], \"subitem_conference_venues\": [{}], \"subitem_conference_places\": [{}]}], \"item_30002_file35\": [{\"filesize\": [{}], \"fileDate\": [{}]}], \"item_30002_heading36\": [{}], \"item_30002_holding_agent_name37\": [{\"holding_agent_names\": [{}]}], \"item_30002_original_language43\": [{}], \"item_30002_dcterms_extent46\": [{\"publisher_names\": [{}], \"publisher_descriptions\": [{}], \"publisher_locations\": [{}], \"publication_places\": [{}]}], \"item_30002_publisher_information45\": [{}], \"item_30002_catalog39\": [{\"catalog_contributors\": [{\"contributor_names\": [{}]}], \"catalog_identifiers\": [{}], \"catalog_titles\": [{}], \"catalog_subjects\": [{}], \"catalog_licenses\": [{}], \"catalog_rights\": [{}], \"catalog_access_rights\": [{}]}], \"item_30002_jpcoar_format40\": [{}], \"item_30002_volume_title44\": [{}], \"item_30002_edition41\": [{}], \"item_30002_dcterms_date38\": [{}], \"item_30002_bibliographic_information29\": {\"bibliographic_titles\": [{}]}, \"item_30002_resource_type13\": {\"resourcetype\": \"data paper\", \"resourceuri\": \"http://purl.org/coar/resource_type/c_beb9\"}, \"shared_user_ids\": []}, \"files\": [], \"endpoints\": {\"initialization\": \"/api/deposits/items\"}, \ - \"weko_link\": {\"1\": \"11\"}}" - ) - activity_without_weko_link = Activity(activity_id='2',workflow_id=1, flow_id=flow_define.id, - item_id=_pid2, - action_id=1, activity_login_user=1, - activity_update_user=1, - activity_start=datetime.strptime('2022/04/14 3:01:53.931', '%Y/%m/%d %H:%M:%S.%f'), - activity_community_id=3, - activity_confirm_term_of_use=True, - title='test', shared_user_ids=[], extra_info={}, - action_order=1, - temp_data="{\"metainfo\": {\"pubdate\": \"2025-03-13\", \"item_30002_title0\": \ - [{\"subitem_title\": \"test\", 
\"subitem_title_language\": \"ja\"}], \ - \"item_30002_alternative_title1\": [{}], \"item_30002_creator2\": \ - [{\"nameIdentifiers\": [{\"nameIdentifier\": \"11\", \"nameIdentifierScheme\":\ - \"WEKO\", \"nameIdentifierURI\": \"\"}], \"creatorNames\": [{\"creatorName\": \ - \"test, jiro\", \"creatorNameLang\": \"ja\"}], \"familyNames\": [{\"familyName\":\ - \"test\", \"familyNameLang\": \"ja\"}], \"givenNames\": [{\"givenName\": \"jiro\", \ - \"givenNameLang\": \"ja\"}], \"creatorAlternatives\": [{}], \"creatorAffiliations\":\ - [{\"affiliationNames\": [], \"affiliationNameIdentifiers\": []}], \"creatorMails\": [{}]},\ - {\"nameIdentifiers\": [{\"nameIdentifierScheme\": \"WEKO\", \"nameIdentifier\": \"111\",\ - \"nameIdentifierURI\": \"\"}, {\"nameIdentifierScheme\": \"ORCID\", \"nameIdentifier\": \"111\", \"nameIdentifierURI\": \"https://orcid.org/111\"}], \"creatorNames\": [{}], \"familyNames\": [{}], \"givenNames\": [{}], \"creatorAlternatives\": [{}], \"creatorAffiliations\": [{\"affiliationNameIdentifiers\": [{}], \"affiliationNames\": [{}]}], \"creatorMails\": [{}]}], \"item_30002_contributor3\": [{\"nameIdentifiers\": [{}], \"contributorNames\": [{}], \"familyNames\": [{}], \"givenNames\": [{}], \"contributorAlternatives\": [{}], \"contributorAffiliations\": [{\"contributorAffiliationNameIdentifiers\": [{}], \"contributorAffiliationNames\": [{}]}], \"contributorMails\": [{}]}], \"item_30002_rights6\": [{}], \"item_30002_rights_holder7\": [{\"nameIdentifiers\": [{}], \"rightHolderNames\": [{}]}], \"item_30002_subject8\": [{}], \"item_30002_description9\": [{}], \"item_30002_publisher10\": [{}], \"item_30002_date11\": [{}], \"item_30002_language12\": [{}], \"item_30002_identifier16\": [{}], \"item_30002_relation18\": [{\"subitem_relation_name\": [{}]}], \"item_30002_temporal19\": [{}], \"item_30002_geolocation20\": [{\"subitem_geolocation_place\": [{}]}], \"item_30002_funding_reference21\": [{\"subitem_funder_names\": [{}], \"subitem_funding_streams\": [{}], \"subitem_award_titles\": [{}]}], \"item_30002_source_identifier22\": [{}], \"item_30002_source_title23\": [{}], \"item_30002_degree_name31\": [{}], \"item_30002_degree_grantor33\": [{\"subitem_degreegrantor_identifier\": [{}], \"subitem_degreegrantor\": [{}]}], \"item_30002_conference34\": [{\"subitem_conference_names\": [{}], \"subitem_conference_sponsors\": [{}], \"subitem_conference_venues\": [{}], \"subitem_conference_places\": [{}]}], \"item_30002_file35\": [{\"filesize\": [{}], \"fileDate\": [{}]}], \"item_30002_heading36\": [{}], \"item_30002_holding_agent_name37\": [{\"holding_agent_names\": [{}]}], \"item_30002_original_language43\": [{}], \"item_30002_dcterms_extent46\": [{\"publisher_names\": [{}], \"publisher_descriptions\": [{}], \"publisher_locations\": [{}], \"publication_places\": [{}]}], \"item_30002_publisher_information45\": [{}], \"item_30002_catalog39\": [{\"catalog_contributors\": [{\"contributor_names\": [{}]}], \"catalog_identifiers\": [{}], \"catalog_titles\": [{}], \"catalog_subjects\": [{}], \"catalog_licenses\": [{}], \"catalog_rights\": [{}], \"catalog_access_rights\": [{}]}], \"item_30002_jpcoar_format40\": [{}], \"item_30002_volume_title44\": [{}], \"item_30002_edition41\": [{}], \"item_30002_dcterms_date38\": [{}], \"item_30002_bibliographic_information29\": {\"bibliographic_titles\": [{}]}, \"item_30002_resource_type13\": {\"resourcetype\": \"data paper\", \"resourceuri\": \"http://purl.org/coar/resource_type/c_beb9\"}, \"shared_user_ids\": []}, \"files\": [], \"endpoints\": {\"initialization\": 
\"/api/deposits/items\"}}" + \"nameIdentifierURI\": \"\"}, {\"nameIdentifierScheme\": \"ORCID\", \"nameIdentifier\": \"111\",\ + \"nameIdentifierURI\": \"https://orcid.org/111\"}], \"creatorNames\": [{}], \"familyNames\": [{}],\ + \"givenNames\": [{}], \"creatorAlternatives\": [{}], \"creatorAffiliations\": [{\"affiliationNameIdentifiers\": [{}],\ + \"affiliationNames\": [{}]}], \"creatorMails\": [{}]}], \"item_30002_contributor3\": [{\"nameIdentifiers\": [{}],\ + \"contributorNames\": [{}], \"familyNames\": [{}], \"givenNames\": [{}], \"contributorAlternatives\": [{}],\ + \"contributorAffiliations\": [{\"contributorAffiliationNameIdentifiers\": [{}],\ + \"contributorAffiliationNames\": [{}]}], \"contributorMails\": [{}]}], \"item_30002_rights6\": [{}],\ + \"item_30002_rights_holder7\": [{\"nameIdentifiers\": [{}], \"rightHolderNames\": [{}]}], \"item_30002_subject8\": [{}],\ + \"item_30002_description9\": [{}], \"item_30002_publisher10\": [{}], \"item_30002_date11\": [{}],\ + \"item_30002_language12\": [{}], \"item_30002_identifier16\": [{}], \"item_30002_relation18\": [{\"subitem_relation_name\": [{}]}],\ + \"item_30002_temporal19\": [{}], \"item_30002_geolocation20\": [{\"subitem_geolocation_place\": [{}]}],\ + \"item_30002_funding_reference21\": [{\"subitem_funder_names\": [{}], \"subitem_funding_streams\": [{}],\ + \"subitem_award_titles\": [{}]}], \"item_30002_source_identifier22\": [{}], \"item_30002_source_title23\": [{}],\ + \"item_30002_degree_name31\": [{}], \"item_30002_degree_grantor33\": [{\"subitem_degreegrantor_identifier\": [{}],\ + \"subitem_degreegrantor\": [{}]}], \"item_30002_conference34\": [{\"subitem_conference_names\": [{}],\ + \"subitem_conference_sponsors\": [{}], \"subitem_conference_venues\": [{}], \"subitem_conference_places\": [{}]}],\ + \"item_30002_file35\": [{\"filesize\": [{}], \"fileDate\": [{}]}], \"item_30002_heading36\": [{}],\ + \"item_30002_holding_agent_name37\": [{\"holding_agent_names\": [{}]}], \"item_30002_original_language43\": [{}],\ + \"item_30002_dcterms_extent46\": [{\"publisher_names\": [{}], \"publisher_descriptions\": [{}],\ + \"publisher_locations\": [{}], \"publication_places\": [{}]}], \"item_30002_publisher_information45\": [{}],\ + \"item_30002_catalog39\": [{\"catalog_contributors\": [{\"contributor_names\": [{}]}], \"catalog_identifiers\": [{}],\ + \"catalog_titles\": [{}], \"catalog_subjects\": [{}], \"catalog_licenses\": [{}], \"catalog_rights\": [{}],\ + \"catalog_access_rights\": [{}]}], \"item_30002_jpcoar_format40\": [{}], \"item_30002_volume_title44\": [{}],\ + \"item_30002_edition41\": [{}], \"item_30002_dcterms_date38\": [{}],\ + \"item_30002_bibliographic_information29\": {\"bibliographic_titles\": [{}]},\ + \"item_30002_resource_type13\": {\"resourcetype\": \"data paper\",\ + \"resourceuri\": \"http://purl.org/coar/resource_type/c_beb9\"}, \"shared_user_ids\": []}, \"files\": [],\ + \"endpoints\": {\"initialization\": \"/api/deposits/items\"}" ) with db.session.begin_nested(): db.session.add(workflow) db.session.add(activity) - db.session.add(activity_without_weko_link) db.session.commit() activity_action = ActivityAction(activity_id=activity.activity_id, diff --git a/modules/weko-records/tests/test_utils.py b/modules/weko-records/tests/test_utils.py index be0b331a85..f92e6db48e 100644 --- a/modules/weko-records/tests/test_utils.py +++ b/modules/weko-records/tests/test_utils.py @@ -71,9 +71,7 @@ def test_json_loader(app, db, item_type, item_type2, item_type_mapping2, db_regi 
item_type_scheme={"properties":{"control_number":{"type":"string"},"item_1":{"title":"item_1","type":"string"},"item_2":{"title":"item_2","type":"string"},"item_3":{"title":"item_3","type":"object","properties":{"item_3_1":{"title":"item_3_1","type":"string"},"iscreator":{"type":"string"}}},"item_4":{"title":"item_4","type":"object","properties":{"item_4_1":{"title":"item_4_1","type":"string"}}},"item_5":{"title":"item_5","type":"array","items":{"properties":{"filename":{"type":"string","title":"filename"},"iscreator":{"type":"string"}}},},"item_6":{"title":"item_6","type":"array","items":{"properties":{"item_6_1":{"title":"item_6_1","type":"string"}}},},"item_7":{"title":"item_7","type":"array","items":{"properties":{"nameIdentifiers":{"type":"array","items":{"properties":{"nameIdentifier":{"title":"name identifier","type":"string"},"nameIdentifierScheme":{"title":"name identifier scheme","type":"string"}}}}}}},"item_8":{"title":"item_8","type": "object","properties":{"nameIdentifiers":{"type":"array","items":{"properties":{"nameIdentifier":{"title":"name identifier","type":"string"},"nameIdentifierScheme":{"title":"name identifier scheme","type":"string"}}}}}}}} item_type_mapping = {"control_number":{},"item_1":{"jpcoar_mapping":""},"item_2":{"jpcoar_mapping":""},"item_3":{"jpcoar_mapping":{"item_3":{"@value":"item_3_1"}}},"item_4":{"jpcoar_mapping":{"item_4":{"@value":"item_4_1"}}},"item_5":{"jpcoar_mapping":{"item_5":{"@value":"filename"}}},"item_6":{"jpcoar_mapping":{"item_6":{"@value":"item_6_1"}}},"item_7":{"jpcoar_mapping":{"creator1":{"nameIdentifier":{"@value":"nameIdentifiers.nameIdentifier","@attributes":{"nameIdentifierScheme":"nameIdentifiers.nameIdentifierScheme"}}}}},"item_8":{"jpcoar_mapping":{"creator1":{"nameIdentifier":{"@value":"nameIdentifiers.nameIdentifier","@attributes":{"nameIdentifierScheme":"nameIdentifiers.nameIdentifierScheme"}}}}}} _pid = records[0][0] - _pid2 = records[1][0] - mocker.patch("weko_authors.api.WekoAuthors.get_pk_id_by_weko_id", return_value="2") - mocker.patch("weko_authors.utils.update_data_for_weko_link") + # not exist $schema data1={} result = json_loader(data1, _pid) @@ -124,12 +122,11 @@ def dumps(self): app.config['WEKO_SCHEMA_JPCOAR_V1_SCHEMA_NAME'] = 'jpcoar_v1_mapping' app.config['WEKO_SCHEMA_DDI_SCHEMA_NAME'] = 'ddi_mapping' dc, jrc, is_edit = json_loader(data3,_pid) - assert dc == OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': ''}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item1'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']), ('weko_shared_ids', []),("weko_link", {"2": "5678"}), ('_oai', {'id': '1'}), ('owner', 1), ('owners', [1])]) + ('author_link', ['1234', '5678']), ('weko_shared_ids', []), ('_oai', {'id': '1'}), ('owner', 
1), ('owners', [1])]) assert jrc == {'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_5': ['item_5'], 'item_3': ['item_3_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': ''}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item1'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']), ('weko_shared_ids', []),("weko_link", {"2": "5678"}), ('_oai', {'id': '1'}), ('owner', 1), ('owners', [1])]), 'itemtype': 'test10', 'publish_date': None, - 'author_link': ['2', '2'], "weko_link": {"2": "5678"}, 'weko_creator_id': '1', 'weko_shared_ids': []} + ('author_link', ['1234', '5678']), ('weko_shared_ids', []), ('_oai', {'id': '1'}), ('owner', 1), ('owners', [1])]), 'itemtype': 'test10', 'publish_date': None, + 'author_link': ['1234', '5678'], 'weko_creator_id': '1', 'weko_shared_ids': []} assert is_edit == False @@ -154,10 +151,10 @@ class MockSM: } dc, jrc, is_edit = json_loader(data4,_pid) assert dc == OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']),('weko_shared_ids', [2]),("weko_link", {"2": "5678"}), ('owner', 1), ('owners', [1])]) + ('author_link', ['1234', '5678']),('weko_shared_ids', [2]), ('owner', 1), ('owners', [1])]) assert jrc == {'item_6': ['item_6_1_v'], 'item_5': ['item_5'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_3': ['item_3_1_v'], 'item_4': ['item_4_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 
'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']), ('weko_shared_ids', [2]),("weko_link" ,{"2": "5678"}), ('owner', 1), ('owners', [1])]), 'itemtype': 'test10', 'publish_date': None, - 'author_link': ['2', '2'], "weko_link" :{"2": "5678"}, 'weko_creator_id': '1', 'weko_shared_ids': [2]} + ('author_link', ['1234', '5678']), ('weko_shared_ids', [2]), ('owner', 1), ('owners', [1])]), 'itemtype': 'test10', 'publish_date': None, + 'author_link': ['1234', '5678'], 'weko_creator_id': '1', 'weko_shared_ids': [2]} assert is_edit == True with patch("weko_records.utils.COPY_NEW_FIELD",False): with patch("flask_login.utils._get_user", return_value=users[0]["obj"]): @@ -177,7 +174,7 @@ class MockSM: } dc, jrc, is_edit = json_loader(data5, _pid) assert dc == OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']), ('weko_shared_ids', [2]),("weko_link", {"2": "5678"}), ('owner', 5), ('owners', [5])]) + ('author_link', ['1234', '5678']), ('weko_shared_ids', [2]), ('owner', 5), ('owners', [5])]) assert jrc == {'item_5': ['item_5'], 'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_6': ['item_6_1_v'], 'item_3': ['item_3_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), @@ -188,9 +185,9 @@ class MockSM: ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']),('weko_shared_ids', [2]),("weko_link", {"2": "5678"}), ('owner', 5), ('owners', [5])]), + ('author_link', ['1234', '5678']),('weko_shared_ids', [2]), 
('owner', 5), ('owners', [5])]), 'itemtype': 'test10', 'publish_date': None, - 'author_link': ['2', '2'], "weko_link": {"2": "5678"}, 'weko_creator_id': '5', 'weko_shared_ids': [2]} + 'author_link': ['1234', '5678'], 'weko_creator_id': '5', 'weko_shared_ids': [2]} assert is_edit == True @@ -214,7 +211,7 @@ class MockSM: } dc, jrc, is_edit = json_loader(data6, _pid) assert dc == OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value': ["item_6_1","item_6_1_v"]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']),('weko_shared_ids', [2]),("weko_link", {"2": "5678"}), ('owner', 1),('owners',[1])]) + ('author_link', ['1234', '5678']),('weko_shared_ids', [2]), ('owner', 1),('owners',[1])]) assert jrc == {'item_5': ['item_5'], 'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_6': ["item_6_1",'item_6_1_v'], 'item_3': ['item_3_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), @@ -226,9 +223,9 @@ class MockSM: ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']), ('weko_shared_ids', [2]),("weko_link", {"2": "5678"}), ('owner', 1),('owners',[1])]), + ('author_link', ['1234', '5678']), ('weko_shared_ids', [2]), ('owner', 1),('owners',[1])]), 'itemtype': 'test10', 'publish_date': None, - 'author_link': ['2', '2'], "weko_link": {"2": "5678"}, 'weko_creator_id': '1', 'weko_shared_ids': [2]} + 'author_link': ['1234', '5678'], 'weko_creator_id': '1', 'weko_shared_ids': [2]} assert is_edit == True with patch("weko_records.utils.COPY_NEW_FIELD",False): with patch("flask_login.utils._get_user", return_value=users[7]["obj"]): @@ -252,7 +249,7 @@ class MockSM: } dc, jrc, is_edit = json_loader(data, _pid) assert dc == OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 
'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value': ["item_6_1","item_6_1_v"]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']),('weko_shared_ids', [2]),("weko_link", {"2": "5678"}), ('owner', 1),('owners',[1])]) + ('author_link', ['1234', '5678']),('weko_shared_ids', [2]), ('owner', 1),('owners',[1])]) assert jrc == {'item_5': ['item_5'], 'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_6': ['item_6_1_v'], 'item_3': ['item_3_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), @@ -264,9 +261,9 @@ class MockSM: ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), - ('author_link', ['2', '2']), ('weko_shared_ids', [2]), ("weko_link", {"2": "5678"}), ('owner', 1),('owners',[1])]), + ('author_link', ['1234', '5678']), ('weko_shared_ids', [2]), ('owner', 1), ('owners',[1])]), 'itemtype': 'test10', 'publish_date': None, - 'author_link': ['2', '2'], "weko_link": {"2": "5678"}, 'weko_shared_ids': [2] ,'weko_creator_id': '1'} + 'author_link': ['1234', '5678'], 'weko_shared_ids': [2] ,'weko_creator_id': '1'} assert is_edit == True data={ '$schema': 'http://schema/3', @@ -284,17 +281,6 @@ class MockSM: "item_7":[{},{"nameIdentifiers":[{"nameIdentifierScheme":"WEKO","nameIdentifier":"1234"}]}], "item_8":{"nameIdentifiers":[{"nameIdentifierScheme":"WEKO","nameIdentifier":"5678"}]}, } - with patch("weko_authors.utils.update_data_for_weko_link") as update_data_for_weko_link: - - dc, jrc, is_edit = json_loader(data, _pid) - assert is_edit == True - with patch("weko_authors.api.WekoAuthors.get_pk_id_by_weko_id", return_value="-1"): - dc, jrc, is_edit = json_loader(data, _pid) - assert is_edit == True - dc, jrc, is_edit = json_loader(data, _pid2) - assert is_edit == False - update_data_for_weko_link.assert_not_called() - # .tox/c1/bin/pytest --cov=weko_records tests/test_utils.py::test_json_loader2 -v -s -vv --cov-branch --cov-report=term --cov-config=tox.ini --basetemp=/code/modules/weko-records/.tox/c1/tmp def test_json_loader2(app, db, item_type, item_type2, item_type3, item_type_mapping2, item_type_mapping3, records, users, mocker): @@ -356,8 +342,8 @@ def dumps(self): app.config['WEKO_SCHEMA_JPCOAR_V1_SCHEMA_NAME'] = 'jpcoar_v1_mapping' app.config['WEKO_SCHEMA_DDI_SCHEMA_NAME'] = 'ddi_mapping' dc, jrc, is_edit = json_loader(data3,_pid) - assert dc == OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', 
{'attribute_name': 'item_2', 'attribute_value': ''}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item1'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']),('weko_shared_ids',[]),('weko_link', {'1234': '1234','5678': '5678'}), ('_oai', {'id': '1'}),('owner', 1),('owners',[1])]) - assert jrc == {'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_5': ['item_5'], 'item_3': ['item_3_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': ''}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item1'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']),('weko_shared_ids',[]),('weko_link', {'1234': '1234','5678': '5678'}), ('_oai', {'id': '1'}),('owner', 1),('owners',[1])]), 'itemtype': 'test10', 'publish_date': None, 'author_link': ['1234', '5678'],'weko_shared_ids':[],'weko_link': {'1234': '1234','5678': '5678'},'weko_creator_id': '1'} + assert dc == OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': ''}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item1'), ('item_type_id', 
'4'), ('control_number', '1'), ('author_link', ['1234', '5678']),('weko_shared_ids',[]), ('_oai', {'id': '1'}),('owner', 1),('owners',[1])]) + assert jrc == {'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_5': ['item_5'], 'item_3': ['item_3_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': ''}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item1'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']),('weko_shared_ids',[]), ('_oai', {'id': '1'}),('owner', 1),('owners',[1])]), 'itemtype': 'test10', 'publish_date': None, 'author_link': ['1234', '5678'],'weko_shared_ids':[],'weko_creator_id': '1'} assert is_edit == False @@ -382,8 +368,8 @@ class MockSM: "item_8":{"nameIdentifiers":[{"nameIdentifierScheme":"WEKO","nameIdentifier":"5678"}]} } dc, jrc, is_edit = json_loader(data4,_pid) - assert dc == OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']),('weko_shared_ids',[2]),('weko_link', {'1234': '1234','5678': '5678'}),('owner', 1),('owners',[1])]) - assert jrc == {'item_6': ['item_6_1_v'], 'item_5': ['item_5'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_3': ['item_3_1_v'], 'item_4': ['item_4_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': 
[{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']),('weko_shared_ids',[2]),('weko_link', {'1234': '1234','5678': '5678'}),('owner', 1),('owners',[1])]), 'itemtype': 'test10', 'publish_date': None, 'author_link': ['1234', '5678'],'weko_creator_id': '1','weko_link': {'1234': '1234','5678': '5678'},'weko_shared_ids': [2]} + assert dc == OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']),('weko_shared_ids',[2]),('owner', 1),('owners',[1])]) + assert jrc == {'item_6': ['item_6_1_v'], 'item_5': ['item_5'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_3': ['item_3_1_v'], 'item_4': ['item_4_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']),('weko_shared_ids',[2]),('owner', 1),('owners',[1])]), 
'itemtype': 'test10', 'publish_date': None, 'author_link': ['1234', '5678'],'weko_creator_id': '1','weko_shared_ids': [2]} assert is_edit == True mocker.patch("weko_authors.api.WekoAuthors.get_pk_id_by_weko_id", side_effect=["1234","5678"]) with patch("weko_records.utils.COPY_NEW_FIELD",False): @@ -403,8 +389,8 @@ class MockSM: "item_8":{"nameIdentifiers":[{"nameIdentifierScheme":"WEKO","nameIdentifier":"5678"}]} } dc, jrc, is_edit = json_loader(data5, _pid) - assert dc == OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']), ('weko_shared_ids', [2]),('weko_link', {'1234': '1234','5678': '5678'}),('owner', 5),('owners',[5])]) - assert jrc == {'item_5': ['item_5'], 'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_6': ['item_6_1_v'], 'item_3': ['item_3_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']), ('weko_shared_ids', [2]),('weko_link', {'1234': '1234','5678': '5678'}),('owner', 5),('owners',[5])]), 'itemtype': 'test10', 'publish_date': None, 'author_link': ['1234', '5678'], 'weko_creator_id': '5','weko_link': {'1234': '1234', '5678': '5678'}, 'weko_shared_ids': [2]} + assert dc == OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 
'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']), ('weko_shared_ids', [2]),('owner', 5),('owners',[5])]) + assert jrc == {'item_5': ['item_5'], 'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'item_6': ['item_6_1_v'], 'item_3': ['item_3_1_v'], 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'Publish Date', 'attribute_value': '2023-08-08'}), ('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value_mlt': [{'item_6_1': 'item_6_1_v'}]}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '4'), ('control_number', '1'), ('author_link', ['1234', '5678']), ('weko_shared_ids', [2]),('owner', 5),('owners',[5])]), 'itemtype': 'test10', 'publish_date': None, 'author_link': ['1234', '5678'], 'weko_creator_id': '5', 'weko_shared_ids': [2]} assert is_edit == True @@ -449,8 +435,7 @@ class MockSM: 'item_type_id': '2', 'owner': 1, 'owners':[1], - 'weko_shared_ids': [], - 'weko_link':{} + 'weko_shared_ids': [] } _jrc_data = { '_item_metadata': { @@ -530,7 +515,6 @@ class MockSM: 'author_link': [], 'owner': 1, 'owners': [1], - 'weko_link': {}, 'weko_shared_ids': [] } @@ -550,7 +534,6 @@ class MockSM: 'control_number': '1', 'author_link': [], 'weko_shared_ids': [], - 'weko_link': {}, 'owner': 1, 'owners':[1] }, @@ -560,7 +543,6 @@ class MockSM: 'author_link': [], 'owner': 1, 'owners': [1], - 'weko_link': {}, 'weko_shared_ids': [] } @@ -628,7 +610,6 @@ class MockSM: 'item_type_id': '2', 'owner': 3, 'owners':[3], - 'weko_link': {}, 'weko_shared_ids': [1,2] } @@ -642,7 +623,6 @@ class MockSM: 'item_type_id': '2', 'owner': 1, 'owners':[1], - 'weko_link': {}, 'weko_shared_ids': [1,2] } @@ -818,8 +798,6 @@ def test_json_loader_with_out_workflow_activity(app, db, item_type, item_type2, item_type_mapping = { 
"control_number":{},"item_1":{"jpcoar_mapping":""},"item_2":{"jpcoar_mapping":""},"item_3":{"jpcoar_mapping":{"item_3":{"@value":"item_3_1"}}},"item_4":{"jpcoar_mapping":{"item_4":{"@value":"item_4_1"}}},"item_5":{"jpcoar_mapping":{"item_5":{"@value":"filename"}}},"item_6":{"jpcoar_mapping":{"item_6":{"@value":"item_6_1"}}},"item_7":{"jpcoar_mapping":{"creator1":{"nameIdentifier":{"@value":"nameIdentifiers.nameIdentifier","@attributes":{"nameIdentifierScheme":"nameIdentifiers.nameIdentifierScheme"}}}}},"item_8":{"jpcoar_mapping":{"creator1":{"nameIdentifier":{"@value":"nameIdentifiers.nameIdentifier","@attributes":{"nameIdentifierScheme":"nameIdentifiers.nameIdentifierScheme"}}}}}} _pid = records[0][0] - mocker.patch("weko_authors.api.WekoAuthors.get_pk_id_by_weko_id", return_value="2") - mocker.patch("weko_authors.utils.update_data_for_weko_link") ItemTypes.create( name='test10', item_type_name=ItemTypeName(name='test10'), @@ -854,8 +832,8 @@ def dumps(self): "item_8":{"nameIdentifiers":[{"nameIdentifierScheme":"WEKO","nameIdentifier":"5678"}]}, } dc, jrc, is_edit = json_loader(data, _pid) - assert dc == OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value': ['item_6_1', 'item_6_1_v']}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), ('author_link', ['2', '2']), ('weko_shared_ids', 2), ('weko_link', {'2': '5678'}), ('_oai', {'id': '1'}), ('owner', 1), ('owners', [1])]) - assert jrc == {'weko_creator_id': '1', 'item_5': ['item_5'], 'item_6': ['item_6_1_v'], 'item_3': ['item_3_1_v'], 'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value': ['item_6_1', 'item_6_1_v']}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), ('author_link', ['2', '2']), ('weko_shared_ids', 2), ('weko_link', {'2': '5678'}), ('_oai', {'id': '1'}), 
('owner', 1), ('owners', [1])]), 'itemtype': 'test10', 'publish_date': None, 'author_link': ['2', '2'], 'weko_link': {'2': '5678'}, 'weko_shared_ids': 2} + assert dc == OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}),('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}),('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}),('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}),('item_6', {'attribute_name': 'item_6', 'attribute_value': ['item_6_1', 'item_6_1_v']}),('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}),('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}),('item_title', 'test_item2'),('item_type_id', '3'),('control_number', '1'),('author_link', ['1234', '5678']),('weko_shared_ids', 2),('_oai', {'id': '1'}),('owner', 1),('owners', [1])]) + assert jrc == {'weko_creator_id': '1', 'item_5': ['item_5'], 'item_6': ['item_6_1_v'], 'item_3': ['item_3_1_v'], 'item_4': ['item_4_1_v'], 'creator1': {'nameIdentifier': ['1234', '5678']}, 'control_number': '1', '_oai': {'id': '1'}, '_item_metadata': OrderedDict([('item_1', {'attribute_name': 'item_1', 'attribute_value': 'item_1_v'}), ('item_2', {'attribute_name': 'item_2', 'attribute_value': 'item_2_v'}), ('item_3', {'attribute_name': 'item_3', 'attribute_type': 'creator', 'attribute_value_mlt': [{'item_3_1': 'item_3_1_v'}]}), ('item_4', {'attribute_name': 'item_4', 'attribute_value_mlt': [{'item_4_1': 'item_4_1_v'}]}), ('item_5', {'attribute_name': 'item_5', 'attribute_type': 'file', 'attribute_value_mlt': [{'filename': 'item_5'}]}), ('item_6', {'attribute_name': 'item_6', 'attribute_value': ['item_6_1', 'item_6_1_v']}), ('item_7', {'attribute_name': 'item_7', 'attribute_value_mlt': [{}, {'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '1234'}]}]}), ('item_8', {'attribute_name': 'item_8', 'attribute_value_mlt': [{'nameIdentifiers': [{'nameIdentifierScheme': 'WEKO', 'nameIdentifier': '5678'}]}]}), ('item_title', 'test_item2'), ('item_type_id', '3'), ('control_number', '1'), ('author_link', ['1234', '5678']), ('weko_shared_ids', 2), ('_oai', {'id': '1'}), ('owner', 1), ('owners', [1])]), 'itemtype': 'test10', 'publish_date': None, 'author_link': ['1234', '5678'], 'weko_shared_ids': 2} assert is_edit == False @@ -863,7 +841,6 @@ def dumps(self): # .tox/c1/bin/pytest --cov=weko_records tests/test_utils.py::test_get_author_link -v -s -vv --cov-branch --cov-report=term --cov-report=html --cov-config=tox.ini --basetemp=/code/modules/weko-records/.tox/c1/tmp def test_get_author_link(app,mocker): author_link = [] - weko_link=dict() value_list = [ { "nameIdentifiers":[{ @@ -879,7 +856,7 @@ def test_get_author_link(app,mocker): } ] mocker.patch("weko_authors.api.WekoAuthors.get_pk_id_by_weko_id", side_effect=["1"]) - ret = get_author_link(author_link, weko_link, value_list) + ret = get_author_link(author_link, value_list) assert ['1'] == author_link author_link = [] @@ -890,12 +867,12 @@ def test_get_author_link(app,mocker): }] } mocker.patch("weko_authors.api.WekoAuthors.get_pk_id_by_weko_id", side_effect=["2"]) - ret = get_author_link(author_link, weko_link, 
value_dict) + ret = get_author_link(author_link, value_dict) assert ['2'] == author_link author_link = [] value_str = 'v2' - ret = get_author_link(author_link,weko_link, value_str) + ret = get_author_link(author_link,value_str) assert [] == author_link diff --git a/modules/weko-records/weko_records/utils.py b/modules/weko-records/weko_records/utils.py index 80700de80e..0def1e6263 100644 --- a/modules/weko-records/weko_records/utils.py +++ b/modules/weko-records/weko_records/utils.py @@ -41,12 +41,11 @@ from weko_admin.models import SearchManagement as sm from weko_schema_ui.schema import SchemaTree from weko_authors.api import WekoAuthors -from weko_authors.utils import update_data_for_weko_link from .api import ItemTypes, Mapping from .config import COPY_NEW_FIELD, WEKO_TEST_FIELD from sqlalchemy import null -def get_author_link(author_link, weko_link, value): +def get_author_link(author_link, value): """Get author link data.""" if isinstance(value, list): for v in value: @@ -57,11 +56,7 @@ def get_author_link(author_link, weko_link, value): and v["nameIdentifiers"][0]["nameIdentifierScheme"] == "WEKO" and "nameIdentifier" in v["nameIdentifiers"][0] ): - weko_id = v["nameIdentifiers"][0]["nameIdentifier"] - pk_id = WekoAuthors.get_pk_id_by_weko_id(weko_id) - if int(pk_id) > 0: - author_link.append(pk_id) - weko_link[str(pk_id)] = weko_id + author_link.append(v["nameIdentifiers"][0]["nameIdentifier"]) elif ( isinstance(value, dict) and "nameIdentifiers" in value @@ -70,11 +65,7 @@ def get_author_link(author_link, weko_link, value): and value["nameIdentifiers"][0]["nameIdentifierScheme"] == "WEKO" and "nameIdentifier" in value["nameIdentifiers"][0] ): - weko_id = value["nameIdentifiers"][0]["nameIdentifier"] - pk_id = WekoAuthors.get_pk_id_by_weko_id(weko_id) - if int(pk_id) > 0: - author_link.append(pk_id) - weko_link[str(pk_id)] = weko_id + author_link.append(value["nameIdentifiers"][0]["nameIdentifier"]) def json_loader(data, pid, owner_id=None, with_deleted=False, replace_field=True, creator_id=None): """Convert the item data and mapping to jpcoar. 
@@ -126,7 +117,6 @@ def _set_shared_ids(data): mp = mjson.dumps() data.get("$schema") author_link = [] - weko_link= {} for k, v in data.items(): if k == "$schema" or mp.get(k) is None: continue @@ -172,14 +162,14 @@ def _set_shared_ids(data): if isinstance(v, list): if len(v) > 0 and isinstance(v[0], dict): item["attribute_value_mlt"] = v - get_author_link(author_link, weko_link, v) + get_author_link(author_link, v) else: item["attribute_value"] = v elif isinstance(v, dict): ar.append(v) item["attribute_value_mlt"] = ar ar = [] - get_author_link(author_link, weko_link, v) + get_author_link(author_link, v) else: item["attribute_value"] = v @@ -213,7 +203,6 @@ def _set_shared_ids(data): dc.update(dict(control_number=pid)) dc.update(dict(author_link=author_link)) dc.update(dict(weko_shared_ids=weko_shared_ids)) - dc.update(dict(weko_link=weko_link)) if COPY_NEW_FIELD: copy_field_test(dc, WEKO_TEST_FIELD, jrc) @@ -262,7 +251,6 @@ def _set_shared_ids(data): jrc.update(dict(itemtype=ojson.model.item_type_name.name)) jrc.update(dict(publish_date=pubdate)) jrc.update(dict(author_link=author_link)) - jrc.update(dict(weko_link=weko_link)) # save items's creator to check permission if current_user and current_user.get_id() is not None: diff --git a/scripts/demo/update_W2025-29.py b/scripts/demo/update_W2025-29.py index 18ad94488e..7746becee0 100644 --- a/scripts/demo/update_W2025-29.py +++ b/scripts/demo/update_W2025-29.py @@ -9,7 +9,6 @@ from properties import property_config from register_properties import del_properties, get_properties_id, register_properties_from_folder from tools import updateRestrictedRecords -# from tools import updateRestrictedRecords, update_weko_links from fix_metadata_53602 import main as fix_metadata_53602_main from invenio_files_rest.models import ( timestamp_before_update as ifr_timestamp_before_update, @@ -68,8 +67,6 @@ def main(restricted_item_type_id, start_time, batch_size=500): current_time = show_exec_time(current_time, "register_properties_only_specified") renew_all_item_types() # 更新されたプロパティを使用してアイテムタイプの更新 current_time = show_exec_time(current_time, "renew_all_item_types") - # update_weko_links.main(batch_size=batch_size) - # current_time = show_exec_time(current_time, "update_weko_links") current_app.logger.info("run update_feedback_mail_list_to_db") update_feedback_mail_list_to_db.main() # 著者DBのweko idの変更。それに伴うメタデータの変更 current_time = show_exec_time(current_time, "update_feedback_mail_list_to_db") diff --git a/tools/update_weko_links.py b/tools/update_weko_links.py deleted file mode 100644 index 206db9b609..0000000000 --- a/tools/update_weko_links.py +++ /dev/null @@ -1,582 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (c) 2025 National Institute of Informatics. -# WEKO is free software; you can redistribute it and/or modify it under the -# terms of the MIT License; see LICENSE file for more details. -""" Update weko_link in records_metadata, workflow_activity and Elasticsearch. - -Usage: -``` -invenio shell tools/update_weko_links.py -``` - -仕様: -* records_metadata(json), workflow_activity(temp_data) および Elasticsearch(item-v1.0.0) に weko_link を追加する - * weko_linkは pk_id をキー、WEKOID を値とする辞書型のデータ - * 改修前(マイグレーション前)は 必ず「pk_id = WEKOID」となる仕様であり、 - アイテムメタデータ内のauthor_link の配列内にはWEKOIDが登録される仕様となっていた。 - よってマイグレーション時は対象アイテムのメタデータ内のauthor_linkを取得し、 - その各要素をキーおよび値として持つ辞書を作成し、格納すればよい。 - * 例) - "author_link": ["1", "2", "3", … , "99"] - ↓ - "weko_link": {"1": "1", "2": "2", "3": "3", ... 
, "99": "99"} - * 既に weko_link が存在する場合は追加処理をスキップする - * 更新処理は invenio(model) を介して行う - * ES更新処理は実行有無を容易に切り替えられるようにしておく - 他のマイグレーションと併せてリインデックスする場合、当ツールではES更新が不要になる可能性がある - →現状は `skip_es_update` 変数で切り替え可能としている。実行時引数での切り替えは未実装。 -* ツールの実行対象は1機関分のみとする - * invenio shell でツール起動することを前提とし、 - 実行対象機関は当ツールを呼びだすシェル側で選択すること -* エラー発生時 - * アイテム単位で処理できたところまでコミットする - エラーが発生したアイテムはロールバックし、次のアイテムの処理を行う - →`db.session.begin_nested()` でネストトランザクションを利用している -* エラー発生後の再実行の際は処理済みのアイテムはスキップされるようにする - * 前述の通り既に weko_link が存在する場合スキップすることで実現 -* ログ出力(標準出力) - * 開始/終了、処理にかかった時間 - * どこまで処理が進んでいるかが分かるようにする - * どのアイテムでエラーが発生したかわかるようにする - -""" - -from datetime import datetime -from operator import or_ -import os -import json -import traceback - -from elasticsearch import Elasticsearch -from flask import current_app -from invenio_db import db -from invenio_pidstore.models import PersistentIdentifier -from invenio_records.models import RecordMetadata -from sqlalchemy import and_ -from weko_deposit.api import WekoIndexer -from weko_workflow.models import Activity - -# 後ほどまとめてリインデックスする等でElasticsearchの更新をスキップする場合はTrueに設定 -skip_es_update = True - -def get_weko_link(metadata): - """ - メタデータからweko_idを取得し、weko_idを使って - weko_linkを作成します。 - args - metadata: dict - 例:{ - "metainfo": { - "item_30002_creator2": [ - { - "nameIdentifiers": [ - { - "nameIdentifier": "8", - "nameIdentifierScheme": "WEKO", - "nameIdentifierURI": "" - } - ] - } - ] - }, - "files": [], - "endpoints": { - "initialization": "/api/deposits/items" - } - } - return - weko_link: dict - 例:{"2": "10002"} - """ - weko_link = {} - weko_id_list=[] - for x in metadata["metainfo"].values(): - if not isinstance(x, list): - continue - for y in x: - if not isinstance(y, dict) or "nameIdentifiers" not in y: - continue - name_identifiers = y["nameIdentifiers"] - for z in name_identifiers: - if z.get("nameIdentifierScheme","") == "WEKO" and z.get("nameIdentifier","") not in weko_id_list: - weko_id_list.append(z.get("nameIdentifier")) - - weko_link = {} - for weko_id in weko_id_list: - weko_link[weko_id] = weko_id - return weko_link - -def update_records_metadata(batch_size=500): - """ - アイテムメタデータおよび編集中ワークフローのtemp_dataを更新する - """ - current_app.logger.info(f" {datetime.now().isoformat()} - Updating records_metadata and workflow_activity...") - es = Elasticsearch( - 'http://' + os.environ.get('INVENIO_ELASTICSEARCH_HOST', 'localhost') + ':9200') - - # 対象アイテムのrecidのリストを取得 - - query = PersistentIdentifier.query.filter( - PersistentIdentifier.pid_type == 'recid', - PersistentIdentifier.status == 'R', - PersistentIdentifier.pid_value.notlike('%.%') - ).with_entities(PersistentIdentifier.pid_value).statement - results = db.engine.execution_options(stream_results=True).execute(query) - recids = [r[0] for r in results] - - current_app.logger.info(f" {datetime.now().isoformat()} - Found {len(recids)} recids to process.") - - pages = [recids[i:i + batch_size] for i in range(0, len(recids), batch_size)] - - for page in pages: - current_app.logger.info(f" Processing page {pages.index(page) + 1}/{len(pages)}...") - for recid in page: - try: - current_app.logger.info(f" Processing recid: {recid}") - - # ==ネストトランザクション開始:アイテム毎にコミットする== - with db.session.begin_nested(): - # 最新バージョンおよび x.0 の records_metadata を更新する - # recid または recid.0 のレコードを取得 - record_metadata_records = RecordMetadata.query.filter( - RecordMetadata.id.in_( - db.session.query(PersistentIdentifier.object_uuid).filter( - PersistentIdentifier.pid_type == 'recid', - PersistentIdentifier.pid_value.in_( - [str(recid), f"{recid}.0"] - ) - ) - 
) - ).all() - current_app.logger.info(f" Found {len(record_metadata_records)} records_metadata entries to process.") - - for record in record_metadata_records: - # records_metadata の weko_link を更新する - json_data = {**record.json} - item_id = record.id - weko_link = {} - if 'weko_link' in json_data: - # すでにweko_linkが存在するrecords_metadataはスキップ - weko_link = json_data['weko_link'] - current_app.logger.info(f' weko_link already exists, skipping update records_metadata item_id: {item_id}') - pass - else: - if 'author_link' in json_data: - # author_linkからweko_linkを作成 - """ - weko_linkは pk_id をキー、WEKOID を値とする辞書型のデータ - 例:{"2": "10002"} - - 改修前(マイグレーション前)は 必ず「pk_id = WEKOID」となる仕様であり、 - アイテムメタデータ内のauthor_link の配列内にはWEKOIDが登録される仕様となっていた。 - よってマイグレーション時は対象アイテムのメタデータ内のauthor_linkを取得し、 - その各要素をキーおよび値として持つ辞書を作成し、格納すればよい。 - 例:author_link = [ "2", "3" ] - → weko_link = { "2": "2", "3": "3" } - """ - author_link = json_data['author_link'] - weko_link = {str(item): str(item) for item in author_link} - json_data['weko_link'] = weko_link - record.json = json_data - else: - # author_linkが存在しない場合はweko_linkを空で作成 - json_data['weko_link'] = weko_link - record.json = json_data - - db.session.merge(record) - current_app.logger.info(f' Updated records_metadata item_id: {item_id}') - - # 編集中の workflow_activity の temp_data を更新する - activities = Activity.query.filter( - Activity.item_id == item_id, - Activity.action_status.notin_(['F', 'C']), - Activity.temp_data.isnot(None), - Activity.temp_data != {} - ).all() - # results = db.engine.execution_options(stream_results=True).execute(query) - # activitiy_ids = [r[0] for r in results] - current_app.logger.info(f" Found {len(activities)} workflow_activity entries to process.") - for activity in activities: - json_str = activity.temp_data - if json_str: - json_data = json.loads(json_str) - if 'weko_link' in json_data: - # すでにweko_linkが存在する場合はスキップ - continue - - # weko_linkを追加 - activity_weko_link = get_weko_link(json_data) - json_data['weko_link'] = activity_weko_link - activity.temp_data = json.dumps(json_data, ensure_ascii=False) - - db.session.merge(activity) - current_app.logger.info(f' Updated workflow_activity id: {activity.id}') - - # Elasticsearchにweko_linkを追加する - if not skip_es_update: - indexer = WekoIndexer() - es_metadata = indexer.get_metadata_by_item_id(item_id) - if es_metadata and '_source' in es_metadata and 'weko_link' in es_metadata["_source"]: - # すでにweko_linkが存在する場合はスキップ - pass - else: - es_version = es_metadata["_version"] - es_metadata["_source"]["weko_link"] = weko_link - body = { - "doc": { - "_item_metadata": es_metadata["_source"]["_item_metadata"], - "weko_link": es_metadata["_source"]["weko_link"] - } - } - - es.update( - index=es_metadata["_index"], # [prefix]-weko-item-v1.0.0 - id=item_id, - doc_type="_doc", - body=body, - version=es_version - ) - current_app.logger.info(f' Updated Elasticsearch item_id: {item_id}') - # ==ネストトランザクション終了:アイテム毎にコミットする== - - current_app.logger.info(f' Finished processing recid: {recid}') - - except Exception as e: - # エラーが起きたアイテムはロールバックして次に進む - current_app.logger.error(f' Error occurred while processing recid: {recid}') - traceback.print_exc() - continue - - # 変更をデータベースに保存 - db.session.commit() - - current_app.logger.info(f" {datetime.now().isoformat()} - Finished updating records_metadata and workflow_activity.") - - -def bulk_update_records_metadata(batch_size=500): - """ - bulk update records_metadata and workflow_activity - Note: This function skipped elasticsearch update. 
- - """ - current_app.logger.info(f" {datetime.now().isoformat()} - Updating records_metadata and workflow_activity...") - - # get recids which status is 'R' and pid_type is 'recid' and pid_value is base recid (not like %.%) - query = PersistentIdentifier.query.filter( - PersistentIdentifier.pid_type == 'recid', - PersistentIdentifier.status == 'R', - or_( - PersistentIdentifier.pid_value.notlike('%.%'), - PersistentIdentifier.pid_value.like('%.0') - ) - ).with_entities(PersistentIdentifier.pid_value, PersistentIdentifier.object_uuid).statement - results = db.engine.execution_options(stream_results=True).execute(query) - recids = [{ - "pid_value": r[0], - "object_uuid": r[1] - } for r in results] - - current_app.logger.info(f" {datetime.now().isoformat()} - Found {len(recids)} recids to process.") - - bulk_batch_size = batch_size // 3 * 2 # considering recid and recid.0 - recid_chunks = [recids[i:i + bulk_batch_size] for i in range(0, len(recids), bulk_batch_size)] - - for i, recid_chunk in enumerate(recid_chunks): - current_app.logger.info(f" Processing page {i + 1}/{len(recid_chunks)}...") - object_uuids = [item['object_uuid'] for item in recid_chunk] - # get recid and recid.0 - records_metadata_query = RecordMetadata.query.filter( - RecordMetadata.id.in_(object_uuids) - ).with_entities(RecordMetadata.id, RecordMetadata.json, RecordMetadata.version_id).statement - results = db.engine.execution_options(stream_results=True).execute(records_metadata_query) - records_metadata_info = {r[0]: (r[1], r[2]) for r in results} - - skipped_record_metadata_ids = [] - bulk_records_metadata_data = [] - for item_id, (json_data, version_id) in records_metadata_info.items(): - current_app.logger.info(f" Creating new records_metadata id: {item_id}") - new_record_json = json_data - if 'weko_link' in json_data: - # すでにweko_linkが存在するrecords_metadataはスキップ - skipped_record_metadata_ids.append(item_id) - current_app.logger.info(f' weko_link already exists, skipping update records_metadata item_id: {item_id}') - continue - - if 'author_link' in json_data: - # author_linkからweko_linkを作成 - """ - weko_linkは pk_id をキー、WEKOID を値とする辞書型のデータ - 例:{"2": "10002"} - - 改修前(マイグレーション前)は 必ず「pk_id = WEKOID」となる仕様であり、 - アイテムメタデータ内のauthor_link の配列内にはWEKOIDが登録される仕様となっていた。 - よってマイグレーション時は対象アイテムのメタデータ内のauthor_linkを取得し、 - その各要素をキーおよび値として持つ辞書を作成し、格納すればよい。 - 例:author_link = [ "2", "3" ] - → weko_link = { "2": "2", "3": "3" } - """ - author_link = json_data["author_link"] - weko_link = {str(item): str(item) for item in author_link} - new_record_json["weko_link"] = weko_link - else: - # create empty weko_link when author_link not exists - new_record_json["weko_link"] = {} - - bulk_records_metadata_data.append({ - "id": item_id, - "json": new_record_json, - "version_id": version_id - }) - - # check if bulk update data length equal to records length - if (len(bulk_records_metadata_data) + len(skipped_record_metadata_ids)) != len(records_metadata_info): - current_app.logger.error("Bulk update data length not equal to records length") - current_app.logger.error(f" records length: {len(records_metadata_info)}") - current_app.logger.error(f" bulk update data length: {len(bulk_records_metadata_data)}") - current_app.logger.error(f" skipped records length: {len(skipped_record_metadata_ids)}") - raise Exception("Bulk update data length not equal to records length") - - # bulk update activities - activity_query = Activity.query.filter( - Activity.item_id.in_(list(records_metadata_info.keys())), - Activity.action_status.notin_(['F', 'C']), - 
Activity.temp_data.isnot(None), - Activity.temp_data != {} - ).with_entities(Activity.id, Activity.temp_data).statement - results = db.engine.execution_options(stream_results=True).execute(activity_query) - activities = {r[0]: r[1] for r in results} - - skipped_activity_ids = [] - new_activities = [] - for activity_id, temp_data in activities.items(): - json_str = temp_data - if not json_str: - current_app.logger.info(f' temp_data is empty, skipping activity id: {activity_id}') - skipped_activity_ids.append(activity_id) - continue - - json_data = json.loads(json_str) - if 'weko_link' in json_data: - # skip if weko_link already exists - skipped_activity_ids.append(activity_id) - current_app.logger.info(f' weko_link already exists, skipping activity id: {activity_id}') - continue - - # weko_linkを追加 - activity_weko_link = get_weko_link(json_data) - json_data['weko_link'] = activity_weko_link - new_activities.append({ - 'id': activity_id, - 'temp_data': json.dumps(json_data, ensure_ascii=False, default=str) - }) - - # check if bulk update data length equal to activities length - if (len(new_activities) + len(skipped_activity_ids)) != len(activities): - current_app.logger.error("Bulk update data length not equal to activities length") - current_app.logger.error(f" activities length: {len(activities)}") - current_app.logger.error(f" bulk update data length: {len(new_activities)}") - current_app.logger.error(f" skipped activities length: {len(skipped_activity_ids)}") - raise Exception("Bulk update data length not equal to activities length") - - try: - # transaction start - with db.session.begin_nested(): - # bulk update records_metadata - if bulk_records_metadata_data: - db.session.bulk_update_mappings(RecordMetadata, bulk_records_metadata_data) - # show bulk update records_metadata ids - updated_ids = [r['id'] for r in bulk_records_metadata_data] - # current_app.logger.info(f' Updated records_metadata item_ids: {updated_ids}') - for i in updated_ids: - current_app.logger.info(f"[FIX] records_metadata:{i}") - - # bulk update activities - if new_activities: - db.session.bulk_update_mappings(Activity, new_activities) - # show bulk update activity ids - updated_ids = [r['id'] for r in new_activities] - # current_app.logger.info(f' Updated workflow_activity ids: {updated_ids}') - for i in updated_ids: - current_app.logger.info(f"[FIX] workflow_activity:{i}") - - # current_app.logger.info(f' Finished processing recids: {recid_chunk}') - - except Exception as e: - # エラーが起きたアイテムはロールバックして次に進む - current_app.logger.error(e) - traceback.print_exc() - continue - - # 変更をデータベースに保存 - db.session.commit() - - current_app.logger.info(f" {datetime.now().isoformat()} - Finished updating records_metadata and workflow_activity.") - - -def get_working_activity_ids(): - """ - 登録途中のワークフローのアクティビティIDのリストを取得します。 - - Returns: - list: Activityテーブルのid(アクティビティID)のリスト - """ - # item_idがNoneかつtemp_dataがNoneでないActivity.idのリストを取得 - query = Activity.query.filter( - Activity.item_id.is_(None), - Activity.temp_data.isnot(None) - ).with_entities(Activity.id).statement - results = db.engine.execution_options(stream_results=True).execute(query) - return [r[0] for r in results] - -def update_workflow_activities(batch_size=500): - """ - 登録途中のワークフローのtemp_dataを更新する - """ - current_app.logger.info(f" {datetime.now().isoformat()} - Updating workflow_activity for in-progress workflows...") - # 対象アクティビティIDリストを取得 - working_activity_ids = get_working_activity_ids() - current_app.logger.info(f" {datetime.now().isoformat()} - Found 
{len(working_activity_ids)} workflow activities to process.") - - pages = [working_activity_ids[i:i + batch_size] for i in range(0, len(working_activity_ids), batch_size)] - - for page in pages: - current_app.logger.info(f" Processing page {pages.index(page) + 1}/{len(pages)}...") - activities = Activity.query.filter(Activity.id.in_(page)).all() - for activity in activities: - try: - current_app.logger.info(f" Processing workflow activity id: {activity.id}") - with db.session.begin_nested(): - # workflow_activity の temp_data を更新する - json_str = activity.temp_data - if json_str: - json_data = json.loads(json_str) - if 'weko_link' in json_data: - # すでにweko_linkが存在する場合はスキップ - current_app.logger.info(f' weko_link already exists, skipping activity id: {id}') - continue - - # # weko_linkを追加してコミット - weko_link = get_weko_link(json_data) - json_data['weko_link'] = weko_link - # del json_data['weko_link'] # テスト用にweko_linkを削除 - activity.temp_data = json.dumps(json_data, ensure_ascii=False) - - db.session.merge(activity) - - current_app.logger.info(f' Updated workflow id: {id}') - except Exception as e: - # このレコードはロールバックして次に進む - current_app.logger.error(f' Error occurred while processing activity id: {id}') - traceback.print_exc() - continue - - # 変更をデータベースに保存 - db.session.commit() - - current_app.logger.info(f" {datetime.now().isoformat()} - Finished updating workflow_activity.") - -def bulk_update_workflow_activities(batch_size=500): - """ - bulk update workflow_activity for in-progress workflows - Note: This function skipped elasticsearch update. - - """ - current_app.logger.info(f" {datetime.now().isoformat()} - Updating workflow_activity for in-progress workflows...") - - # get working activity ids - query = Activity.query.filter( - Activity.item_id.is_(None), - Activity.temp_data.isnot(None) - ).with_entities(Activity.id, Activity.temp_data).statement - results = db.engine.execution_options(stream_results=True).execute(query) - all_activities = [(r[0], r[1]) for r in results] - - current_app.logger.info(f" {datetime.now().isoformat()} - Found {len(all_activities)} workflow activities to process.") - - activity_chunks = [all_activities[i:i + batch_size] for i in range(0, len(all_activities), batch_size)] - - for i, activities in enumerate(activity_chunks): - current_app.logger.info(f" Processing page {i + 1}/{len(activity_chunks)}...") - - skipped_activity_ids = [] - new_activities = [] - for activity_id, temp_data in activities: - json_str = temp_data - if not json_str: - current_app.logger.info(f' temp_data is empty, skipping activity id: {activity_id}') - skipped_activity_ids.append(activity_id) - continue - - json_data = json.loads(json_str) - if 'weko_link' in json_data: - # skip if weko_link already exists - skipped_activity_ids.append(activity_id) - current_app.logger.info(f' weko_link already exists, skipping activity id: {activity_id}') - continue - - # weko_linkを追加 - activity_weko_link = get_weko_link(json_data) - json_data['weko_link'] = activity_weko_link - new_activities.append({ - 'id': activity_id, - 'temp_data': json.dumps(json_data, ensure_ascii=False, default=str) - }) - - # check if bulk update data length equal to activities length - if (len(new_activities) + len(skipped_activity_ids)) != len(activities): - current_app.logger.error("Bulk update data length not equal to activities length") - current_app.logger.error(f" activities length: {len(activities)}") - current_app.logger.error(f" bulk update data length: {len(new_activities)}") - current_app.logger.error(f" skipped 
activities length: {len(skipped_activity_ids)}") - raise Exception("Bulk update data length not equal to activities length") - - # transaction start - try: - with db.session.begin_nested(): - # bulk update activities - if new_activities: - db.session.bulk_update_mappings(Activity, new_activities) - # show bulk update activity ids - updated_ids = [r['id'] for r in new_activities] - # current_app.logger.info(f' Updated workflow_activity ids: {updated_ids}') - for i in updated_ids: - current_app.logger.info(f"[FIX] workflow_activity:{i}") - - # current_app.logger.info(f' Finished processing activities: {[r[0] for r in activities]}') - - except Exception as e: - # エラーが起きたアイテムはロールバックして次に進む - current_app.logger.error(e) - traceback.print_exc() - continue - - # commit changes to database - db.session.commit() - - current_app.logger.info(f" {datetime.now().isoformat()} - Finished updating workflow_activity.") - - -def main(batch_size=500): - """Main context. - Args: - batch_size: int - 一度に処理するアイテム数 - """ - # アイテムメタデータおよび編集中ワークフローのtemp_dataを更新する - # update_records_metadata(batch_size) - bulk_update_records_metadata(batch_size) - - # 登録途中のワークフローのtemp_dataを更新する - bulk_update_workflow_activities(batch_size) - - -if __name__ == '__main__': - starttime = datetime.now() - current_app.logger.info(f"{starttime.isoformat()} - Starting update_weko_links.py") - - main() - - endtime = datetime.now() - current_app.logger.info(f"{endtime.isoformat()} - Finished update_weko_links.py") - current_app.logger.info(f"Duration: {endtime - starttime}") \ No newline at end of file From c63aaa7ed38304b4aa031d98d15a404fb4a46791 Mon Sep 17 00:00:00 2001 From: ayumi-nishida Date: Fri, 21 Nov 2025 09:56:30 +0900 Subject: [PATCH 2/9] =?UTF-8?q?=E4=BD=BF=E3=82=8F=E3=82=8C=E3=81=A6?= =?UTF-8?q?=E3=81=84=E3=81=AA=E3=81=84=E9=96=A2=E6=95=B0=E3=82=92=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/invenio-indexer/tests/test_api.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/modules/invenio-indexer/tests/test_api.py b/modules/invenio-indexer/tests/test_api.py index 9fd2f7afd9..284cab19e6 100644 --- a/modules/invenio-indexer/tests/test_api.py +++ b/modules/invenio-indexer/tests/test_api.py @@ -143,15 +143,18 @@ def test_process_bulk_queue_errors(app, queue): ret = {} - def _mock_bulk(client, actions_iterator, **kwargs): + def _mock_bulk(self, client, actions_iterator, **kwargs): ret['actions'] = list(actions_iterator) - return len(ret['actions']) - - with patch('invenio_indexer.api.bulk', _mock_bulk): - # Exceptions are caught - assert RecordIndexer().process_bulk_queue() == (0, 0, 2) - assert len(ret['actions']) == 1 - assert ret['actions'][0]['_id'] == str(r2.id) + return (len(ret['actions']), 0) + + with patch('invenio_indexer.api.RecordIndexer.reindex_bulk', _mock_bulk): + with patch('invenio_indexer.api.RecordIndexer._actionsiter', return_value=[ + {'_id': str(r2.id), '_op_type': 'index', '_source': {'title': 'valid'}} + ]): + # Exceptions are caught + assert RecordIndexer().process_bulk_queue() == (1, 0, 1) + assert len(ret['actions']) == 1 + assert ret['actions'][0]['_id'] == str(r2.id) # .tox/c1/bin/pytest --cov=invenio_indexer tests/test_api.py::test_process_bulk_queue -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-workflow/.tox/c1/tmp def test_process_bulk_queue(app, queue): From 19b332ca69fa8a0b4da052797a6e7b09a7660774 Mon Sep 17 00:00:00 2001 From: ayumi-nishida Date: Fri, 
21 Nov 2025 10:06:02 +0900 Subject: [PATCH 3/9] =?UTF-8?q?=E3=83=86=E3=82=B9=E3=83=88=E3=82=B9?= =?UTF-8?q?=E3=82=AD=E3=83=83=E3=83=97=E3=82=92=E5=89=8A=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/weko-deposit/tests/test_tasks.py | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/weko-deposit/tests/test_tasks.py b/modules/weko-deposit/tests/test_tasks.py index 8e7cd27775..9d7538956a 100644 --- a/modules/weko-deposit/tests/test_tasks.py +++ b/modules/weko-deposit/tests/test_tasks.py @@ -1992,7 +1992,6 @@ def test_extract_pdf_and_update_file_contents(app, db, location, caplog): # .tox/c1/bin/pytest --cov=weko_deposit tests/test_tasks.py::test_extract_pdf_and_update_file_contents_api_cases -v -s -vv --cov-branch --cov-report=term --cov-config=tox.ini --basetemp=/code/modules/weko-deposit/.tox/c1/tmp -@pytest.mark.skip() @pytest.mark.parametrize("tika_path, isfile, storage_exception, subprocess_returncode, update_side_effect, expect_error_attr, expect_content", [ ("/tmp/tika.jar", True, None, 0, None, None, "abc"), # normal (None, True, None, 0, None, Exception, None), # tika jar not found From fffc4bda524ff3ed1d1bb385c955597c9603ee80 Mon Sep 17 00:00:00 2001 From: ayumi-nishida Date: Tue, 25 Nov 2025 18:27:25 +0900 Subject: [PATCH 4/9] =?UTF-8?q?=E8=BF=BD=E5=8A=A0=E5=88=86=E3=82=92?= =?UTF-8?q?=E4=BF=AE=E6=AD=A3=E3=80=81=E3=81=9D=E3=82=8C=E3=81=AB=E4=BC=B4?= =?UTF-8?q?=E3=81=84=E5=8D=98=E4=BD=93=E3=83=86=E3=82=B9=E3=83=88=E3=82=B3?= =?UTF-8?q?=E3=83=BC=E3=83=89=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/weko-authors/tests/test_admin.py | 25 +-- modules/weko-authors/tests/test_api.py | 93 ++------- modules/weko-authors/tests/test_rest.py | 19 -- modules/weko-authors/tests/test_tasks.py | 49 ++--- modules/weko-authors/tests/test_utils.py | 181 ++---------------- modules/weko-authors/tests/test_validation.py | 54 ------ modules/weko-authors/tests/test_views.py | 175 ----------------- modules/weko-authors/weko_authors/admin.py | 2 - modules/weko-authors/weko_authors/api.py | 78 +------- modules/weko-authors/weko_authors/config.py | 18 +- .../weko_authors/contrib/validation.py | 46 ----- modules/weko-authors/weko_authors/rest.py | 80 +------- modules/weko-authors/weko_authors/schema.py | 15 -- modules/weko-authors/weko_authors/tasks.py | 24 +-- .../weko_authors/admin/author_import.html | 5 +- .../translations/en/LC_MESSAGES/messages.po | 6 +- .../translations/ja/LC_MESSAGES/messages.po | 8 +- .../weko_authors/translations/messages.pot | 6 +- modules/weko-authors/weko_authors/utils.py | 136 ++----------- modules/weko-authors/weko_authors/views.py | 93 +-------- modules/weko-deposit/tests/test_tasks.py | 2 +- .../data/jsonld/record_metadata_full.json | 3 +- 22 files changed, 115 insertions(+), 1003 deletions(-) diff --git a/modules/weko-authors/tests/test_admin.py b/modules/weko-authors/tests/test_admin.py index 06368ae052..1067f89407 100644 --- a/modules/weko-authors/tests/test_admin.py +++ b/modules/weko-authors/tests/test_admin.py @@ -755,8 +755,8 @@ def __init__(self,id): current_cache.set(current_app.config["WEKO_AUTHORS_IMPORT_CACHE_RESULT_SUMMARY_KEY"], {"key":"result_summary_key"}) mocker.patch("weko_authors.admin.prepare_import_data",return_value=([ - {"pk_id": "test_id0", "current_weko_id": "1000", "weko_id": "1000"}, - {"pk_id": "test_id1", "current_weko_id": "1001", "weko_id": "1001"}, + {"pk_id": "test_id0", "weko_id": 
"1000"}, + {"pk_id": "test_id1", "weko_id": "1001"}, ], 1, 1)) mocker.patch("weko_authors.admin.group.apply_async",return_value=MockTaskGroup()) res = client.post(url, data=json.dumps(data), content_type='application/json') @@ -769,22 +769,18 @@ def __init__(self,id): { "task_id": 0, "record_id": "test_id0", - "previous_weko_id": "1000", - "new_weko_id": "1000", "status": "PENDING", }, { "task_id": 1, "record_id": "test_id1", - "previous_weko_id": "1001", - "new_weko_id": "1001", "status": "PENDING", }, ], }, "records": [ - {"pk_id": "test_id0", "current_weko_id": "1000", "weko_id": "1000"}, - {"pk_id": "test_id1", "current_weko_id": "1001", "weko_id": "1001"}, + {"pk_id": "test_id0", "weko_id": "1000"}, + {"pk_id": "test_id1", "weko_id": "1001"}, ], } assert json.loads(res.data) == test @@ -817,8 +813,8 @@ def __init__(self,id): current_cache.set(current_app.config["WEKO_AUTHORS_IMPORT_CACHE_RESULT_FILE_PATH_KEY"], None) current_cache.set(current_app.config["WEKO_AUTHORS_IMPORT_CACHE_RESULT_SUMMARY_KEY"],None) mocker.patch("weko_authors.admin.prepare_import_data",return_value=([ - {"pk_id": "test_id0", "current_weko_id": "1000", "weko_id": "1000"}, - {"pk_id": "test_id1", "current_weko_id": "1001", "weko_id": "1001"}, + {"pk_id": "test_id0", "weko_id": "1000"}, + {"pk_id": "test_id1", "weko_id": "1001"}, ], 1, 2000)) mocker.patch("weko_authors.admin.group.apply_async",return_value=MockTaskGroup()) mock_task = MagicMock() @@ -835,24 +831,21 @@ def __init__(self,id): { "task_id": 0, "record_id": "test_id0", - "previous_weko_id": "1000", - "new_weko_id": "1000", "status": "PENDING", }, { "task_id": 1, "record_id": "test_id1", - "previous_weko_id": "1001", - "new_weko_id": "1001", "status": "PENDING", }, ], }, "records": [ - {"pk_id": "test_id0", "current_weko_id": "1000", "weko_id": "1000"}, - {"pk_id": "test_id1", "current_weko_id": "1001", "weko_id": "1001"}, + {"pk_id": "test_id0", "weko_id": "1000"}, + {"pk_id": "test_id1", "weko_id": "1001"}, ], } + assert json.loads(res.data) == test # Exception (result_over_max_file_path is true) diff --git a/modules/weko-authors/tests/test_api.py b/modules/weko-authors/tests/test_api.py index ee04d7be84..e3fb98bbfd 100644 --- a/modules/weko-authors/tests/test_api.py +++ b/modules/weko-authors/tests/test_api.py @@ -58,7 +58,7 @@ def test_create(self,app,db,esindex, mocker, users): WekoAuthors.create(data) db.session.commit() author = Authors.query.filter_by(id=id).one() - test = {"authorIdInfo": [], "gather_flg": 0, "id": str(es_id), "pk_id": "1"} + test = {"authorIdInfo": [{'idType': '1', 'authorId': str(id), 'authorIdShowFlg': 'true'}], "gather_flg": 0, "id": str(es_id), "pk_id": "1"} assert author assert author.json == test res = current_search_client.get(index=current_app.config["WEKO_AUTHORS_ES_INDEX_NAME"],doc_type=current_app.config['WEKO_AUTHORS_ES_DOC_TYPE'],id=str(es_id)) @@ -86,7 +86,7 @@ def test_create(self,app,db,esindex, mocker, users): with patch("weko_authors.api.uuid.uuid4",return_value = es_id): data = { "authorNameInfo": [{"familyName": "テスト","firstName": "ハナコ","fullName": "","language": "ja-Kana","nameFormat": "familyNmAndNm","nameShowFlg": "true"}], - "authorIdInfo": [{"idType": "2","authorId": "01234","authorIdShowFlg": "true"}], + "authorIdInfo": [], "emailInfo": [{"email": "example@com"}], "is_deleted":"false", "communityIds": ["comm01"], @@ -96,7 +96,7 @@ def test_create(self,app,db,esindex, mocker, users): author = Authors.query.filter_by(id=id).one() test = { "authorNameInfo": [{"familyName": "テスト","firstName": 
"ハナコ","fullName": "","language": "ja-Kana","nameFormat": "familyNmAndNm","nameShowFlg": "true"}], - "authorIdInfo": [{"idType": "2","authorId": "01234","authorIdShowFlg": "true"}], + "authorIdInfo": [{"idType": "1","authorId": str(id),"authorIdShowFlg": "true"}], "emailInfo": [{"email": "example@com"}], "is_deleted":"false", "gather_flg": 0, @@ -393,7 +393,7 @@ def test_get_author_for_validation(self,authors,mocker): authors_result, external_result = WekoAuthors.get_author_for_validation() assert authors_result == {"1":True,"2":True,"3":True,"4":False} - assert external_result == {"1":{"1":["1"],"2":["2"]},"2":{"1234":["1"],"5678":["2"]},"3":{"12345":["1"]}} + assert external_result == {"2":{"1234":["1"],"5678":["2"]},"3":{"12345":["1"]}} # def get_id_prefix_all(cls): @@ -567,7 +567,6 @@ def test_prepare_export_data(self,db, authors,mocker): header, label_en,label_jp,data = WekoAuthors.prepare_export_data(None, None, None, None, None, None, 0, 10) assert header == [ "#pk_id", - "weko_id", "authorNameInfo[0].familyName", "authorNameInfo[0].firstName", "authorNameInfo[0].language", @@ -595,8 +594,7 @@ def test_prepare_export_data(self,db, authors,mocker): "communityIds[0]", ] assert label_en == [ - "#Author ID", - "WEKO ID", + "#WEKO ID", "Family Name[0]", "Given Name[0]", "Language[0]", @@ -624,8 +622,7 @@ def test_prepare_export_data(self,db, authors,mocker): "Community[0]", ] assert label_jp == [ - "#著者ID", - "WEKO ID", + "#WEKO ID", "姓[0]", "名[0]", "言語[0]", @@ -653,17 +650,17 @@ def test_prepare_export_data(self,db, authors,mocker): "コミュニティ[0]", ] - assert data == [["1","1","テスト","太郎","ja","familyNmAndNm","Y","ORCID","1234","Y","3","12345","Y","test.taro@test.org","",None,None,None,None,None,None,"","ja","Y",None,None,None], - ["2","2","test","smith","en","familyNmAndNm","Y","ORCID","5678","Y",None,None,None,"test.smith@test.org","","1","1234","Y","2","12345","Y","","ja","Y",None,None,None], - ["3",None,"test2","smith2","en","familyNmAndNm","Y",None,None,None,None,None,None,"test.smith2@test.org","","1","91011","Y",None,None,None,"","ja","Y",None,None,None], - ["4",None,None,None,None,None,None,None,None,None,None,None,None,None,"D",None,None,None,None,None,None,None,None,None,None,None,None]] + assert data == [["1","テスト","太郎","ja","familyNmAndNm","Y","ORCID","1234","Y","3","12345","Y","test.taro@test.org","",None,None,None,None,None,None,"","ja","Y",None,None,None], + ["2","test","smith","en","familyNmAndNm","Y","ORCID","5678","Y",None,None,None,"test.smith@test.org","","1","1234","Y","2","12345","Y","","ja","Y",None,None,None], + ["3","test2","smith2","en","familyNmAndNm","Y",None,None,None,None,None,None,"test.smith2@test.org","","1","91011","Y",None,None,None,"","ja","Y",None,None,None], + ["4",None,None,None,None,None,None,None,None,None,None,None,None,"D",None,None,None,None,None,None,None,None,None,None,None,None]] # authors is false mocker.patch("weko_authors.api.WekoAuthors.get_by_range",return_value=[]) header, label_en,label_jp,data = WekoAuthors.prepare_export_data(None, None, None, None, None, None, 0, 10) - assert header == ["#pk_id","weko_id","authorNameInfo[0].familyName","authorNameInfo[0].firstName","authorNameInfo[0].language","authorNameInfo[0].nameFormat","authorNameInfo[0].nameShowFlg","authorIdInfo[0].idType","authorIdInfo[0].authorId","authorIdInfo[0].authorIdShowFlg","emailInfo[0].email","is_deleted","communityIds[0]"] - assert label_en == ["#Author ID","WEKO ID","Family Name[0]","Given Name[0]","Language[0]","Name Format[0]","Name Display[0]","Identifier 
Scheme[0]","Identifier[0]","Identifier Display[0]","Mail Address[0]","Delete Flag","Community[0]"] - assert label_jp == ["#著者ID","WEKO ID","姓[0]","名[0]","言語[0]","フォーマット[0]","姓名・言語 表示/非表示[0]","外部著者ID 識別子[0]","外部著者ID[0]","外部著者ID 表示/非表示[0]","メールアドレス[0]","削除フラグ","コミュニティ[0]"] + assert header == ["#pk_id","authorNameInfo[0].familyName","authorNameInfo[0].firstName","authorNameInfo[0].language","authorNameInfo[0].nameFormat","authorNameInfo[0].nameShowFlg","authorIdInfo[0].idType","authorIdInfo[0].authorId","authorIdInfo[0].authorIdShowFlg","emailInfo[0].email","is_deleted","communityIds[0]"] + assert label_en == ["#WEKO ID","Family Name[0]","Given Name[0]","Language[0]","Name Format[0]","Name Display[0]","Identifier Scheme[0]","Identifier[0]","Identifier Display[0]","Mail Address[0]","Delete Flag","Community[0]"] + assert label_jp == ["#WEKO ID","姓[0]","名[0]","言語[0]","フォーマット[0]","姓名・言語 表示/非表示[0]","外部著者ID 識別子[0]","外部著者ID[0]","外部著者ID 表示/非表示[0]","メールアドレス[0]","削除フラグ","コミュニティ[0]"] assert data == [] @@ -681,11 +678,11 @@ def test_prepare_export_data(self,db, authors,mocker): mapping = WEKO_AUTHORS_FILE_MAPPING header, label_en,label_jp,data = WekoAuthors.prepare_export_data(mapping, None, None, [a], scheme_info, None, 0, 10) - assert header == ["#pk_id","weko_id","authorNameInfo[0].familyName","authorNameInfo[0].firstName","authorNameInfo[0].language","authorNameInfo[0].nameFormat","authorNameInfo[0].nameShowFlg","authorIdInfo[0].idType","authorIdInfo[0].authorId","authorIdInfo[0].authorIdShowFlg","emailInfo[0].email","is_deleted","communityIds[0]"] - assert label_en == ["#Author ID","WEKO ID","Family Name[0]","Given Name[0]","Language[0]","Name Format[0]","Name Display[0]","Identifier Scheme[0]","Identifier[0]","Identifier Display[0]","Mail Address[0]","Delete Flag","Community[0]"] - assert label_jp == ["#著者ID","WEKO ID","姓[0]","名[0]","言語[0]","フォーマット[0]","姓名・言語 表示/非表示[0]","外部著者ID 識別子[0]","外部著者ID[0]","外部著者ID 表示/非表示[0]","メールアドレス[0]","削除フラグ","コミュニティ[0]"] + assert header == ["#pk_id","authorNameInfo[0].familyName","authorNameInfo[0].firstName","authorNameInfo[0].language","authorNameInfo[0].nameFormat","authorNameInfo[0].nameShowFlg","authorIdInfo[0].idType","authorIdInfo[0].authorId","authorIdInfo[0].authorIdShowFlg","emailInfo[0].email","is_deleted","communityIds[0]"] + assert label_en == ["#WEKO ID","Family Name[0]","Given Name[0]","Language[0]","Name Format[0]","Name Display[0]","Identifier Scheme[0]","Identifier[0]","Identifier Display[0]","Mail Address[0]","Delete Flag","Community[0]"] + assert label_jp == ["#WEKO ID","姓[0]","名[0]","言語[0]","フォーマット[0]","姓名・言語 表示/非表示[0]","外部著者ID 識別子[0]","外部著者ID[0]","外部著者ID 表示/非表示[0]","メールアドレス[0]","削除フラグ","コミュニティ[0]"] - assert data == [[None,None,None,None,None,None,None,None,None,None,None,None,None]] + assert data == [[None,None,None,None,None,None,None,None,None,None,None,None]] # def get_by_range(cls, start_point, sum, with_deleted=True, with_gather=True): @@ -733,55 +730,6 @@ def test_get_by_range_with_community(self, app, db, authors, community): authors_copy.pop(0) assert authors_copy == result -# def get_pk_id_by_weko_id(cls, weko_id): -# .tox/c1/bin/pytest --cov=weko_authors tests/test_api.py::TestWekoAuthors::test_get_pk_id_by_weko_id -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-authors/.tox/c1/tmp - @pytest.mark.parametrize('base_app',[dict( - is_es=True - )],indirect=['base_app']) - def test_get_pk_id_by_weko_id(self, app, mocker): - data = { - "hits": { - "total": 1, - "hits": [ - { - "_source": { - "authorIdInfo": [ - {"idType": 
"1", "authorId": "1111", "authorIdShowFlg": "true"}, - {"idType": "2", "authorId": "1111", "authorIdShowFlg": "true"}, - ], - "pk_id": "1", - }, - }, - ], - }, - } - mock_indexer = RecordIndexer() - mocker.patch("weko_authors.api.RecordIndexer",return_value=mock_indexer) - mock_indexer.client = MockClient() - mock_indexer.client.return_value=data - result = WekoAuthors.get_pk_id_by_weko_id("1111") - assert result == "1" - result = WekoAuthors.get_pk_id_by_weko_id("-1") - assert result == -1 - -# def get_weko_id_by_pk_id(cls, pk_id): -# .tox/c1/bin/pytest --cov=weko_authors tests/test_api.py::TestWekoAuthors::test_get_weko_id_by_pk_id -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-authors/.tox/c1/tmp - @pytest.mark.parametrize('base_app',[dict( - is_es=True - )],indirect=['base_app']) - def test_get_weko_id_by_pk_id(self, app, mocker, authors): - result = WekoAuthors.get_weko_id_by_pk_id("1") - assert result == "1" - result = WekoAuthors.get_weko_id_by_pk_id("-1") - assert result == None - with pytest.raises(Exception): - result = WekoAuthors.get_weko_id_by_pk_id("3") - with pytest.raises(Exception): - result = WekoAuthors.get_weko_id_by_pk_id("4") - with pytest.raises(Exception): - WekoAuthors.get_weko_id_by_pk_id("test_pk_id") - - from sqlalchemy.exc import SQLAlchemyError # .tox/c1/bin/pytest --cov=weko_authors tests/test_api.py::TestWekoAuthorsMappingMaxItem -vv -s --cov-branch --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp @@ -885,8 +833,8 @@ def test_prepare_export_data_full_data(self, app, db, authors_prefix_settings, a assert res_header assert res_label_en assert res_label_jp - assert res_row_data == [['1', '1', 'テスト', '太郎', 'ja', 'familyNmAndNm', 'Y', 'ORCID', '1234', 'Y', 'CiNii', '12345', 'Y', 'test.taro@test.org', '', None, None, None, None, None, None, '', 'ja', 'Y', None, None, None, None, None, None, None, None, None, None, None, None, None, None], - ['2', '2', 'test', 'smith', 'en', 'familyNmAndNm', 'Y', 'ORCID', '5678', 'Y', None, None, None, 'test.smith@test.org', '', 'ISNI', '1234', 'Y', 'GRID', '12345', 'Y', '', 'ja', 'Y', None, None, 'ISNI', '1234', 'Y', 'GRID', '12345', 'Y', '', 'ja', 'Y', None, None, None]] + assert res_row_data == [['1', 'テスト', '太郎', 'ja', 'familyNmAndNm', 'Y', 'ORCID', '1234', 'Y', 'CiNii', '12345', 'Y', 'test.taro@test.org', '', None, None, None, None, None, None, '', 'ja', 'Y', None, None, None, None, None, None, None, None, None, None, None, None, None, None], + ['2', 'test', 'smith', 'en', 'familyNmAndNm', 'Y', 'ORCID', '5678', 'Y', None, None, None, 'test.smith@test.org', '', 'ISNI', '1234', 'Y', 'GRID', '12345', 'Y', '', 'ja', 'Y', None, None, 'ISNI', '1234', 'Y', 'GRID', '12345', 'Y', '', 'ja', 'Y', None, None, None]] @pytest.fixture @@ -1007,9 +955,6 @@ def test_prepare_export_data_all_params_provided(self, app, db, mock_dependencie # マスク処理が正しく適用されていることを確認 assert 'one' in author_row - # WEKO IDが正しく抽出されていることを確認 - assert 'weko123' in author_row - # IDスキームが正しく変換されていることを確認 assert 'ORCID' in author_row diff --git a/modules/weko-authors/tests/test_rest.py b/modules/weko-authors/tests/test_rest.py index a667cf2075..29e1424693 100644 --- a/modules/weko-authors/tests/test_rest.py +++ b/modules/weko-authors/tests/test_rest.py @@ -215,8 +215,6 @@ def test_post_v1(self, app, client_api, auth_headers_noroleuser, auth_headers_sy self.run_post_author(app, client_api, auth_headers_sysadmin, {}, 400, "Bad Request: Invalid payload, {'author': ['Missing data for required field.']}") # 空のリクエスト self.run_post_author(app, 
client_api, auth_headers_sysadmin, {"author": None}, 400, "Bad Request: Invalid payload, {'author': ['Field may not be null.']}") # authorがNone self.run_post_author(app, client_api, auth_headers_sysadmin, {"author": {"authorIdInfo": [{"idType": "ORCID"}]}}, 400, "Both 'idType' and 'authorId' must be provided together.") # idTypeのみ指定 - self.run_post_author(app, client_api, auth_headers_sysadmin, {"author": {"authorIdInfo": [{"idType": "WEKO", "authorId": "A1"}]}}, 400, "The WEKO ID must be numeric characters only.") # WEKO IDが数字以外 - self.run_post_author(app, client_api, auth_headers_sysadmin, {"author": {"authorIdInfo": [{"idType": "WEKO", "authorId": "1"}]}}, 400, "The value is already in use as WEKO ID.") # 既存のWEKO ID self.run_post_author(app, client_api, auth_headers_sysadmin, {"author": {"authorNameInfo": [{"firstName": "John", "familyName": "Doe"}]}}, 400, "If 'firstName' or 'familyName' is provided, 'language' must also be specified.") # language未指定 self.run_post_author(app, client_api, auth_headers_sysadmin, {"author": {"affiliationInfo": "InvalidFormat"}}, 400, "Bad Request: Invalid payload, {'author': {'affiliationInfo': ['Not a valid list.']}}") # affiliationInfoがリストでない self.run_post_author(app, client_api, auth_headers_sysadmin, {"author": {"affiliationInfo": ["InvalidFormat"]}}, 400, "Bad Request: Invalid payload, {'author': {'affiliationInfo': {0: {'_schema': ['Invalid input type.']}}}}") # affiliationInfoのフォーマット不正 @@ -230,8 +228,6 @@ def test_post_v1(self, app, client_api, auth_headers_noroleuser, auth_headers_sy # システムエラーの確認 # DBエラーや例外発生時の動作を確認 self.run_post_author_db_error(app, client_api, auth_headers_sysadmin, self.valid_author_data("ORCID", "")) # DBエラー発生時 - with patch("invenio_search.current_search_client.search", side_effect=Exception): - self.run_post_author(app, client_api, auth_headers_sysadmin, self.valid_author_data("ORCID", ""), 500) # 検索時に例外が発生した場合 with patch("weko_authors.utils.get_author_prefix_obj", return_value=None): self.run_post_author(app, client_api, auth_headers_sysadmin, self.valid_author_data("ORCID", ""), 500) # 著者プレフィックス取得時に例外が発生した場合 with patch("weko_authors.utils.get_author_affiliation_obj", return_value=None): @@ -362,21 +358,6 @@ def test_put_v1(self, app, client_api, auth_headers_noroleuser, auth_headers_sys self.run_put_author(app, client_api, auth_headers_sysadmin, self.valid_update_data_with_community(["community1", "community1"]), es_id, 200, "Author successfully updated.") self.run_put_author(app, client_api, auth_headers_sysadmin, self.valid_update_data_with_community([]), es_id, 200, "Author successfully updated.") - - data_no_weko = { - "author": { - "emailInfo": [{"email": "updated@xxx.co.jp"}], - "authorIdInfo": [{"idType": "ORCID", "authorId": "5", "authorIdShowFlg": "true"}], - "authorNameInfo": [{"language": "en", "firstName": "Jane", "familyName": "Smith", "nameFormat": "familyNmAndNm", "nameShowFlg": "true"}], - "affiliationInfo": [{ - "identifierInfo": [{"affiliationId": "https://ror.org/5678", "affiliationIdType": "ISNI", "identifierShowFlg": "true"}], - "affiliationNameInfo": [{"affiliationName": "NII", "affiliationNameLang": "en", "affiliationNameShowFlg": "true"}], - "affiliationPeriodInfo": [{"periodStart": "2025-02-01", "periodEnd": "2025-04-01"}] - }] - } - } - self.run_put_author(app, client_api, auth_headers_sysadmin, data_no_weko, 1, 400, "At least one WEKO ID must be provided in update.") - # 認証なしのリクエストが拒否されることを確認 self.run_put_author_unauthorized(app, client_api) diff --git a/modules/weko-authors/tests/test_tasks.py 
b/modules/weko-authors/tests/test_tasks.py index 620e18a7ed..687b928a7b 100644 --- a/modules/weko-authors/tests/test_tasks.py +++ b/modules/weko-authors/tests/test_tasks.py @@ -53,7 +53,7 @@ def test_export_all(app,mocker): # .tox/c1/bin/pytest --cov=weko_authors tests/test_tasks.py::test_01_import_author -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-authors/.tox/c1/tmp def test_01_import_author(app): with patch("weko_authors.tasks.import_author_to_system"): - result = import_author({"status":"", "weko_id":"", "current_weko_id":""}, True, {}) + result = import_author({"status":"", "weko_id":""}, True) assert result["status"] == "SUCCESS" @@ -61,7 +61,7 @@ def test_01_import_author(app): # .tox/c1/bin/pytest --cov=weko_authors tests/test_tasks.py::test_02_import_author -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-authors/.tox/c1/tmp def test_02_import_author(app, caplog: LogCaptureFixture): with patch("weko_authors.tasks.import_author_to_system",side_effect=SQLAlchemyError("SQLAlchemyError")): - result = import_author({"status":"", "weko_id":"", "current_weko_id":""}, True, {}) + result = import_author({"status":"", "weko_id":""}, True) info_logs = [record for record in caplog.record_tuples if record[1] == logging.ERROR] expected = [('testapp', logging.ERROR, 'SQLAlchemyError')] * 6 assert info_logs == expected @@ -72,7 +72,7 @@ def test_02_import_author(app, caplog: LogCaptureFixture): # .tox/c1/bin/pytest --cov=weko_authors tests/test_tasks.py::test_03_import_author -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-authors/.tox/c1/tmp def test_03_import_author(app, caplog: LogCaptureFixture): with patch("weko_authors.tasks.import_author_to_system",side_effect=ElasticsearchException("ElasticsearchException")): - result = import_author({"status":"", "weko_id":"", "current_weko_id":""}, True, {}) + result = import_author({"status":"", "weko_id":""}, True) info_logs = [record for record in caplog.record_tuples if record[1] == logging.ERROR] expected = [('testapp', logging.ERROR, 'ElasticsearchException')] * 6 assert info_logs == expected @@ -83,7 +83,7 @@ def test_03_import_author(app, caplog: LogCaptureFixture): # .tox/c1/bin/pytest --cov=weko_authors tests/test_tasks.py::test_04_import_author -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-authors/.tox/c1/tmp def test_04_import_author(app, caplog: LogCaptureFixture): with patch("weko_authors.tasks.import_author_to_system",side_effect=TimeoutError("TimeoutError")): - result = import_author({"status":"", "weko_id":"", "current_weko_id":""}, True, {}) + result = import_author({"status":"", "weko_id":""}, True) info_logs = [record for record in caplog.record_tuples if record[1] == logging.ERROR] expected = [('testapp', logging.ERROR, 'TimeoutError')] * 6 assert info_logs == expected @@ -94,7 +94,7 @@ def test_04_import_author(app, caplog: LogCaptureFixture): # .tox/c1/bin/pytest --cov=weko_authors tests/test_tasks.py::test_05_import_author -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-authors/.tox/c1/tmp def test_05_import_author(app, caplog: LogCaptureFixture): with patch("weko_authors.tasks.import_author_to_system",side_effect=TimeoutError({"error_id": 123, "message": "An error occurred"})): - result = import_author({"status":"", "weko_id":"", "current_weko_id":""}, True, {}) + result = import_author({"status":"", "weko_id":""}, True) info_logs = [record for record in caplog.record_tuples if record[1] == logging.ERROR] expected = 
[('testapp', logging.ERROR, "{'error_id': 123, 'message': 'An error occurred'}")] * 6 assert info_logs == expected @@ -220,14 +220,14 @@ def test_import_authors_from_temp_files_normal_case(self, app2, mock_update_cach # 一時ファイルの内容をモック part1_data = [ - {"pk_id": 1, "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new", "weko_id": "1001", "current_weko_id": "1000"}, - {"pk_id": 2, "authorNameInfo": [{"familyName": "Smith", "firstName": "Jane"}], "status": "update", "weko_id": "1002", "current_weko_id": "1002"}, - {"pk_id": 3, "authorNameInfo": [{"familyName": "Brown", "firstName": "Bob"}], "status": "new", "weko_id": "1003", "current_weko_id": ""}, - {"pk_id": 4, "authorNameInfo": [{"familyName": "Lee", "firstName": "Alice"}], "status": "update", "weko_id": "1004", "current_weko_id": "1004"}, + {"pk_id": 1, "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new", "weko_id": "1001"}, + {"pk_id": 2, "authorNameInfo": [{"familyName": "Smith", "firstName": "Jane"}], "status": "update", "weko_id": "1002"}, + {"pk_id": 3, "authorNameInfo": [{"familyName": "Brown", "firstName": "Bob"}], "status": "new", "weko_id": "1003"}, + {"pk_id": 4, "authorNameInfo": [{"familyName": "Lee", "firstName": "Alice"}], "status": "update", "weko_id": "1004"}, ] part2_data = [ - {"pk_id": 5, "authorNameInfo": [{"familyName": "Lee", "firstName": "Alice"}], "status": "update", "weko_id": "1004", "current_weko_id": "1004"}, + {"pk_id": 5, "authorNameInfo": [{"familyName": "Lee", "firstName": "Alice"}], "status": "update", "weko_id": "1004"}, ] # ファイル読み込みのモック @@ -284,11 +284,11 @@ def test_import_authors_from_temp_files_with_batch_size(self, app2, mock_update_ # 一時ファイルの内容をモック(5人の著者) part1_data = [ - {"pk_id": 1, "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new", "weko_id": "1001", "current_weko_id": ""}, - {"pk_id": 2, "authorNameInfo": [{"familyName": "Smith", "firstName": "Jane"}], "status": "update", "weko_id": "1002", "current_weko_id": "1002"}, - {"pk_id": 3, "authorNameInfo": [{"familyName": "Brown", "firstName": "Bob"}], "status": "new", "weko_id": "1003", "current_weko_id": ""}, - {"pk_id": 4, "authorNameInfo": [{"familyName": "Lee", "firstName": "Alice"}], "status": "update", "weko_id": "1004", "current_weko_id": "1004"}, - {"pk_id": 5, "authorNameInfo": [{"familyName": "Wang", "firstName": "Chen"}], "status": "new", "weko_id": "1005", "current_weko_id": "", "errors":["error"]} + {"pk_id": 1, "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new", "weko_id": "1001"}, + {"pk_id": 2, "authorNameInfo": [{"familyName": "Smith", "firstName": "Jane"}], "status": "update", "weko_id": "1002"}, + {"pk_id": 3, "authorNameInfo": [{"familyName": "Brown", "firstName": "Bob"}], "status": "new", "weko_id": "1003"}, + {"pk_id": 4, "authorNameInfo": [{"familyName": "Lee", "firstName": "Alice"}], "status": "update", "weko_id": "1004"}, + {"pk_id": 5, "authorNameInfo": [{"familyName": "Wang", "firstName": "Chen"}], "status": "new", "weko_id": "1005", "errors":["error"]} ] # ファイル読み込みのモック @@ -334,11 +334,11 @@ def test_import_authors_from_temp_files_file_not_found(self, app2, mock_update_c max_part = 1 # 一時ファイルの内容をモック(5人の著者) part1_data = [ - {"pk_id": 1, "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new", "weko_id": "1001", "current_weko_id": ""}, - {"pk_id": 2, "authorNameInfo": [{"familyName": "Smith", "firstName": "Jane"}], "status": "update", "weko_id": "1002", "current_weko_id": "1002"}, - 
{"pk_id": 3, "authorNameInfo": [{"familyName": "Brown", "firstName": "Bob"}], "status": "new", "weko_id": "1003", "current_weko_id": ""}, - {"pk_id": 4, "authorNameInfo": [{"familyName": "Lee", "firstName": "Alice"}], "status": "update", "weko_id": "1004", "current_weko_id": "1004"}, - {"pk_id": 5, "authorNameInfo": [{"familyName": "Wang", "firstName": "Chen"}], "status": "new", "weko_id": "1005", "current_weko_id": "", "errors":["error"]} + {"pk_id": 1, "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new", "weko_id": "1001"}, + {"pk_id": 2, "authorNameInfo": [{"familyName": "Smith", "firstName": "Jane"}], "status": "update", "weko_id": "1002"}, + {"pk_id": 3, "authorNameInfo": [{"familyName": "Brown", "firstName": "Bob"}], "status": "new", "weko_id": "1003"}, + {"pk_id": 4, "authorNameInfo": [{"familyName": "Lee", "firstName": "Alice"}], "status": "update", "weko_id": "1004"}, + {"pk_id": 5, "authorNameInfo": [{"familyName": "Wang", "firstName": "Chen"}], "status": "new", "weko_id": "1005", "errors":["error"]} ] # ファイル読み込みのモック @@ -419,7 +419,7 @@ def test_import_authors_from_temp_files_file_deletion_error(self, app2, mock_upd # 一時ファイルの内容をモック part1_data = [ - {"pk_id": 1, "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new", "weko_id": "1001", "current_weko_id": ""} + {"pk_id": 1, "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new", "weko_id": "1001"} ] # ファイル読み込みのモック @@ -499,14 +499,12 @@ def test_import_authors_for_over_max_success(self, app2, mock_cache, mock_group_obj, mock_task = mock_group author1 = { "pk_id": 1, - "current_weko_id": None, "weko_id": "author1", "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new" } author2 = { "pk_id": 2, - "current_weko_id": "old_author2", "weko_id": "author2", "authorNameInfo": [{"familyName": "Smith", "firstName": "Jane"}], "status": "update" @@ -566,14 +564,12 @@ def test_import_authors_for_over_max_partial_failure(self, app2, mock_cache, mock_group_obj, mock_task = mock_group author1 = { "pk_id": 1, - "current_weko_id": None, "weko_id": "author1", "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new" } author2 = { "pk_id": 2, - "current_weko_id": "old_author2", "weko_id": "author2", "authorNameInfo": [{"familyName": "Smith", "firstName": "Jane"}], "status": "update" @@ -628,7 +624,6 @@ def test_import_authors_for_over_max_timeout(self, app2, mock_cache, mock_group_obj, mock_task = mock_group author = { "pk_id": 1, - "current_weko_id": None, "weko_id": "author1", "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}], "status": "new" @@ -677,7 +672,6 @@ def test_import_authors_for_over_max_no_summary(self, app2, mock_cache, mock_group_obj, mock_task = mock_group author = { "pk_id": 1, - "current_weko_id": None, "weko_id": "author1", "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}, {"familyName": "山田", "firstName": "太郎"}, @@ -722,7 +716,6 @@ def test_import_authors_for_over_max_for_cover(self, app2, mock_cache, mock_group_obj, mock_task = mock_group author = { "pk_id": 1, - "current_weko_id": None, "weko_id": "author1", "authorNameInfo": [{"familyName": "Doe", "firstName": "John"}, {"familyName": "山田", "firstName": "太郎"}, diff --git a/modules/weko-authors/tests/test_utils.py b/modules/weko-authors/tests/test_utils.py index 593c2ee5b0..fadba287e8 100644 --- a/modules/weko-authors/tests/test_utils.py +++ b/modules/weko-authors/tests/test_utils.py @@ -587,7 +587,6 @@ def 
test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -604,8 +603,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): "communityIds":[] } ] - mocker.patch("weko_authors.utils.WekoAuthors.get_weko_id_by_pk_id",return_value="1") - mocker.patch("weko_authors.utils.check_weko_id_is_exits_for_import",return_value=[]) mock_author = mocker.patch("weko_authors.utils.Authors") mock_author.query.return_value.get.return_value = [MagicMock(communities=MagicMock(id="community_id"))] mocker.patch("weko_authors.utils.validate_community_ids", return_value=[]) @@ -653,13 +650,10 @@ def test_validate_import_data(authors_prefix_settings,mocker): }, ] list_import_id = [] - mocker.patch("weko_authors.utils.WekoAuthors.get_weko_id_by_pk_id",return_value="1") - mocker.patch("weko_authors.utils.check_weko_id_is_exits_for_import",return_value=[]) test = [ { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -678,7 +672,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -756,7 +749,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "", @@ -774,8 +766,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): "communityIds": [] } ] - mocker.patch("weko_authors.utils.WekoAuthors.get_weko_id_by_pk_id",return_value="1") - mocker.patch("weko_authors.utils.check_weko_id_is_exits_for_import",return_value=[]) result = validate_import_data(file_format,file_data,mapping_ids,mapping,list_import_id) assert result == test @@ -833,7 +823,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -851,8 +840,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): "communityIds": [] } ] - mocker.patch("weko_authors.utils.WekoAuthors.get_weko_id_by_pk_id",return_value="1") - mocker.patch("weko_authors.utils.check_weko_id_is_exits_for_import",return_value=[]) result = validate_import_data(file_format,file_data,mapping_ids,mapping,list_import_id) assert result == test @@ -922,7 +909,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): test = [ { "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -936,13 +922,11 @@ def test_validate_import_data(authors_prefix_settings,mocker): "emailInfo": [{"email": "test.taro@test.org"}], "is_deleted": "", "status": "new", - "errors": ["Specified WEKO ID already exist.", "validator error"], + "errors": ["validator error"], "warnings": ["idType warning"], "communityIds": [] } ] - mocker.patch("weko_authors.utils.WekoAuthors.get_weko_id_by_pk_id",return_value="1") - mocker.patch("weko_authors.utils.check_weko_id_is_exits_for_import",return_value=["Specified WEKO ID already exist."]) mocker.patch("weko_authors.utils.validate_by_extend_validator",return_value=["validator error"]) mocker.patch("weko_authors.utils.validate_external_author_identifier",return_value="idType warning") result = validate_import_data(file_format,file_data,mapping_ids,mapping,list_import_id) @@ -1015,7 +999,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", 
"authorNameInfo": [ { "familyName": "テスト", @@ -1032,8 +1015,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): "communityIds": [] } ] - mocker.patch("weko_authors.utils.WekoAuthors.get_weko_id_by_pk_id",return_value="1") - mocker.patch("weko_authors.utils.check_weko_id_is_exits_for_import",return_value=[]) mocker.patch("weko_authors.utils.validate_by_extend_validator",return_value=[]) mocker.patch("weko_authors.utils.validate_external_author_identifier",return_value="") mocker.patch("weko_authors.utils.autofill_data") @@ -1105,7 +1086,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -1188,7 +1168,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -1204,8 +1183,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): "status": "deleted" } ] - mocker.patch("weko_authors.utils.WekoAuthors.get_weko_id_by_pk_id",return_value="1") - mocker.patch("weko_authors.utils.check_weko_id_is_exits_for_import",return_value=[]) mocker.patch("weko_authors.utils.get_count_item_link", return_value=0) mocker.patch("weko_authors.utils.check_delete_author", return_value=(True, None)) result = validate_import_data(file_format,file_data,mapping_ids,mapping,list_import_id) @@ -1274,7 +1251,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -1359,7 +1335,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -1443,7 +1418,6 @@ def test_validate_import_data(authors_prefix_settings,mocker): { "pk_id": "1", "weko_id": "1", - "current_weko_id": "1", "authorNameInfo": [ { "familyName": "テスト", @@ -1627,7 +1601,7 @@ def test_set_record_status(): errors = [] warnings = [] set_record_status(file_format,existed_authors_id,item,errors,warnings) - assert errors == ["Specified Author ID does not exist."] + assert errors == ["Specified WEKO ID does not exist."] assert warnings == [] # is_deleted, existed_authors_id.pk_id is None @@ -1636,7 +1610,7 @@ def test_set_record_status(): warnings = [] set_record_status(file_format,existed_authors_id,item,errors,warnings) assert item["status"] == "deleted" - assert errors == ["Specified Author ID does not exist."] + assert errors == ["Specified WEKO ID does not exist."] assert warnings == [] existed_authors_id = {"1":True} @@ -1662,8 +1636,7 @@ def test_flatten_authors_mapping(): data = WEKO_AUTHORS_FILE_MAPPING test_all=[ - {'key': 'pk_id', 'label': {'en': 'Author ID', 'jp': '著者ID'}, 'mask': {}, 'validation': {}, 'autofill': ''}, - {'key': 'weko_id', 'label': {'en': 'WEKO ID', 'jp': 'WEKO ID'}, 'mask': {}, 'validation': {'validator': {'class_name': 'weko_authors.contrib.validation', 'func_name': 'validate_digits_for_wekoid'}}, 'autofill': ''}, + {'key': 'pk_id', 'label': {'en': 'WEKO ID', 'jp': 'WEKO ID'}, 'mask': {}, 'validation': {}, 'autofill': ''}, {"key":"authorNameInfo[0].familyName","label":{"en":"Family Name","jp":"姓"},"mask":{},"validation":{},"autofill":""}, {"key":"authorNameInfo[0].firstName","label":{"en":"Given Name","jp":"名"},"mask":{},"validation":{},"autofill":""}, {"key":"authorNameInfo[0].language","label":{"en":"Language","jp":"言語"},"mask":{},"validation":{'map': 
['ja', 'ja-Kana', 'en', 'fr','it', 'de', 'es', 'zh-cn', 'zh-tw','ru', 'la', 'ms', 'eo', 'ar', 'el', 'ko']},"autofill":""}, @@ -1677,7 +1650,6 @@ def test_flatten_authors_mapping(): ] test_keys = ["pk_id", - 'weko_id', "authorNameInfo[0].familyName", "authorNameInfo[0].firstName", "authorNameInfo[0].language", @@ -1710,22 +1682,18 @@ def test_import_author_to_system(app, mocker): 'authorIdInfo': [], 'emailInfo': [] } - with patch('weko_authors.utils.check_weko_id_is_exists') as mock_check_weko_id, \ - patch('weko_authors.utils.WekoAuthors') as mock_weko_authors, \ + with patch('weko_authors.utils.WekoAuthors') as mock_weko_authors, \ patch('weko_authors.utils.db.session') as mock_session: - mock_check_weko_id.return_value = False - import_author_to_system(author, status, weko_id, force_change_mode) - mock_check_weko_id.assert_called_once_with(weko_id, '1') + import_author_to_system(author, status, force_change_mode) mock_weko_authors.create.assert_called_once() actual_author = mock_weko_authors.create.call_args[0][0] - assert actual_author == {'pk_id': '1', 'authorNameInfo': [{'familyName': 'テスト', 'firstName': '太郎', 'fullName': 'テスト 太郎'}], 'is_deleted': False, 'authorIdInfo': [{'idType': '1', 'authorId': '1234', 'authorIdShowFlg': 'true'}], 'emailInfo': []} + assert actual_author == {'pk_id': '1', 'authorNameInfo': [{'familyName': 'テスト', 'firstName': '太郎', 'fullName': 'テスト 太郎'}], 'is_deleted': False, 'authorIdInfo': [], 'emailInfo': []} mock_session.commit.assert_called_once() author = {'pk_id': '1', 'authorNameInfo': [{'familyName': 'テスト', 'firstName': '太郎'}]} status = 'update' - weko_id = '1234' force_change_mode = False test = { 'pk_id': '1', @@ -1734,19 +1702,16 @@ def test_import_author_to_system(app, mocker): 'authorIdInfo': [ { "idType": "1", - "authorId": "1234", + "authorId": "1", "authorIdShowFlg": "true" } ], 'emailInfo': [] } - with patch('weko_authors.utils.check_weko_id_is_exists') as mock_check_weko_id, \ - patch('weko_authors.utils.WekoAuthors') as mock_weko_authors, \ + with patch('weko_authors.utils.WekoAuthors') as mock_weko_authors, \ patch('weko_authors.utils.db.session') as mock_session: - mock_check_weko_id.return_value = False - import_author_to_system(author, status, weko_id, force_change_mode) - mock_check_weko_id.assert_called_once_with(weko_id, '1') + import_author_to_system(author, status, force_change_mode) mock_weko_authors.update.assert_called_once() update_args = mock_weko_authors.update.call_args actual_author = update_args[0][1] @@ -1756,7 +1721,6 @@ def test_import_author_to_system(app, mocker): author = {'pk_id': '1', 'authorNameInfo': [{'familyName': 'テスト', 'firstName': '太郎'}]} status = 'deleted' - weko_id = '1234' force_change_mode = False test = { 'pk_id': '1', @@ -1765,21 +1729,18 @@ def test_import_author_to_system(app, mocker): 'authorIdInfo': [ { "idType": "1", - "authorId": "1234", + "authorId": "1", "authorIdShowFlg": "true" } ], 'emailInfo': [] } - with patch('weko_authors.utils.check_weko_id_is_exists') as mock_check_weko_id, \ - patch('weko_authors.utils.WekoAuthors') as mock_weko_authors, \ + with patch('weko_authors.utils.WekoAuthors') as mock_weko_authors, \ patch('weko_authors.utils.db.session') as mock_session, \ patch('weko_authors.utils.get_count_item_link') as mock_get_count_item_link: - mock_check_weko_id.return_value = False mock_get_count_item_link.return_value = 0 - import_author_to_system(author, status, weko_id, force_change_mode) - mock_check_weko_id.assert_called_once_with(weko_id, '1') + import_author_to_system(author, 
status, force_change_mode) mock_weko_authors.update.assert_called_once() update_args = mock_weko_authors.update.call_args actual_author = update_args[0][1] @@ -1787,40 +1748,17 @@ def test_import_author_to_system(app, mocker): assert actual_author == test mock_session.commit.assert_called_once() - author = {'pk_id': '1', 'authorNameInfo': [{'familyName': 'テスト', 'firstName': '太郎'}]} - status = 'new' - weko_id = '1234' - force_change_mode = False - with patch('weko_authors.utils.check_weko_id_is_exists') as mock_check_weko_id: - with pytest.raises(Exception) as ex: - mock_check_weko_id.return_value = True - import_author_to_system(author, status, weko_id, force_change_mode) - assert ex.value.args[0]['error_id'] == "WekoID is duplicated" - assert str(ex.value) == {'error_id': "WekoID is duplicated"} - author = {'pk_id': '1', 'authorNameInfo': [{'familyName': 'テスト', 'firstName': '太郎'}]} status = 'deleted' weko_id = '1234' force_change_mode = False - with patch('weko_authors.utils.check_weko_id_is_exists') as mock_check_weko_id,\ - patch('weko_authors.utils.get_count_item_link') as mock_get_count_item_link: + with patch('weko_authors.utils.get_count_item_link') as mock_get_count_item_link: with pytest.raises(Exception) as ex: - mock_check_weko_id.return_value = False mock_get_count_item_link.return_value = 1 - import_author_to_system(author, status, weko_id, force_change_mode) + import_author_to_system(author, status, force_change_mode) assert ex.value.args[0]['error_id'] == "delete_author_link" assert str(ex.value) == {'error_id': "delete_author_link"} - author = {'pk_id': '1', 'authorNameInfo': [{'familyName': 'テスト', 'firstName': '太郎'}]} - status = 'update' - weko_id = '1234' - force_change_mode = False - with patch('weko_authors.utils.check_weko_id_is_exists') as mock_check_weko_id: - with pytest.raises(Exception) as ex: - mock_check_weko_id.return_value = True - import_author_to_system(author, status, weko_id, force_change_mode) - assert ex.value.args[0]['error_id'] == "WekoID is duplicated" - assert str(ex.value) == {'error_id': "WekoID is duplicated"} # def get_count_item_link(pk_id): # .tox/c1/bin/pytest --cov=weko_authors tests/test_utils.py::test_get_count_item_link -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-authors/.tox/c1/tmp @@ -1877,87 +1815,10 @@ def delete(i): # 6 Not register author data assert count_authors()['count'] == 0 -from weko_authors.utils import validate_weko_id, check_weko_id_is_exists, check_period_date, delete_export_url,\ +from weko_authors.utils import check_period_date, delete_export_url,\ handle_exception, export_prefix,check_file_name, clean_deep from redis.exceptions import RedisError from sqlalchemy.exc import SQLAlchemyError -# .tox/c1/bin/pytest --cov=weko_authors tests/test_utils.py::TestValidateWekoId -vv -s --cov-branch --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp -class TestValidateWekoId: - # 正常系 - def test_validate_weko_id_valid(self, app): - with patch("weko_authors.utils.check_weko_id_is_exists", return_value=False): - result = validate_weko_id("12345") - assert result == (True, None) - - # 異常系 - def test_validate_weko_id_not_half_digit(self, app): - result = validate_weko_id("abcde") - assert result == (False, "not half digit") - - # 異常系 - def test_validate_weko_id_already_exists(self, app): - with patch("weko_authors.utils.check_weko_id_is_exists", return_value=True): - result = validate_weko_id("12345") - assert result == (False, "already exists") - - # 異常系 - def 
test_validate_weko_id_exception(self, app): - with patch("weko_authors.utils.check_weko_id_is_exists", side_effect=Exception("Test Exception")): - with pytest.raises(Exception, match="Test Exception"): - validate_weko_id("12345") - -# .tox/c1/bin/pytest --cov=weko_authors tests/test_utils.py::TestCheckWekoIdIsExists -vv -s --cov-branch --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp -class TestCheckWekoIdIsExists: - # 正常系: weko_idが存在しない場合 - def test_check_weko_id_is_exists_not_exist(self, app): - with patch('weko_authors.utils.RecordIndexer') as MockIndexer: - mock_indexer = MockIndexer.return_value - mock_indexer.client.search.return_value = {'hits': {'hits': []}} - assert check_weko_id_is_exists("12345") == False - - # 正常系: weko_idが存在するが、pk_idが一致する場合 - def test_check_weko_id_is_exists_exist_same_pk_id(self, app): - with patch('weko_authors.utils.RecordIndexer') as MockIndexer: - mock_indexer = MockIndexer.return_value - mock_indexer.client.search.return_value = { - 'hits': { - 'hits': [ - { - '_source': { - 'pk_id': '1', - 'authorIdInfo': [{'idType': '1', 'authorId': '12345'}] - } - } - ] - } - } - assert check_weko_id_is_exists("12345", pk_id="1") == False - - # 異常系: weko_idが存在する場合 - def test_check_weko_id_is_exists_exist(self, app): - with patch('weko_authors.utils.RecordIndexer') as MockIndexer: - mock_indexer = MockIndexer.return_value - mock_indexer.client.search.return_value = { - 'hits': { - 'hits': [ - { - '_source': { - 'pk_id': '2', - 'authorIdInfo': [{'idType': '1', 'authorId': '12345'}] - } - } - ] - } - } - assert check_weko_id_is_exists("12345") == True - - # 異常系: Elasticsearchクライアントが例外をスローする場合 - def test_check_weko_id_is_exists_exception(self, app): - with patch('weko_authors.utils.RecordIndexer') as MockIndexer: - mock_indexer = MockIndexer.return_value - mock_indexer.client.search.side_effect = Exception("Elasticsearch error") - with pytest.raises(Exception): - check_weko_id_is_exists("12345") # .tox/c1/bin/pytest --cov=weko_authors tests/test_utils.py::TestCheckPeriodDate -vv -s --cov-branch --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp class TestCheckPeriodDate: @@ -3500,8 +3361,7 @@ def test_create_result_file_for_user(app, mocker): "No.": "1", "Start Date": "2025-01-01", "End Date": "2025-01-02", - "Previous WEKO ID": "123", - "New WEKO ID": "456", + "WEKO ID": "123", "full_name": "テスト 太郎", "Status": "success", } @@ -3511,8 +3371,7 @@ def test_create_result_file_for_user(app, mocker): "No.": "1", "Start Date": "2025-01-01", "End Date": "2025-01-02", - "Previous WEKO ID": "123", - "New WEKO ID": "456", + "WEKO ID": "123", "full_name": "テスト 太郎", "Status": "success", } @@ -3525,8 +3384,8 @@ def test_create_result_file_for_user(app, mocker): create_result_file_for_user(json) open.assert_any_call({"key":"cache_result_over_max_file_path_key"}, "r", encoding="utf-8") csv_writer = csv.writer.return_value - csv_writer.writerow.assert_any_call(["No.", "Start Date", "End Date", "Previous WEKO ID", "New WEKO ID", "full_name", "Status"]) - csv_writer.writerow.assert_any_call(['1', '2025-01-01', '2025-01-02', '123', '456', 'テスト 太郎', 'success']) + csv_writer.writerow.assert_any_call(["No.", "Start Date", "End Date", "WEKO ID", "full_name", "Status"]) + csv_writer.writerow.assert_any_call(['1', '2025-01-01', '2025-01-02', '123', 'テスト 太郎', 'success']) # Exception mock_logger = mocker.patch("weko_authors.utils.current_app.logger") diff --git a/modules/weko-authors/tests/test_validation.py b/modules/weko-authors/tests/test_validation.py index 
1e526596a5..7a562d490e 100644 --- a/modules/weko-authors/tests/test_validation.py +++ b/modules/weko-authors/tests/test_validation.py @@ -8,10 +8,8 @@ validate_identifier_scheme, validate_external_author_identifier, validate_affiliation_period_end, - validate_digits_for_wekoid, validate_affiliation_identifier_scheme, validate_affiliation_period_start, - check_weko_id_is_exits_for_import ) # .tox/c1/bin/pytest --cov=weko_authors tests/test_validation.py -vv -s --cov-branch --cov-report=term --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp @@ -304,30 +302,6 @@ def test_multiple_values(self): assert len(result) == 2 assert "Period end must be after Period start." in result - -# def validate_digits_for_wekoid(items, values=[]) -# .tox/c1/bin/pytest --cov=weko_authors tests/test_validation.py::test_validate_digits_for_wekoid -vv -s --cov-branch --cov-report=term --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp -def test_validate_digits_for_wekoid(): - items = 'items' - values = [ - {'value': '111'}, - ] - result = validate_digits_for_wekoid(items, values) - assert result == [] - - values_2 = [ - {'value': ''}, - ] - result = validate_digits_for_wekoid(items, values_2) - assert result == ["WEKO ID is required item."] - - values_3 = [ - {'value': '111'}, - ] - result = validate_digits_for_wekoid(items, values_3) - assert result == ["WEKO ID is Half-width digits only"] - - # def validate_affiliation_identifier_scheme(item, values=[]) # .tox/c1/bin/pytest --cov=weko_authors tests/test_validation.py::test_validate_affiliation_identifier_scheme -vv -s --cov-branch --cov-report=term --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp def test_validate_affiliation_identifier_scheme(authors_affiliation_settings): @@ -361,31 +335,3 @@ def test_validate_affiliation_period_start(authors): ] result = validate_affiliation_period_start(item, values_2) assert result == ["External Affiliation Period must be in the format: yyyy-MM-dd, blank. 2025317"] - - -# def check_weko_id_is_exits_for_import(pk_id, weko_id, existed_external_authors_id={}) -# .tox/c1/bin/pytest --cov=weko_authors tests/test_validation.py::test_check_weko_id_is_exits_for_import -vv -s --cov-branch --cov-report=term --cov-report=html --basetemp=/code/modules/weko-authors/.tox/c1/tmp -def test_check_weko_id_is_exits_for_import(authors): - pk_id = '2' - weko_id = '1' - existed_external_authors_id = { - '1': {}, - } - result = check_weko_id_is_exits_for_import(pk_id, weko_id, existed_external_authors_id) - assert result == [] - - pk_id = '2' - weko_id = '1' - existed_external_authors_id = { - '1': {'1': ["2"],}, - } - result = check_weko_id_is_exits_for_import(pk_id, weko_id, existed_external_authors_id) - assert result == [] - - pk_id = '3' - weko_id = '1' - existed_external_authors_id = { - '1': {'1': ["2"],}, - } - result = check_weko_id_is_exits_for_import(pk_id, weko_id, existed_external_authors_id) - assert result == ["Specified WEKO ID already exist."] diff --git a/modules/weko-authors/tests/test_views.py b/modules/weko-authors/tests/test_views.py index 4ef24e607d..50cf8a6c22 100644 --- a/modules/weko-authors/tests/test_views.py +++ b/modules/weko-authors/tests/test_views.py @@ -125,38 +125,6 @@ def test_create_author(client, users, mocker): assert res.status_code == 200 assert get_json(res) == {"msg": "Header Error"} - # 条件: WEKO IDが設定されていない場合 - # 入力: authorIdInfoにidTypeが1のauthorIdが存在しない - # 期待結果: ステータスコード500、メッセージ"Please set WEKO ID." 
- input_data = { - "authorIdInfo": [{"idType": "2", "authorId": "0123"}] - } - res = client.post(url, data=json.dumps(input_data), content_type='application/json') - assert res.status_code == 500 - assert get_json(res) == {"msg": "Please set WEKO ID."} - - # 条件: WEKO IDが半角数字でない場合 - # 入力: authorIdInfoにidTypeが1のauthorIdが半角数字でない - # 期待結果: ステータスコード500、メッセージ"Please set the WEKOID in the half digit." - input_data = { - "authorIdInfo": [{"idType": "1", "authorId": "abc"}] - } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(False, "not half digit")) - res = client.post(url, data=json.dumps(input_data), content_type='application/json') - assert res.status_code == 500 - assert get_json(res) == {"msg": "Please set the WEKOID in the half digit."} - - # 条件: WEKO IDが既に存在する場合 - # 入力: authorIdInfoにidTypeが1のauthorIdが既に存在する - # 期待結果: ステータスコード500、メッセージ"The value is already in use as WEKO ID." - input_data = { - "authorIdInfo": [{"idType": "1", "authorId": "123"}] - } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(False, "already exists")) - res = client.post(url, data=json.dumps(input_data), content_type='application/json') - assert res.status_code == 500 - assert get_json(res) == {"msg": "The value is already in use as WEKO ID."} - # 条件: affiliation start dateとend dateがyyyy-MM-dd形式でない場合 # 入力: affiliation start dateとend dateがyyyy-MM-dd形式でない # 期待結果: ステータスコード500、メッセージ"Please set the affiliation start date and end date in the format yyyy-MM-dd." @@ -164,7 +132,6 @@ def test_create_author(client, users, mocker): "authorIdInfo": [{"idType": "1", "authorId": "123"}], "affiliationInfo": [{"startDate": "2021/01/01", "endDate": "2021/12/31"}] } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.check_period_date', return_value=(False, "not date format")) res = client.post(url, data=json.dumps(input_data), content_type='application/json') assert res.status_code == 500 @@ -177,7 +144,6 @@ def test_create_author(client, users, mocker): "authorIdInfo": [{"idType": "1", "authorId": "123"}], "affiliationInfo": [{"startDate": "2022-01-01", "endDate": "2021-12-31"}] } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.check_period_date', return_value=(False, "start is after end")) res = client.post(url, data=json.dumps(input_data), content_type='application/json') assert res.status_code == 500 @@ -190,7 +156,6 @@ def test_create_author(client, users, mocker): "authorIdInfo": [{"idType": "1", "authorId": "123"}], "affiliationInfo": [{"startDate": "2021-01-01", "endDate": "2021-12-31"}] } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.check_period_date', return_value=(True, "")) mocker.patch('weko_authors.views.WekoAuthors.create', return_value=None) res = client.post(url, data=json.dumps(input_data), content_type='application/json') @@ -204,25 +169,12 @@ def test_create_author(client, users, mocker): "authorIdInfo": [{"idType": "1", "authorId": "123"}], "affiliationInfo": [{"startDate": "2021-01-01", "endDate": "2021-12-31"}] } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.check_period_date', return_value=(True, "")) mocker.patch('weko_authors.views.WekoAuthors.create', side_effect=Exception("test_error")) res = client.post(url, data=json.dumps(input_data), content_type='application/json') assert res.status_code == 500 assert 
get_json(res) == {"msg": "Failed"} - # 条件: validate_weko_idで例外が発生する場合 - # 入力: 正常なauthorIdInfoとaffiliationInfo - # 期待結果: ステータスコード500、メッセージ"Failed" - input_data = { - "authorIdInfo": [{"idType": "1", "authorId": "123"}], - "affiliationInfo": [{"startDate": "2021-01-01", "endDate": "2021-12-31"}] - } - mocker.patch('weko_authors.views.validate_weko_id', side_effect=Exception("test_error")) - res = client.post(url, data=json.dumps(input_data), content_type='application/json') - assert res.status_code == 500 - assert get_json(res) == {"msg": "Failed"} - # 条件: validate_community_idsで例外が発生する場合 # 入力: 正常なauthorIdInfoとaffiliationInfo # 期待結果: ステータスコード400、メッセージ"Failed" @@ -231,7 +183,6 @@ def test_create_author(client, users, mocker): "affiliationInfo": [{"startDate": "2021-01-01", "endDate": "2021-12-31"}], "communityIds": ["invalid_id"] } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.WekoAuthors.create', return_value=None) res = client.post(url, data=json.dumps(input_data), content_type='application/json') assert res.status_code == 400 @@ -283,50 +234,6 @@ def test_update_author(client, users, mocker): assert res.status_code == 200 assert get_json(res) == {"msg": "Header Error"} - # 条件: WEKO IDが設定されていない場合 - # 入力: authorIdInfoにidTypeが1のauthorIdが存在しない - # 期待結果: ステータスコード500、メッセージ"Please set WEKO ID." - input_data = { - "forceChangeFlag": "false", - "author":{ - "pk_id": "1", - "authorIdInfo": [{"idType": "2", "authorId": "0123"}] - } - } - res = client.post(url, data=json.dumps(input_data), content_type='application/json') - assert res.status_code == 500 - assert get_json(res) == {"msg": "Please set WEKO ID."} - - # 条件: WEKO IDが半角数字でない場合 - # 入力: authorIdInfoにidTypeが1のauthorIdが半角数字でない - # 期待結果: ステータスコード500、メッセージ"Please set the WEKOID in the half digit." - input_data = { - "forceChangeFlag": "false", - "author":{ - "pk_id": "1", - "authorIdInfo": [{"idType": "1", "authorId": "abc"}] - } - } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(False, "not half digit")) - res = client.post(url, data=json.dumps(input_data), content_type='application/json') - assert res.status_code == 500 - assert get_json(res) == {"msg": "Please set the WEKOID in the half digit."} - - # 条件: WEKO IDが既に存在する場合 - # 入力: authorIdInfoにidTypeが1のauthorIdが既に存在する - # 期待結果: ステータスコード500、メッセージ"The value is already in use as WEKO ID." - input_data = { - "forceChangeFlag": "false", - "author":{ - "pk_id": "1", - "authorIdInfo": [{"idType": "1", "authorId": "123"}] - } - } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(False, "already exists")) - res = client.post(url, data=json.dumps(input_data), content_type='application/json') - assert res.status_code == 500 - assert get_json(res) == {"msg": "The value is already in use as WEKO ID."} - # 条件: affiliation start dateとend dateがyyyy-MM-dd形式でない場合 # 入力: affiliation start dateとend dateがyyyy-MM-dd形式でない # 期待結果: ステータスコード500、メッセージ"Please set the affiliation start date and end date in the format yyyy-MM-dd." 
@@ -338,7 +245,6 @@ def test_update_author(client, users, mocker): "affiliationInfo": [{"startDate": "2021/01/01", "endDate": "2021/12/31"}] } } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.check_period_date', return_value=(False, "not date format")) res = client.post(url, data=json.dumps(input_data), content_type='application/json') assert res.status_code == 500 @@ -355,7 +261,6 @@ def test_update_author(client, users, mocker): "affiliationInfo": [{"startDate": "2022-01-01", "endDate": "2021-12-31"}] } } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.check_period_date', return_value=(False, "start is after end")) res = client.post(url, data=json.dumps(input_data), content_type='application/json') assert res.status_code == 500 @@ -372,7 +277,6 @@ def test_update_author(client, users, mocker): "affiliationInfo": [{"startDate": "2021-01-01", "endDate": "2021-12-31"}] } } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.check_period_date', return_value=(True, "")) mocker.patch('weko_authors.views.WekoAuthors.update', return_value=None) mock_author = mocker.patch("weko_authors.views.Authors") @@ -392,29 +296,12 @@ def test_update_author(client, users, mocker): "affiliationInfo": [{"startDate": "2021-01-01", "endDate": "2021-12-31"}] } } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.check_period_date', return_value=(True, "")) mocker.patch('weko_authors.views.WekoAuthors.update', side_effect=Exception("test_error")) res = client.post(url, data=json.dumps(input_data), content_type='application/json') assert res.status_code == 500 assert get_json(res) == {"msg": "Failed"} - # 条件: validate_weko_idで例外が発生する場合 - # 入力: 正常なauthorIdInfoとaffiliationInfo - # 期待結果: ステータスコード500、メッセージ"Failed" - input_data = { - "forceChangeFlag": "false", - "author":{ - "pk_id": "1", - "authorIdInfo": [{"idType": "1", "authorId": "123"}], - "affiliationInfo": [{"startDate": "2021-01-01", "endDate": "2021-12-31"}] - } - } - mocker.patch('weko_authors.views.validate_weko_id', side_effect=Exception("test_error")) - res = client.post(url, data=json.dumps(input_data), content_type='application/json') - assert res.status_code == 500 - assert get_json(res) == {"msg": "Failed"} - # 条件: validate_community_idsで例外が発生する場合 # 入力: 正常なauthorIdInfoとaffiliationInfo # 期待結果: ステータスコード500、メッセージ"Failed" @@ -427,7 +314,6 @@ def test_update_author(client, users, mocker): "communityIds": ["invalid_id"] } } - mocker.patch('weko_authors.views.validate_weko_id', return_value=(True, "")) mocker.patch('weko_authors.views.WekoAuthors.update', return_value=None) res = client.post(url, data=json.dumps(input_data), content_type='application/json') assert res.status_code == 400 @@ -1797,64 +1683,3 @@ def test_dbsession_clean(app, db): dbsession_clean(Exception) assert ItemTypeName.query.filter_by(id=3).first() is None - -# def get_max_weko_id(): -# .tox/c1/bin/pytest --cov=weko_authors tests/test_views.py::test_get_max_weko_id -vv -s --cov-branch --cov-report=term --basetemp=/code/modules/weko-authors/.tox/c1/tmp -def test_get_max_weko_id(client, users, mocker ): - class MockClient(): - def __init__(self,data): - self.data = data - def search(self,index=None,body=None,scroll='2m'): - return self.data[index] - def scroll(self,scroll_id='scroll_id',scroll='2m'): - self.data['test-authors']['hits'] 
={"hits":{"hits":[]}} - return self.data['test-authors']['hits'] - - url = url_for("weko_authors.get_max_weko_id") - login_user_via_session(client=client, email=users[0]['email']) - - data = { - "test-authors": { - "hits": { - "hits": [ - {"_source": {"authorIdInfo": [{"authorId": "2",'idType': '1'}]}, 'pk_id': 'xxx'} - ] - }, - "_scroll_id": "AAA" - }, - "test-weko": { - "hits": { - "total": 1 - } - } - } - - record_indexer = RecordIndexer() - record_indexer.client=MockClient(data) - mocker.patch("weko_authors.views.RecordIndexer",return_value=record_indexer) - test = {'max_author_id': 2} - res = client.get(url) - assert get_json(res) == test - - data_2 = { - "test-authors": { - "hits": { - "hits": [ - {"_source": {}, 'pk_id': 'xxx'} - ] - }, - "_scroll_id": "AAA" - }, - "test-weko": { - "hits": { - "total": 1 - } - } - } - - record_indexer = RecordIndexer() - record_indexer.client=MockClient(data_2) - mocker.patch("weko_authors.views.RecordIndexer",return_value=record_indexer) - test = {'max_author_id': 0} - res = client.get(url) - assert get_json(res) == test diff --git a/modules/weko-authors/weko_authors/admin.py b/modules/weko-authors/weko_authors/admin.py index 47b816a93c..5afa3d1ecd 100644 --- a/modules/weko-authors/weko_authors/admin.py +++ b/modules/weko-authors/weko_authors/admin.py @@ -519,8 +519,6 @@ def import_authors(self) -> jsonify: tasks.append({ 'task_id': task.task_id, 'record_id': records[idx].get('pk_id'), - 'previous_weko_id': records[idx].get('current_weko_id'), - 'new_weko_id': records[idx].get('weko_id'), 'status': 'PENDING' }) task_ids.append(task.task_id) diff --git a/modules/weko-authors/weko_authors/api.py b/modules/weko-authors/weko_authors/api.py index 417e26ea88..c33ce1aed2 100644 --- a/modules/weko-authors/weko_authors/api.py +++ b/modules/weko-authors/weko_authors/api.py @@ -59,6 +59,14 @@ def create(cls, data): data["pk_id"] = str(new_id) data["gather_flg"] = 0 community_ids = data.pop("communityIds", []) + data["authorIdInfo"].insert( + 0, + { + "idType": "1", + "authorId": str(new_id), + "authorIdShowFlg": "true" + } + ) es_id = str(uuid.uuid4()) es_data = json.loads(json.dumps(data)) @@ -232,7 +240,7 @@ def get_author_for_validation(cls): continue for authorIdInfo in metadata.get('authorIdInfo', {}): idType = authorIdInfo.get('idType') - if idType: + if idType and idType != '1': author_ids = existed_external_authors_id.get(idType, {}) weko_ids = author_ids.get(authorIdInfo.get('authorId'), []) weko_ids.append(str(author.id)) @@ -241,68 +249,6 @@ def get_author_for_validation(cls): return existed_authors_id, existed_external_authors_id - @classmethod - def get_pk_id_by_weko_id(cls, weko_id): - """ - Get pk_id by weko_id. - """ - query = { - "_source": ["pk_id", "authorIdInfo"], - "query": { - "bool": { - "must": [ - {"term": {"authorIdInfo.authorId": weko_id}}, - {"term": {"gather_flg": {"value": 0}}} - ], - "must_not": [ - {"term": {"is_deleted": True}} - ] - } - } - } - - # Search - indexer = RecordIndexer() - result = indexer.client.search( - index=current_app.config['WEKO_AUTHORS_ES_INDEX_NAME'], - body=query - ) - - for res in result['hits']['hits']: - author_id_info_from_es = res['_source']['authorIdInfo'] - for info in author_id_info_from_es: - if info.get('idType') == '1': - author_id = info.get('authorId') - if author_id == weko_id: - pk_id = res['_source']['pk_id'] - return pk_id - return -1 - - @classmethod - def get_weko_id_by_pk_id(cls, pk_id): - """Get weko_id from pk_id. 
- - Args: - pk_id (str): pk_id - - Returns: - weko_id :str - """ - try: - with db.session.begin_nested(): - author = Authors.query.filter_by(id=pk_id).one_or_none() - if not author: - return None - json = author.json - for author_id_info in json["authorIdInfo"]: - if author_id_info["idType"] == "1": - weko_id = author_id_info["authorId"] - break - return weko_id - except Exception as ex: - current_app.logger.error("Failed to get weko_id by pk_id") - raise - @classmethod def get_used_scheme_of_id_prefix(cls): """get used scheme of id prefix.""" @@ -625,12 +571,6 @@ def prepare_export_data(cls, mappings, affiliation_mappings, community_mappings, None ) ) - elif mapping["json_id"] == "weko_id": - id_info = json_data["authorIdInfo"][0] - if id_info["idType"] == "1": - row.append(id_info["authorId"]) - else: - row.append(None) else: row.append(json_data.get(mapping['json_id'])) diff --git a/modules/weko-authors/weko_authors/config.py b/modules/weko-authors/weko_authors/config.py index 2241935262..826c8b605c 100644 --- a/modules/weko-authors/weko_authors/config.py +++ b/modules/weko-authors/weko_authors/config.py @@ -80,22 +80,10 @@ WEKO_AUTHORS_FILE_MAPPING = [ { - "json_id": "pk_id", - "label_en": "Author ID", - "label_jp": "著者ID" + 'label_en': 'WEKO ID', + 'label_jp': 'WEKO ID', + 'json_id': 'pk_id' }, - { - "json_id": "weko_id", - "label_en": "WEKO ID", - "label_jp": "WEKO ID", - 'validation': { - 'validator': { - 'class_name': 'weko_authors.contrib.validation', - 'func_name': 'validate_digits_for_wekoid' - } - } - }, - { 'json_id': 'authorNameInfo', 'child': [ diff --git a/modules/weko-authors/weko_authors/contrib/validation.py b/modules/weko-authors/weko_authors/contrib/validation.py index e9f695f752..05b087036d 100644 --- a/modules/weko-authors/weko_authors/contrib/validation.py +++ b/modules/weko-authors/weko_authors/contrib/validation.py @@ -93,32 +93,6 @@ def validate_map(values=[], _map=[]): errors_key.append(val['key']) return errors_key -def validate_digits_for_wekoid(items, values=[]): - """ - weko_idについてのバリデーションチェックします。 - 以下の場合にエラーを追加します。 - ・weko_idが存在しない場合 - ・weko_idが半角数字でない場合 - - Args: - item (dict): インポートされるデータをdictに直したもの、ここでは使わない - values (list, optional): List values with key path. Defaults to []. - - Returns: - list: List errors message. - """ - errors = [] - err_msg_format = _("WEKO ID is Half-width digits only") - err_msg = _("WEKO ID is required item.") - for val in values: - weko_id = val["value"] - if weko_id: - if not bool(re.fullmatch(r'[0-9]+', weko_id)): - errors.append(err_msg_format) - else: - errors.append(err_msg) - return errors - def validate_identifier_scheme(item, values=[]): """Validate Identifier Scheme. @@ -257,23 +231,3 @@ def validate_external_author_identifier(item, values=[], return msg.format('
'.join(warnings)) return None - -def check_weko_id_is_exits_for_import(pk_id, weko_id, existed_external_authors_id={}): - """weko_idがexisted_external_authors_idに存在するか確認します。 - 存在し、かつpk_idが一致しなかった場合、エラーを出します。 - - Args: - pk_id (str): pk_id - weko_id (str): weko_id - existed_external_authors_id (dict, optional): (object, optional): Existed external - author id. Defaults to {}. - - Returns: - list: List metadata paths are errors. - """ - errors = [] - err_msg = _("Specified WEKO ID already exist.") - exists_id = existed_external_authors_id["1"].get(weko_id) - if exists_id and not (pk_id in exists_id) : - errors.append(err_msg) - return errors diff --git a/modules/weko-authors/weko_authors/rest.py b/modules/weko-authors/weko_authors/rest.py index f54bb9bd01..d3c55e498c 100644 --- a/modules/weko-authors/weko_authors/rest.py +++ b/modules/weko-authors/weko_authors/rest.py @@ -40,7 +40,7 @@ ) from weko_authors.api import WekoAuthors from weko_authors.utils import ( - validate_weko_id, check_period_date, validate_community_ids, check_delete_author + check_period_date, validate_community_ids, check_delete_author ) from .errors import ( @@ -387,7 +387,6 @@ def post_v1(self, **kwargs): prefix_schemes, affiliation_schemes ) - self.validate_author_data(author_data, author_data.get("pk_id")) try: author_data["communityIds"] = validate_community_ids( author_data.get("communityIds", []), is_create=True @@ -397,7 +396,6 @@ def post_v1(self, **kwargs): raise author_data = self.process_authors_data_before(author_data) - self.handle_weko_id(author_data) WekoAuthors.create(author_data) @@ -431,34 +429,6 @@ def post_v1(self, **kwargs): traceback.print_exc() raise InternalServerError("Internal server error.") - def validate_author_data(self, author_data, pk_id=None): - """Validate author data. - Args: - author_data (dict): The author data to validate. - pk_id (str, optional): The primary key ID of the author. - - Raises: - InvalidDataRESTError: If the author data is invalid. - """ - for auth_id in author_data.get("authorIdInfo", []): - id_type = auth_id.get("idType") - author_id = auth_id.get("authorId") - current_app.logger.debug(f"Validating author ID: {author_id} with type: {id_type}") - if id_type == "WEKO": - current_app.logger.debug(f"Validating WEKO ID: {author_id}") - is_valid, error_msg = validate_weko_id(author_id, pk_id) - if not is_valid and error_msg == "not half digit": - current_app.logger.error("The WEKO ID must be numeric characters only.") - raise InvalidDataRESTError( - description="Bad Request: The WEKO ID must be numeric characters only." - ) - if not is_valid and error_msg == "already exists": - current_app.logger.error("The value is already in use as WEKO ID.") - raise InvalidDataRESTError( - description="Bad Request: The value is already in use as WEKO ID." - ) - - def validate_request(self, request, schema): """Validate the request. 
@@ -498,53 +468,6 @@ def validate_request(self, request, schema): return request_data - def handle_weko_id(self, author_data): - author_id_info = author_data.get("authorIdInfo", []) - has_weko_id = any(auth_id.get("idType") == "1" for auth_id in author_id_info) - - if not has_weko_id: - search_index = current_app.config.get("WEKO_AUTHORS_ES_INDEX_NAME") - - query = { - "size": 0, - "aggs": { - "max_weko_id": { - "max": { - "script": { - "source": """ - int maxId = 0; - for (def entry : params._source.authorIdInfo) { - if (entry.idType == '1') { - try { - int id = Integer.parseInt(entry.authorId); - if (id > maxId) maxId = id; - } catch (Exception e) { - // Ignore non-numeric values - } - } - } - return maxId; - """, - "lang": "painless" - } - } - } - } - } - - search_results = current_search_client.search(index=search_index, body=query) - max_weko_id = search_results["aggregations"]["max_weko_id"]["value"] or 0 - - new_weko_id = str(int(max_weko_id) + 1) - - author_id_info.append({ - "idType": "1", - "authorId": new_weko_id, - "authorIdShowFlg": "true" - }) - - author_data["authorIdInfo"] = author_id_info - def put_v1(self, **kwargs): """Handle PUT request for author update.""" from weko_authors.models import Authors @@ -602,7 +525,6 @@ def put_v1(self, **kwargs): if author_by_es and not pk_id: pk_id = author_by_es.get("pk_id") - self.validate_author_data(author_data, pk_id) community_ids = author_data.get("communityIds", []) if author_by_pk: old = author_by_pk diff --git a/modules/weko-authors/weko_authors/schema.py b/modules/weko-authors/weko_authors/schema.py index 6d86811db2..b3ff845aa8 100644 --- a/modules/weko-authors/weko_authors/schema.py +++ b/modules/weko-authors/weko_authors/schema.py @@ -208,21 +208,6 @@ def validate_not_empty(self, data, **kwargs): if not data: raise ValidationError("author can not be null.") - -class AuthorUpdateSchema(AuthorSchema): - """Schema for updating author information.""" - - @validates_schema - def validate_weko_id_required(self, data, **kwargs): - """Ensure at least one WEKO ID is included on update.""" - author_id_info = data.get("authorIdInfo", []) - if not any(item.get("idType") == "WEKO" for item in author_id_info): - raise ValidationError( - "At least one WEKO ID must be provided in update.", - field_name="authorIdInfo" - ) - - class AuthorCreateRequestSchema(Schema): """Request schema for creating a new author.""" diff --git a/modules/weko-authors/weko_authors/tasks.py b/modules/weko-authors/weko_authors/tasks.py index 1c45435795..41632197c3 100644 --- a/modules/weko-authors/weko_authors/tasks.py +++ b/modules/weko-authors/weko_authors/tasks.py @@ -70,12 +70,11 @@ def export_all(export_target, user_id): @shared_task -def import_author(author, force_change_mode, request_info): +def import_author(author, request_info): """Import Author. Args: author (dict): Author data to import. - force_change_mode (bool): Whether to force change mode. request_info (dict): Request information for logging. 
Returns: @@ -86,18 +85,12 @@ def import_author(author, force_change_mode, request_info): retrys = current_app.config["WEKO_AUTHORS_BULK_EXPORT_MAX_RETRY"] interval = current_app.config["WEKO_AUTHORS_BULK_EXPORT_RETRY_INTERVAL"] status = author['status'] - weko_id = author['weko_id'] del author['status'] - del author["weko_id"] - del author["current_weko_id"] try: # Retry processing in case of connection error for attempt in range(retrys): try: - import_author_to_system( - author, status, weko_id, force_change_mode, - request_info=request_info - ) + import_author_to_system(author, status) result['status'] = states.SUCCESS break except SQLAlchemyError as ex: @@ -318,9 +311,7 @@ def import_authors_for_over_max(authors, request_info=None): full_name_info += f"\n{full_name}" tasks.append({ 'task_id': task.task_id, - 'record_id': authors[idx].get('pk_id'), - 'previous_weko_id': authors[idx].get('current_weko_id'), - 'new_weko_id': authors[idx].get('weko_id'), + 'weko_id': authors[idx].get('pk_id'), 'full_name': full_name_info, 'type': authors[idx].get('status'), 'status': 'PENDING' @@ -360,8 +351,7 @@ def import_authors_for_over_max(authors, request_info=None): result.append({ "start_date": start_date, "end_date": end_date, - 'previous_weko_id': _task.get('previous_weko_id'), - 'new_weko_id': _task.get('new_weko_id'), + "weko_id": _task['weko_id'], "full_name": _task['full_name'], "type": _task['type'], "status": status, @@ -406,16 +396,14 @@ def write_result_temp_file(result): for res in result: start_date = res.get("start_date", "") end_date = res.get("end_date", "") - prev_weko_id= res.get('previous_weko_id', "") - new_weko_id= res.get('new_weko_id', "") + weko_id = res.get("weko_id", "") full_name = res.get("full_name", "") type = res.get("type", "") status = res.get("status", "") error_id = res.get("error_id", "") msg = prepare_display_status(status, type, error_id) - writer.writerow(["", start_date, end_date, prev_weko_id, new_weko_id, full_name, msg]) - + writer.writerow(["", start_date, end_date, weko_id, full_name, msg]) except Exception as e: current_app.logger.error(e) diff --git a/modules/weko-authors/weko_authors/templates/weko_authors/admin/author_import.html b/modules/weko-authors/weko_authors/templates/weko_authors/admin/author_import.html index fbc4a77898..9b10559cee 100644 --- a/modules/weko-authors/weko_authors/templates/weko_authors/admin/author_import.html +++ b/modules/weko-authors/weko_authors/templates/weko_authors/admin/author_import.html @@ -43,10 +43,7 @@ - - - - + diff --git a/modules/weko-authors/weko_authors/translations/en/LC_MESSAGES/messages.po b/modules/weko-authors/weko_authors/translations/en/LC_MESSAGES/messages.po index 97c57d6721..59326834e6 100644 --- a/modules/weko-authors/weko_authors/translations/en/LC_MESSAGES/messages.po +++ b/modules/weko-authors/weko_authors/translations/en/LC_MESSAGES/messages.po @@ -179,7 +179,7 @@ msgid "Delete" msgstr "" #: weko_authors/utils.py:1364 -msgid "Specified Author ID does not exist." +msgid "Specified WEKO ID does not exist." msgstr "" #: weko_authors/utils.py:1376 @@ -296,10 +296,6 @@ msgstr "" msgid "External author identifier exists in DB.
{}" msgstr "" -#: weko_authors/contrib/validation.py:274 -msgid "Specified WEKO ID already exist." -msgstr "" - #: weko_authors/static/js/weko_authors/app-author-import.chunk.js:2 msgid "TYPED_ARRAY_TAG" msgstr "" diff --git a/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.po b/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.po index 6387f316cf..a7c9e17939 100644 --- a/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.po +++ b/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.po @@ -179,8 +179,8 @@ msgid "Delete" msgstr "" #: weko_authors/utils.py:1364 -msgid "Specified Author ID does not exist." -msgstr "指定されたpk_idが存在していません。" +msgid "Specified WEKO ID does not exist." +msgstr "指定されたWEKO IDが存在していません。" #: weko_authors/utils.py:1376 msgid "" @@ -296,10 +296,6 @@ msgstr "" msgid "External author identifier exists in DB.
{}" msgstr "外部著者識別子がDBに存在しています。
{}" -#: weko_authors/contrib/validation.py:274 -msgid "Specified WEKO ID already exist." -msgstr "" - #: weko_authors/static/js/weko_authors/app-author-import.chunk.js:2 msgid "TYPED_ARRAY_TAG" msgstr "" diff --git a/modules/weko-authors/weko_authors/translations/messages.pot b/modules/weko-authors/weko_authors/translations/messages.pot index edadd4bb0b..dd190bc9c4 100644 --- a/modules/weko-authors/weko_authors/translations/messages.pot +++ b/modules/weko-authors/weko_authors/translations/messages.pot @@ -178,7 +178,7 @@ msgid "Delete" msgstr "" #: weko_authors/utils.py:1364 -msgid "Specified Author ID does not exist." +msgid "Specified WEKO ID does not exist." msgstr "" #: weko_authors/utils.py:1376 @@ -295,10 +295,6 @@ msgstr "" msgid "External author identifier exists in DB.
{}" msgstr "" -#: weko_authors/contrib/validation.py:274 -msgid "Specified WEKO ID already exist." -msgstr "" - #: weko_authors/static/js/weko_authors/app-author-import.chunk.js:2 msgid "TYPED_ARRAY_TAG" msgstr "" diff --git a/modules/weko-authors/weko_authors/utils.py b/modules/weko-authors/weko_authors/utils.py index d0bb809dd7..498a17e642 100644 --- a/modules/weko-authors/weko_authors/utils.py +++ b/modules/weko-authors/weko_authors/utils.py @@ -52,7 +52,7 @@ from .contrib.validation import ( validate_by_extend_validator, validate_external_author_identifier, - validate_map, validate_required, check_weko_id_is_exits_for_import + validate_map, validate_required ) from .api import WekoAuthors from .errors import AuthorsValidationError, AuthorsPermissionError @@ -160,82 +160,6 @@ def check_email_existed(email: str): 'author_id': '' } -def validate_weko_id(weko_id, pk_id = None): - """Validate WEKO ID. - - Args: - weko_id (str): WEKO ID. - pk_id (str, optional): Primary key ID. - - Returns: - tuple: (bool, str or None) - """ - if not bool(re.fullmatch(r'[0-9]+', weko_id)): - return False, "not half digit" - - try: - result = check_weko_id_is_exists(weko_id, pk_id) - except Exception as ex: - current_app.logger.error(ex) - raise ex - - if result == True: - return False, "already exists" - return True, None - -def check_weko_id_is_exists(weko_id, pk_id = None): - """Check if weko_id exists in Elasticsearch. - - If author_id is the same as pk_id, skip checking. - weko_id is the value of authorId where authorIdInfo.Idtype is 1. - - Args: - weko_id (str): WEKO ID. - pk_id (str, optional): Primary key ID. - - Returns: - bool: True if exists, False otherwise. - """ - query = { - "_source": ["pk_id", "authorIdInfo"], # Get Author id info field only - "query": { - "bool": { - "must": [ - { - "term": { - "authorIdInfo.authorId": weko_id - } - }, - {"term": {"gather_flg": {"value": 0}}} - ], - "must_not": [ - {"term": {"is_deleted": True}} - ] - } - } - } - - # search from elasticsearch - indexer = RecordIndexer() - result = indexer.client.search( - index=current_app.config['WEKO_AUTHORS_ES_INDEX_NAME'], - body=query - ) - - # if same weko_id exists, return True - for res in result['hits']['hits']: - # if same author_id exists, skip checking - if pk_id and pk_id == res['_source']['pk_id']: - continue - author_id_info_from_es = res['_source']['authorIdInfo'] - for info in author_id_info_from_es: - if info.get('idType') == '1': - author_id = info.get('authorId') - if author_id == weko_id: - return True - return False - - def check_period_date(data): """Check period date. 
@@ -893,18 +817,12 @@ def validate_import_data(file_format, file_data, mapping_ids, mapping, list_impo errors = [] warnings = [] - pk_id = item.get('pk_id') - weko_id = item.get("weko_id") - current_weko_id = WekoAuthors.get_weko_id_by_pk_id(pk_id) - item["current_weko_id"] = current_weko_id - errors_msg = check_weko_id_is_exits_for_import(pk_id, weko_id, existed_external_authors_id) - if errors_msg: - errors.extend(errors_msg) + weko_id = item.get('pk_id') # check duplication WEKO ID - if pk_id and pk_id not in list_import_id: - list_import_id.append(pk_id) - elif pk_id: + if weko_id and weko_id not in list_import_id: + list_import_id.append(weko_id) + elif weko_id: errors.append(_('There is duplicated data in the {} file.').format(file_format)) # set status @@ -915,11 +833,11 @@ def validate_import_data(file_format, file_data, mapping_ids, mapping, list_impo if item.get('status') == 'new': item["communityIds"] = validate_community_ids(community_ids, is_create=True) elif item.get('status') == 'update': - old = Authors.query.get(pk_id) + old = Authors.query.get('pk_id') old_community_ids = [c.id for c in old.communities] item["communityIds"] = validate_community_ids(community_ids, old_ids=old_community_ids) elif item.get('status') == 'deleted': - check, message = check_delete_author(pk_id) + check, message = check_delete_author('pk_id') if not check: errors.append(message) except AuthorsValidationError as e: @@ -1223,7 +1141,7 @@ def band_check_file_for_user(max_page): try: with open(check_file_path, 'w', newline='', encoding='utf-8') as file: writer = csv.writer(file, delimiter='\t') - writer.writerow(["No.", "Current WEKO ID", "New WEKO ID", "full_name", "MailAddress", "Check Result"]) + writer.writerow(["No.", "WEKO ID", "full_name", "MailAddress", "Check Result"]) for i in range(1, max_page+1): part_check_file_name = f"{check_file_name}-part{i}" check_file_part_path = os.path.join(temp_folder_path, part_check_file_name) @@ -1243,15 +1161,13 @@ def band_check_file_for_user(max_page): else: full_name_info += f"\n{full_name}" - current_weko_id = entry.get("current_weko_id", "") - new_weko_id = entry.get("weko_id", "") email = "" if entry.get("emailInfo", [{}]): email_info = entry.get("emailInfo", [{}])[0] email = email_info.get("email", "") check_result = get_check_result(entry) - writer.writerow([index, current_weko_id, new_weko_id, full_name_info, email, check_result]) + writer.writerow([index, pk_id, full_name_info, email, check_result]) except Exception as ex: raise @@ -1361,7 +1277,7 @@ def set_record_status(file_format, list_existed_author_id, item, errors, warning """Set status to import data.""" item['status'] = 'new' pk_id = item.get('pk_id') - err_msg = _("Specified Author ID does not exist.") + err_msg = _("Specified WEKO ID does not exist.") if item.get('is_deleted', '') == 'D': item['status'] = 'deleted' if not pk_id or list_existed_author_id.get(pk_id) is None: @@ -1454,7 +1370,7 @@ def prepare_import_data(max_page_for_import_tab): return authors, reached_point, count def import_author_to_system( - author, status, weko_id, force_change_mode, request_info=None + author, status, force_change_mode, request_info=None ): """Import author to DB and ES. 
@@ -1471,8 +1387,6 @@ def import_author_to_system( from weko_logging.activity_logger import UserActivityLogger if author: try: - check_weko_id = check_weko_id_is_exists(weko_id, author.get('pk_id')) - author["is_deleted"] = True if author.get("is_deleted") else False if not author.get('authorIdInfo'): author["authorIdInfo"] = [] @@ -1486,35 +1400,21 @@ def import_author_to_system( nameInfo["fullName"] = fullName if status == 'new': - if check_weko_id: - current_app.logger.error("WekoID is duplicated") - raise Exception({'error_id': "WekoID is duplicated"}) - author["authorIdInfo"].insert( - 0, - { - "idType": "1", - "authorId": weko_id, - "authorIdShowFlg": "true" - } - ) WekoAuthors.create(author) else: if status == 'deleted' \ and get_count_item_link(author['pk_id']) > 0: raise Exception({'error_id': 'delete_author_link'}) - if check_weko_id: - current_app.logger.error("WekoID is duplicated") - raise Exception({'error_id': "WekoID is duplicated"}) author["authorIdInfo"].insert( 0, { "idType": "1", - "authorId": weko_id, + "authorId": author['pk_id'], "authorIdShowFlg": "true" } ) - WekoAuthors.update(author['pk_id'], author, force_change_mode) + WekoAuthors.update(author['pk_id'], author) db.session.commit() if status == "new": UserActivityLogger.info(operation="AUTHOR_CREATE", request_info=request_info) @@ -1576,20 +1476,16 @@ def create_result_file_for_user(json): with open(result_file_path, "w", encoding="utf-8") as result_file: writer = csv.writer(result_file, delimiter='\t') # write header - writer.writerow([ - "No.", "Start Date", "End Date", - "Previous WEKO ID", "New WEKO ID", "full_name", "Status" - ]) + writer.writerow(["No.", "Start Date", "End Date", "WEKO ID", "full_name", "Status"]) # write the json sent from the front desk. for data in json: number = data.get("No.", "") start_date = data.get("Start Date", "") end_date = data.get("End Date", "") - prev_weko_id = data.get("Previous WEKO ID", "") - new_weko_id = data.get("New WEKO ID", "") + weko_id = data.get("WEKO ID", "") full_name = data.get("full_name", "") status = data.get("Status", "") - writer.writerow([number, start_date, end_date, prev_weko_id, new_weko_id, full_name, status]) + writer.writerow([number, start_date, end_date, weko_id, full_name, status]) count = len(json) + 1 with open(result_over_max_file_path, "r", encoding="utf-8") as file: file_reader = csv.reader(file, dialect='excel', delimiter='\t') diff --git a/modules/weko-authors/weko_authors/views.py b/modules/weko-authors/weko_authors/views.py index 817903b3f1..782737981f 100644 --- a/modules/weko-authors/weko_authors/views.py +++ b/modules/weko-authors/weko_authors/views.py @@ -40,7 +40,7 @@ from .permissions import author_permission from .utils import ( get_author_prefix_obj, get_author_affiliation_obj, get_count_item_link, - validate_weko_id, check_period_date, validate_community_ids, + check_period_date, validate_community_ids, get_managed_community, check_delete_author, check_delete_prefix, check_delete_affiliation ) @@ -72,33 +72,10 @@ def create(): return jsonify(msg=_('Header Error')) data = request.get_json() + data["is_deleted"] = "false" activity_id = request.args.get('activity_id') - # weko_idを取得する。 - author_id_info = data["authorIdInfo"] - weko_id = None - for i in author_id_info: - if i.get('idType') == '1': - weko_id = i.get('authorId') - if not weko_id: - return jsonify(msg=_('Please set WEKO ID.')), 500 - - #weko_idのバリーデーションチェック - try: - result_weko_id_check = validate_weko_id(weko_id) - if result_weko_id_check[0] == False and 
result_weko_id_check[1] == "not half digit": - # weko_idが半角数字でない場合はエラーを返す - return jsonify(msg=_('Please set the WEKOID in the half digit.')), 500 - elif result_weko_id_check[0] == False and result_weko_id_check[1] == "already exists": - # weko_idが既に存在する場合はエラーを返す - return jsonify(msg=_('The value is already in use as WEKO ID.')), 500 - except Exception as ex: - current_app.logger.error(ex) - traceback.print_exc() - return jsonify(msg=_('Failed')), 500 - - - #periodのバリーデーションチェック + #periodのバリデーションチェック result_period_check = check_period_date(data) if result_period_check[0] == False and result_period_check[1] == "not date format": return jsonify(msg=_('Please set the affiliation start date and end date in the format yyyy-MM-dd.')), 500 @@ -140,28 +117,10 @@ def update_author(): req = request.get_json() data = req["author"] force_change_flag = request.get_json()["forceChangeFlag"] - # weko_idを取得する。 - weko_id = None - author_id_info = data["authorIdInfo"] - for i in author_id_info: - if i.get('idType') == '1': - weko_id = i.get('authorId') - if not weko_id: - return jsonify(msg=_('Please set WEKO ID.')), 500 pk_id = data["pk_id"] try: - #weko_idのバリーデーションチェック - result_weko_id_check = validate_weko_id(weko_id, pk_id) - - if result_weko_id_check[0] == False and result_weko_id_check[1] == "not half digit": - # weko_idが半角数字でない場合はエラーを返す - return jsonify(msg=_('Please set the WEKOID in the half digit.')), 500 - elif result_weko_id_check[0] == False and result_weko_id_check[1] == "already exists": - # weko_idが既に存在する場合はエラーを返す - return jsonify(msg=_('The value is already in use as WEKO ID.')), 500 - - #periodのバリーデーションチェック + #periodのバリデーションチェック result_period_check = check_period_date(data) if result_period_check[0] == False and result_period_check[1] == "not date format": return jsonify(msg=_('Please set the affiliation start date and end date in the format yyyy-MM-dd.')), 500 @@ -368,50 +327,6 @@ def getById(): ) return json.dumps(result) -@blueprint_api.route("/get_max_weko_id", methods=['GET']) -@login_required -def get_max_weko_id(): - """Get max weko id.""" - query = { - "_source": ["authorIdInfo"], # authorIdInfoフィールドのみを取得 - "query": { - "bool": { - "must": [ - {"term": {"gather_flg": {"value": 0}}} - ], - "must_not": [ - {"term": {"is_deleted": True}} - ] - } - }, - "size": 1000 # スクロールごとに取得するドキュメント数 - } - - indexer = RecordIndexer() - result = indexer.client.search( - index=current_app.config['WEKO_AUTHORS_ES_INDEX_NAME'], - body=query, - scroll='2m' # スクロールの有効期限 - ) - - max_author_id = 0 - scroll_id = result['_scroll_id'] - - while len(result['hits']['hits']) > 0: - for hit in result['hits']['hits']: - author_id_info = hit['_source'].get('authorIdInfo', []) - for info in author_id_info: - if info.get('idType') == '1': - author_id = int(info.get('authorId')) - if author_id > max_author_id: - max_author_id = author_id - result = indexer.client.scroll( - scroll_id=scroll_id, - scroll='2m' - ) - - return jsonify(max_author_id=max_author_id) - @blueprint_api.route("/input", methods=['POST']) @login_required @author_permission.require(http_exception=403) diff --git a/modules/weko-deposit/tests/test_tasks.py b/modules/weko-deposit/tests/test_tasks.py index 9d7538956a..affc06e5cf 100644 --- a/modules/weko-deposit/tests/test_tasks.py +++ b/modules/weko-deposit/tests/test_tasks.py @@ -471,7 +471,7 @@ def test_process_with_data(self, mock_commit, mock_update_author_data, mock_reco # .tox/c1/bin/pytest --cov=weko_deposit tests/test_tasks.py::test_update_authorInfo -v -s -vv --cov-branch --cov-report=term 
--cov-config=tox.ini --basetemp=/code/modules/weko-deposit/.tox/c1/tmp def test_update_authorInfo(app, db, records,mocker): app.config.update(WEKO_SEARCH_MAX_RESULT=1) - mocker.patch("weko_deposit.tasks.WekoDeposit.update_author_link_and_weko_link") + mocker.patch("weko_deposit.tasks.WekoDeposit.update_author_link") mock_recordssearch = MagicMock(side_effect=MockRecordsSearch) with patch("weko_deposit.tasks.RecordsSearch", mock_recordssearch): with patch("weko_deposit.tasks.RecordIndexer", MockRecordIndexer): diff --git a/modules/weko-search-ui/tests/data/jsonld/record_metadata_full.json b/modules/weko-search-ui/tests/data/jsonld/record_metadata_full.json index 22f15928a4..61fd3650fa 100644 --- a/modules/weko-search-ui/tests/data/jsonld/record_metadata_full.json +++ b/modules/weko-search-ui/tests/data/jsonld/record_metadata_full.json @@ -30,7 +30,6 @@ ], "status": "published" }, - "weko_link": {}, "item_title": "RO-Crate Sample Dataset", "author_link": [], "item_type_id": "30002", @@ -347,4 +346,4 @@ } ] } -} +} \ No newline at end of file From e3352037751f7d0a0856b57648c84248c32359ff Mon Sep 17 00:00:00 2001 From: ayumi-nishida Date: Tue, 25 Nov 2025 18:42:17 +0900 Subject: [PATCH 5/9] =?UTF-8?q?=E8=A8=80=E8=AA=9E=E3=83=95=E3=82=A1?= =?UTF-8?q?=E3=82=A4=E3=83=AB=E3=82=92=E6=9B=B4=E6=96=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../translations/en/LC_MESSAGES/messages.mo | Bin 8719 -> 469 bytes .../translations/en/LC_MESSAGES/messages.po | 277 +++++++++--------- .../translations/ja/LC_MESSAGES/messages.mo | Bin 9645 -> 6909 bytes .../translations/ja/LC_MESSAGES/messages.po | 277 +++++++++--------- .../weko_authors/translations/messages.pot | 273 ++++++++--------- 5 files changed, 394 insertions(+), 433 deletions(-) diff --git a/modules/weko-authors/weko_authors/translations/en/LC_MESSAGES/messages.mo b/modules/weko-authors/weko_authors/translations/en/LC_MESSAGES/messages.mo index ca76532d8adf0fcce3bfd4eea5b93fb52ec5d89f..992ae2757c2a51bb3f29c7f0a489349444649f8a 100644 GIT binary patch delta 121 zcmeBoxyr0_Pl#nI0}wC*u?!HafH+7%2gqIyl+*>%lRpVp85!ytnJO4sSQ(pY8(10` ya0U452Bnr|7H8(?>AEDAq*^H$85o)B8p73?O`a#x%4j`VS+tPN&|J@eiva-0`xhku literal 8719 zcmeH~Ym8l0700&%S_hGriXh?!4Q;jec4lzsFn!SJ+!=ad?sS+3wIN#1oU`xDVa`2g zIFC-RwV)7U5J`*?wI<>h`vGEnYZQ%vm{b!bF^a?(MS>*6ghYc;6T=68YoC1|o%)F1 zXVT8^oIU%jz4qGwwf5f6U3UH>il1ZrzncF)KTqCzKOcF$Qf*#O!bSKGSir^>rFOy} z!xQkY@J%pzgHmsS_c(sW@o~o|;Z6MgS@>>v$$(Nksu9PV;ibIS;p^a?@EW)T-wHnl z-v~bsuY;!{E~%fwi{USzeD^#Qe_nF@JG_wh3;2br>QX3nu7c;ot*(6syny!+D0X*4 zu{QxRp>BoZ-y+-wm*GY5Q}7^s6iQrPg7V!J4DvR34V3S8L9ttR@3%w#DW6{w|1~J_ zco@p}pMx^aqfqAm6DZ?72PMug!*{~h(#WH_4obhB5Es;x;~bQEeh|tM`%wH?hS$Rc zUJgGCWxU7XH2f931Zsl01r9-ZAA{oGZBWLYbMHqTn^4AYL79KswSU^RpMsasej4t9 z--GXhe}jYYN)}VYTj3~Ng%bZSLmBsLQ2hU{#a#P~!0-d^@}dr)AvjP;@fnxC=^L_d<#PI21qM51EoW z>KMaoc|QrSgkOSU@B2`6{uJB|pNE6+Y8FNG(11taVfbeF4am}{Gf=*J2A+Nmc?cz6 zPa%#)csb4#FoZkdAE4yx#aEPmJp@HZUvT`o;~B@_LCM!wpu}khiz)ux?sz98G|E7k z?>ZEJ?tv1AFGA7Bx1r?gQ&8snB9!%d8OnFBLW$FP1Vwlm6#ZNaMRye_c5Z~C;|bS( z5Xw5-4#n_uhiy z$GuSU^^;KI@fehGzXfHSC!oy#k5I<@CzLpEK}4d%YoOTK2c_R_Q0BYj7(3;aiT{&O#{C%-|9|QDTPW-Q0+f7x1xmhd zMLffB4oaLJfD`aEJO=*+MTZ~ak23xPQ1bOLxE1~eN<73FLZP-p8FvzjP8yE$P~v(7 zO8h?n#m^jy{!Tc40!qG~f=EUE0E)d|LtIh6hr8jcQ1W$@&Z3799)TH@e0>^<4qkxr z-Cv-p=mVyiq(*1Cm*!!-tn-yH)5)4iy*y0f9lD(qIyNTI%_550x`+dl(m4+3X|$ij zxry^kOGC@#Uf|`PE;6w+UgRrDT05YAD-u=%mRDvuyDv zKhxW0>h~A2@$#^B`&?T)v=>Di1L;B%cIJR-)(HRnaEYjOLXjt$8Tu>z zVI=w+A~fC?)Ot;gM^Un_{nU7QC-zy@wFV|K=eB0XOBtkes@vBtLB9poNTib$?qS`F 
zbB&9>7yBmCYiZ(}45c--Ilh*Z+GFlFtMUKzW!X;xM5j_EwF8|enM&E#@M6}igO6`}Jf<5|`&ztNn`5DGx*e$p%Z3q;=O&Fwr+(ecnlwzv_ae*nk|D3Hi(`|k#jYcIP zt(PJ%Uab2a`R*c2 zDN12(T2(b;*1Ou0=Z^Yj%<){!BvmzQ;s9MkyLP;F*UxL>m3Duy8v)%T<5ANx(-rbr zZBnUa2;${df5nTMItZ6TR-*4?ThjYVjx|qRmAZlp;)Z>3R?1oTNeE*p!c2;+JEb|Z zj9yHtIuE*&MJ`!OxaF3YmQnNm3N^ujO``XFZY<}?SU*=K^c4Byo@E!5;t|`=*NiV4 zQgBvk)O)fnhtyk7oZNa&v6%0DTvjatxv`-1`uyKU-Re^cr0BKF46CZ1MG;MJKz8v= zLK^#Ry&mQ(7cAUJmy8FIw7AgS}iYatA!)8leMGcb93WIjxLPfq81QH zC(|<>h4CtZBIRV@^0dH4AoZ5|QSLL;DEX+Xh)#7W)hRmaX{YSkOl0$tqel1Sv1;1o z`5-9PqR>ZXLARoa^~}nklv2+D+1yK$nO%Bu)s{c7#Cms5J7uLQHQnbfdd?=VWO65vQ}plC<|a z#l!8&eI4VZ>>!eH^?iZu@lcVgg+$j9e&=4KWM|~Rm=-a~W4SO@RmRw%TN_>EJM!%$ z3f~{3fK}GR0QuW8CYxi_R((A-l-$E!DklYXgkz4=Qp=t_%99|+izG!>^5-D2?yY#o zjOJ+F3bTwuusvehqvVN`FMRYA$4s{YgeH^1Uw&)zAdLN}2ncGfr7-9CW;pH1NvCh$ zs46+6AGPJlYN&_;J1|Kmg`lb%a@e(JGD#>~%$$&k0H2gfXO!-rX zq8p7Hf zv{11JEi6}RWZIY>9vmL23=LKWZ_tB-V>gVxckm{x&K|1FvAc)|{rv_%LSL zjMtJDy=QuQe-u`eba~&vY*eIPRGDI@XT9S!`PU5HVDoNB7Hr-PH}8g\n" "Language: en\n" "Language-Team: en \n" -"Plural-Forms: nplurals=2; plural=(n != 1)\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.5.1\n" +"Generated-By: Babel 2.17.0\n" -#: weko_authors/admin.py:422 weko_authors/admin.py:651 weko_authors/views.py:98 -#: weko_authors/views.py:128 weko_authors/views.py:184 -#: weko_authors/views.py:235 +#: weko_authors/admin.py:422 weko_authors/admin.py:649 +#: weko_authors/views.py:105 weko_authors/views.py:143 +#: weko_authors/views.py:194 msgid "Failed" msgstr "" -#: weko_authors/admin.py:657 weko_authors/admin.py:666 -#: weko_authors/admin.py:675 +#: weko_authors/admin.py:655 weko_authors/admin.py:664 +#: weko_authors/admin.py:673 msgid "Author Management" msgstr "" -#: weko_authors/admin.py:658 +#: weko_authors/admin.py:656 msgid "Edit" msgstr "" -#: weko_authors/admin.py:667 +#: weko_authors/admin.py:665 #: weko_authors/templates/weko_authors/admin/author_export.html:45 msgid "Export" msgstr "" -#: weko_authors/admin.py:676 +#: weko_authors/admin.py:674 #: weko_authors/templates/weko_authors/admin/author_import.html:42 msgid "Import" msgstr "" @@ -60,239 +60,234 @@ msgstr "" msgid "Allow delete authors." msgstr "" -#: weko_authors/tasks.py:359 +#: weko_authors/tasks.py:350 msgid "TimeOut" msgstr "" -#: weko_authors/tasks.py:458 -#: weko_authors/templates/weko_authors/admin/author_import.html:96 +#: weko_authors/tasks.py:446 +#: weko_authors/templates/weko_authors/admin/author_import.html:93 msgid "Error" msgstr "" -#: weko_authors/tasks.py:459 -#: weko_authors/templates/weko_authors/admin/author_import.html:105 -#: weko_authors/utils.py:1371 weko_authors/views.py:200 +#: weko_authors/tasks.py:447 +#: weko_authors/templates/weko_authors/admin/author_import.html:102 +#: weko_authors/utils.py:1287 weko_authors/views.py:159 msgid "The author is linked to items and cannot be deleted." msgstr "" -#: weko_authors/tasks.py:460 -#: weko_authors/templates/weko_authors/admin/author_import.html:104 +#: weko_authors/tasks.py:448 +#: weko_authors/templates/weko_authors/admin/author_import.html:101 msgid "Failed to import." 
msgstr "" -#: weko_authors/tasks.py:473 -#: weko_authors/templates/weko_authors/admin/author_import.html:91 +#: weko_authors/tasks.py:461 +#: weko_authors/templates/weko_authors/admin/author_import.html:88 msgid "Register Success" msgstr "" -#: weko_authors/tasks.py:474 -#: weko_authors/templates/weko_authors/admin/author_import.html:92 +#: weko_authors/tasks.py:462 +#: weko_authors/templates/weko_authors/admin/author_import.html:89 msgid "Update Success" msgstr "" -#: weko_authors/tasks.py:475 -#: weko_authors/templates/weko_authors/admin/author_import.html:93 +#: weko_authors/tasks.py:463 +#: weko_authors/templates/weko_authors/admin/author_import.html:90 msgid "Delete Success" msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:52 -#: weko_authors/templates/weko_authors/admin/author_import.html:106 -#: weko_authors/utils.py:663 weko_authors/utils.py:704 +#: weko_authors/templates/weko_authors/admin/author_import.html:103 +#: weko_authors/utils.py:587 weko_authors/utils.py:628 msgid "Internal server error" msgstr "" -#: weko_authors/utils.py:792 weko_authors/utils.py:1027 +#: weko_authors/utils.py:717 weko_authors/utils.py:946 +#, python-brace-format msgid "The following metadata keys are duplicated.
{}" msgstr "" -#: weko_authors/utils.py:804 weko_authors/utils.py:1038 +#: weko_authors/utils.py:728 weko_authors/utils.py:956 +#, python-brace-format msgid "Specified item does not consistency with DB item.
{}" msgstr "" -#: weko_authors/utils.py:820 weko_authors/utils.py:1064 +#: weko_authors/utils.py:744 weko_authors/utils.py:982 +#, python-brace-format msgid "Cannot read {} file correctly." msgstr "" -#: weko_authors/utils.py:836 +#: weko_authors/utils.py:760 msgid "There is no data to import." msgstr "" -#: weko_authors/utils.py:839 weko_authors/utils.py:1068 +#: weko_authors/utils.py:764 weko_authors/utils.py:986 +#, python-brace-format msgid "" "{} could not be read. Make sure the file format is {} and that the file " "is UTF-8 encoded." msgstr "" -#: weko_authors/utils.py:908 +#: weko_authors/utils.py:826 +#, python-brace-format msgid "There is duplicated data in the {} file." msgstr "" -#: weko_authors/utils.py:946 +#: weko_authors/utils.py:864 +#, python-brace-format msgid "{} is required item." msgstr "" -#: weko_authors/utils.py:955 +#: weko_authors/utils.py:873 +#, python-brace-format msgid "{} should be set by one of {}." msgstr "" -#: weko_authors/utils.py:1142 +#: weko_authors/utils.py:1060 msgid "Scheme is required item." msgstr "" -#: weko_authors/utils.py:1145 +#: weko_authors/utils.py:1063 msgid "The scheme WEKO cannot be used." msgstr "" -#: weko_authors/utils.py:1148 +#: weko_authors/utils.py:1066 msgid "Name is required item." msgstr "" -#: weko_authors/utils.py:1151 +#: weko_authors/utils.py:1069 msgid "URL is not URL format." msgstr "" -#: weko_authors/utils.py:1155 +#: weko_authors/utils.py:1073 msgid "The specified scheme does not exist." msgstr "" -#: weko_authors/utils.py:1159 +#: weko_authors/utils.py:1077 msgid "The specified scheme is used in the author ID." msgstr "" -#: weko_authors/utils.py:1174 +#: weko_authors/utils.py:1092 msgid "The specified scheme is duplicated." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:98 -#: weko_authors/utils.py:1281 +#: weko_authors/templates/weko_authors/admin/author_import.html:95 +#: weko_authors/utils.py:1197 msgid "Register" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:99 -#: weko_authors/utils.py:1283 +#: weko_authors/templates/weko_authors/admin/author_import.html:96 +#: weko_authors/utils.py:1199 msgid "Update" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:100 -#: weko_authors/utils.py:1285 +#: weko_authors/templates/weko_authors/admin/author_import.html:97 +#: weko_authors/utils.py:1201 msgid "Delete" msgstr "" -#: weko_authors/utils.py:1364 +#: weko_authors/utils.py:1280 msgid "Specified WEKO ID does not exist." msgstr "" -#: weko_authors/utils.py:1376 +#: weko_authors/utils.py:1292 +#, python-brace-format msgid "" "The specified author has been deleted. Update author information with {} " "content, but author remains deleted as it is." msgstr "" -#: weko_authors/utils.py:1837 +#: weko_authors/utils.py:1688 +#, python-brace-format msgid "Invalid community ID format: {}" msgstr "" -#: weko_authors/utils.py:1843 +#: weko_authors/utils.py:1694 +#, python-brace-format msgid "Community ID(s) {} does not exist." msgstr "" -#: weko_authors/utils.py:1865 weko_authors/utils.py:1881 -#: weko_authors/utils.py:1887 weko_authors/utils.py:1950 +#: weko_authors/utils.py:1716 weko_authors/utils.py:1732 +#: weko_authors/utils.py:1738 weko_authors/utils.py:1801 +#, python-brace-format msgid "You do not have permission for this {}’s communities: {}." 
msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:46 #: weko_authors/templates/weko_authors/macros/tabs_selector.html:22 -#: weko_authors/utils.py:1865 weko_authors/utils.py:1881 -#: weko_authors/utils.py:1887 weko_authors/utils.py:1957 +#: weko_authors/utils.py:1716 weko_authors/utils.py:1732 +#: weko_authors/utils.py:1738 weko_authors/utils.py:1808 msgid "Author ID" msgstr "" -#: weko_authors/utils.py:1867 weko_authors/utils.py:1873 +#: weko_authors/utils.py:1718 weko_authors/utils.py:1724 msgid "You must include at least one managed community." msgstr "" -#: weko_authors/utils.py:1871 weko_authors/utils.py:1943 +#: weko_authors/utils.py:1722 weko_authors/utils.py:1794 msgid "You cannot manage this record." msgstr "" -#: weko_authors/utils.py:1932 +#: weko_authors/utils.py:1783 +#, python-brace-format msgid "{} not found." msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:42 -#: weko_authors/templates/weko_authors/admin/author_import.html:67 +#: weko_authors/templates/weko_authors/admin/author_import.html:64 #: weko_authors/templates/weko_authors/macros/tabs_selector.html:23 -#: weko_authors/utils.py:1961 +#: weko_authors/utils.py:1812 msgid "ID Prefix" msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:43 -#: weko_authors/templates/weko_authors/admin/author_import.html:68 +#: weko_authors/templates/weko_authors/admin/author_import.html:65 #: weko_authors/templates/weko_authors/macros/tabs_selector.html:24 -#: weko_authors/utils.py:1965 +#: weko_authors/utils.py:1816 msgid "Affiliation ID" msgstr "" -#: weko_authors/views.py:72 weko_authors/views.py:139 weko_authors/views.py:195 +#: weko_authors/views.py:72 weko_authors/views.py:116 weko_authors/views.py:154 msgid "Header Error" msgstr "" -#: weko_authors/views.py:84 weko_authors/views.py:150 -msgid "Please set WEKO ID." -msgstr "" - -#: weko_authors/views.py:91 weko_authors/views.py:159 -msgid "Please set the WEKOID in the half digit." -msgstr "" - -#: weko_authors/views.py:94 weko_authors/views.py:162 -msgid "The value is already in use as WEKO ID." -msgstr "" - -#: weko_authors/views.py:104 weko_authors/views.py:167 +#: weko_authors/views.py:81 weko_authors/views.py:126 msgid "" "Please set the affiliation start date and end date in the format yyyy-MM-" "dd." msgstr "" -#: weko_authors/views.py:106 weko_authors/views.py:169 +#: weko_authors/views.py:83 weko_authors/views.py:128 msgid "The end date must be after the start date." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:88 -#: weko_authors/views.py:129 weko_authors/views.py:186 -#: weko_authors/views.py:237 weko_authors/views.py:765 -#: weko_authors/views.py:834 +#: weko_authors/templates/weko_authors/admin/author_import.html:85 +#: weko_authors/views.py:106 weko_authors/views.py:145 +#: weko_authors/views.py:196 weko_authors/views.py:686 +#: weko_authors/views.py:755 msgid "Success" msgstr "" -#: weko_authors/contrib/validation.py:111 -msgid "WEKO ID is Half-width digits only" -msgstr "" - -#: weko_authors/contrib/validation.py:112 -msgid "WEKO ID is required item." -msgstr "" - -#: weko_authors/contrib/validation.py:133 +#: weko_authors/contrib/validation.py:107 +#, python-brace-format msgid "Specified Identifier Scheme '{}' does not exist." msgstr "" -#: weko_authors/contrib/validation.py:154 +#: weko_authors/contrib/validation.py:128 +#, python-brace-format msgid "Specified Affiliation Identifier Scheme '{}' does not exist." 
msgstr "" -#: weko_authors/contrib/validation.py:179 -#: weko_authors/contrib/validation.py:204 +#: weko_authors/contrib/validation.py:153 +#: weko_authors/contrib/validation.py:178 +#, python-brace-format msgid "External Affiliation Period must be in the format: yyyy-MM-dd, blank. {}" msgstr "" -#: weko_authors/contrib/validation.py:205 +#: weko_authors/contrib/validation.py:179 msgid "Period end must be after Period start." msgstr "" -#: weko_authors/contrib/validation.py:243 +#: weko_authors/contrib/validation.py:217 +#, python-brace-format msgid "External author identifier exists in DB.
{}" msgstr "" @@ -310,7 +305,7 @@ msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:41 #: weko_authors/templates/weko_authors/admin/author_export.html:49 -#: weko_authors/templates/weko_authors/admin/author_import.html:66 +#: weko_authors/templates/weko_authors/admin/author_import.html:63 msgid "Author DB" msgstr "" @@ -335,7 +330,7 @@ msgid "Execute" msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:51 -#: weko_authors/templates/weko_authors/admin/author_import.html:56 +#: weko_authors/templates/weko_authors/admin/author_import.html:53 msgid "Cancel" msgstr "" @@ -371,163 +366,155 @@ msgstr "" msgid "No." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:47 -msgid "Current WEKO ID" -msgstr "" - -#: weko_authors/templates/weko_authors/admin/author_import.html:48 -msgid "New WEKO ID" -msgstr "" - -#: weko_authors/templates/weko_authors/admin/author_import.html:49 -msgid "Previous WEKO ID" +#: weko_authors/templates/weko_authors/admin/author_import.html:46 +msgid "WEKO ID" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:50 +#: weko_authors/templates/weko_authors/admin/author_import.html:47 msgid "full_name" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:53 +#: weko_authors/templates/weko_authors/admin/author_import.html:50 msgid "Select File" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:54 +#: weko_authors/templates/weko_authors/admin/author_import.html:51 msgid "Selected file name" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:55 +#: weko_authors/templates/weko_authors/admin/author_import.html:52 msgid "Next" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:57 +#: weko_authors/templates/weko_authors/admin/author_import.html:54 msgid "Force Change Mode" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:58 +#: weko_authors/templates/weko_authors/admin/author_import.html:55 msgid "Disclaimer:" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:59 +#: weko_authors/templates/weko_authors/admin/author_import.html:56 msgid "" "- When using this function, all metadata using author information is " "forcibly changed to match the author DB when updating." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:60 +#: weko_authors/templates/weko_authors/admin/author_import.html:57 msgid "" "- If not used, only the author and affiliation identifiers are changed " "when updating." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:61 +#: weko_authors/templates/weko_authors/admin/author_import.html:58 msgid "" "- Before starting this operation, you need fully understand the contents " "and metadata using AuthorDB at your institution." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:62 +#: weko_authors/templates/weko_authors/admin/author_import.html:59 msgid "- Use this function on your own responsibility." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:63 +#: weko_authors/templates/weko_authors/admin/author_import.html:60 msgid "" "- National Institute of Informatics (NII) does not take any " "responsibility for damages caused by using this function." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:64 +#: weko_authors/templates/weko_authors/admin/author_import.html:61 msgid "I agree to the terms of use." 
msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:65 +#: weko_authors/templates/weko_authors/admin/author_import.html:62 msgid "Import target" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:71 +#: weko_authors/templates/weko_authors/admin/author_import.html:68 msgid "Summary" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:72 +#: weko_authors/templates/weko_authors/admin/author_import.html:69 msgid "Total" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:73 +#: weko_authors/templates/weko_authors/admin/author_import.html:70 msgid "New Creator" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:74 +#: weko_authors/templates/weko_authors/admin/author_import.html:71 msgid "Update Creator" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:75 +#: weko_authors/templates/weko_authors/admin/author_import.html:72 msgid "Delete Creator" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:76 +#: weko_authors/templates/weko_authors/admin/author_import.html:73 msgid "Result Error" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:77 +#: weko_authors/templates/weko_authors/admin/author_import.html:74 msgid "Mail Address" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:78 +#: weko_authors/templates/weko_authors/admin/author_import.html:75 msgid "Check Result" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:79 +#: weko_authors/templates/weko_authors/admin/author_import.html:76 msgid "Scheme" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:80 +#: weko_authors/templates/weko_authors/admin/author_import.html:77 msgid "Scheme Name" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:81 +#: weko_authors/templates/weko_authors/admin/author_import.html:78 msgid "URL" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:84 +#: weko_authors/templates/weko_authors/admin/author_import.html:81 msgid "Start Date" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:85 +#: weko_authors/templates/weko_authors/admin/author_import.html:82 msgid "End Date" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:86 +#: weko_authors/templates/weko_authors/admin/author_import.html:83 msgid "Status" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:87 +#: weko_authors/templates/weko_authors/admin/author_import.html:84 msgid "To Do" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:89 +#: weko_authors/templates/weko_authors/admin/author_import.html:86 msgid "Failure" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:90 +#: weko_authors/templates/weko_authors/admin/author_import.html:87 msgid "Pending" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:97 +#: weko_authors/templates/weko_authors/admin/author_import.html:94 msgid "Warning" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:103 +#: weko_authors/templates/weko_authors/admin/author_import.html:100 msgid "The selected target table and the import file format are different." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:107 +#: weko_authors/templates/weko_authors/admin/author_import.html:104 msgid "Celery is not running." 
msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:108 +#: weko_authors/templates/weko_authors/admin/author_import.html:105 msgid "Import is in progress." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:109 +#: weko_authors/templates/weko_authors/admin/author_import.html:106 msgid "Import is in progress on another device." msgstr "" diff --git a/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.mo b/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.mo index 9809d0dfe26e14e65c85059c6e61e7eb499b2e9b..4c63f4127febd34e73398d3919a40a97824d4538 100644 GIT binary patch delta 1641 zcmYM!YiyHM7{Kw>n3d={vY-#&D-;PpYxvg zIoI~n=DC{0FD}P1!>5IBG2bJ0jUImfb{Mmda1OWN-c)0H@dmEM#z&0Vk3E=;vsj7u zF&!(?=Jnx1;?>9_(}bnQBupDO`BcO(3*W}YD9B|#qtL`xFcWX!qj(#&{jbQ_%&b-a zCvurN3hgMB#u=D}I)DPK!lk%?{!KeKI+N|_#-peS&ZE9Cg4)ql)D^mkI>maDR>68R~&Sr_e~ z3N=v!>PR=E4y4EO4O~Sms6V%BsPFxOdWh`=E6{^l|D@$M)PCZqXD+dy8(pG=_239{ z8L{H?mKRY6bQ$$5{D4h3i6QiJ^q;^e>WU1Z4)_vkhu@<1^F6+RW4K;_|Dl5UJ?=%_ z<1uW&=b1(TPof^S>!^ut;e)-j;u+MXyo;^qE9Cs(YpCtVa0GwGA{?M7!H+Pc{~X^H zC0|WTwM)0#ElV@bWk;gBqVcG=u{+ukk9CV9Gbn2_Uy{3-KB>$qkf*aIJkh=XJvJ@T zSK_86R&Cm%T{}CwUX)GQB_i1?rNUV+VP}!_I@{$ZXPty|Leih}g-k7MER1<0G4o6; z-m%MU*}b#lVQixYjr^PMF>tjM$f3u6xXe*gdg delta 4203 zcmc)L4@_0p9l-GeqWn_?6;KQItWND5Jo!^Z(MhRLK@=J4s5OpW-@Cw_k9YC?QHZs# zINQ!yW7}J8Z4z znl0%Fyw5r3oqNykobx+}kE_1Z9KW(4?Tq5*5PwDdU7jVi>p%CWE7ec+DDK8_jN$eS zrPkn&@d*A6bI{IIDjy&4eA4q}&$nvy`H&syu5kk9rf%!M(T)_hSJL z;9PtfZ^H{nOzK^nk3UCw?n9J-Mm?`#Huc$j5Ua|=xs0!tQb?m=mG?yn&Zb_4^5P9B zFWij0Lv2L~a5vtLT{sUP!$v%hvNEG6&lM8wVqAvuTs6vjo21V8>WdWkRenBX3wlvj z zUo!uvG?0y|o(+-!EcD?iT!n9-tjO>17Mw@aGVte6a;4m}8f9s}fU*U3D1qLC%t>{4 zhVV1gkH#r1q3|5a3x9x;?Qi1-{1D5qh<%Zq*p7R!9T(xN$nL01D9^oz=TkV2I7s~o z668LtBbwv*O>D+hg-V^lc!Yv1Stc7K$+X0Cjc0@BUX*it(DN0PTp02E6G{$@qr5MT zSykX7l$mZp=AtZ=rN19pnYen3f;{k!+n|1l((ww)lKsW|UM+Rob5R0aj1SCjx!HTy3!$<*!_1-Os;N|e1i zj(6fkk?Q?)CU zVt3TNnH1!v>RU1NkF?pTUkV28!`dGVwL)Fm2nK8E^!6Q_W8ttFit6@;<{i4J zUezZC_0XFoFgu*A?u_~6RnFz~inMUp4m$0F{Wir9%Zv&)JQ^F%=>m;aT_;?@3cv&U3B6X+I}D_Oo_)tpP2<=K-q z$+?zUF{dt(YEDVkGG{zb5w~WYw&gh(Ecp z?qbTUug}!^Sys(bH=e}4kbWUMkp{7d8Spu&**}WwNU!O)IxTiI;&u(#W<*Orvya*O z)Gm4F^iIqv5bF(EesVl;^FA_8a--a{L`J*a4;jIjsf&1#H&y9Je{Fax-c;|rk=+=# zddwZMsM=-gdYjLv5mdX|w#lEQqr@piH=%uMcjDA}Ok}3pn%phrC8<2$NcNN-wgS;^ zH-|Wsp&|LZ#| Pj|iv!mmV5hoS*t{tmDk= diff --git a/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.po b/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.po index a7c9e17939..f78051aee1 100644 --- a/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.po +++ b/modules/weko-authors/weko_authors/translations/ja/LC_MESSAGES/messages.po @@ -8,38 +8,38 @@ msgid "" msgstr "" "Project-Id-Version: weko-authors 0.1.0.dev20170000\n" "Report-Msgid-Bugs-To: wekosoftware@nii.ac.jp\n" -"POT-Creation-Date: 2025-10-03 00:38+0900\n" -"PO-Revision-Date: 2025-05-26 17:50+0900\n" +"POT-Creation-Date: 2025-11-25 18:35+0900\n" +"PO-Revision-Date: 2025-11-25 18:36+0900\n" "Last-Translator: FULL NAME \n" "Language: ja\n" "Language-Team: ja \n" -"Plural-Forms: nplurals=1; plural=0\n" +"Plural-Forms: nplurals=1; plural=0;\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.5.1\n" +"Generated-By: Babel 2.17.0\n" -#: weko_authors/admin.py:422 weko_authors/admin.py:651 weko_authors/views.py:98 -#: weko_authors/views.py:128 weko_authors/views.py:184 -#: weko_authors/views.py:235 +#: 
weko_authors/admin.py:422 weko_authors/admin.py:649 +#: weko_authors/views.py:105 weko_authors/views.py:143 +#: weko_authors/views.py:194 msgid "Failed" msgstr "" -#: weko_authors/admin.py:657 weko_authors/admin.py:666 -#: weko_authors/admin.py:675 +#: weko_authors/admin.py:655 weko_authors/admin.py:664 +#: weko_authors/admin.py:673 msgid "Author Management" msgstr "著者管理" -#: weko_authors/admin.py:658 +#: weko_authors/admin.py:656 msgid "Edit" msgstr "編集" -#: weko_authors/admin.py:667 +#: weko_authors/admin.py:665 #: weko_authors/templates/weko_authors/admin/author_export.html:45 msgid "Export" msgstr "エクスポート" -#: weko_authors/admin.py:676 +#: weko_authors/admin.py:674 #: weko_authors/templates/weko_authors/admin/author_import.html:42 msgid "Import" msgstr "インポート" @@ -60,239 +60,234 @@ msgstr "" msgid "Allow delete authors." msgstr "" -#: weko_authors/tasks.py:359 +#: weko_authors/tasks.py:350 msgid "TimeOut" msgstr "" -#: weko_authors/tasks.py:458 -#: weko_authors/templates/weko_authors/admin/author_import.html:96 +#: weko_authors/tasks.py:446 +#: weko_authors/templates/weko_authors/admin/author_import.html:93 msgid "Error" msgstr "" -#: weko_authors/tasks.py:459 -#: weko_authors/templates/weko_authors/admin/author_import.html:105 -#: weko_authors/utils.py:1371 weko_authors/views.py:200 +#: weko_authors/tasks.py:447 +#: weko_authors/templates/weko_authors/admin/author_import.html:102 +#: weko_authors/utils.py:1287 weko_authors/views.py:159 msgid "The author is linked to items and cannot be deleted." msgstr "アイテムがリンクしているため、指定された著者は削除できません。" -#: weko_authors/tasks.py:460 -#: weko_authors/templates/weko_authors/admin/author_import.html:104 +#: weko_authors/tasks.py:448 +#: weko_authors/templates/weko_authors/admin/author_import.html:101 msgid "Failed to import." msgstr "" -#: weko_authors/tasks.py:473 -#: weko_authors/templates/weko_authors/admin/author_import.html:91 +#: weko_authors/tasks.py:461 +#: weko_authors/templates/weko_authors/admin/author_import.html:88 msgid "Register Success" msgstr "" -#: weko_authors/tasks.py:474 -#: weko_authors/templates/weko_authors/admin/author_import.html:92 +#: weko_authors/tasks.py:462 +#: weko_authors/templates/weko_authors/admin/author_import.html:89 msgid "Update Success" msgstr "" -#: weko_authors/tasks.py:475 -#: weko_authors/templates/weko_authors/admin/author_import.html:93 +#: weko_authors/tasks.py:463 +#: weko_authors/templates/weko_authors/admin/author_import.html:90 msgid "Delete Success" msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:52 -#: weko_authors/templates/weko_authors/admin/author_import.html:106 -#: weko_authors/utils.py:663 weko_authors/utils.py:704 +#: weko_authors/templates/weko_authors/admin/author_import.html:103 +#: weko_authors/utils.py:587 weko_authors/utils.py:628 msgid "Internal server error" msgstr "サーバ内部エラー" -#: weko_authors/utils.py:792 weko_authors/utils.py:1027 +#: weko_authors/utils.py:717 weko_authors/utils.py:946 +#, python-brace-format msgid "The following metadata keys are duplicated.
{}" msgstr "以下のメタデータキーが重複しています。
{}" -#: weko_authors/utils.py:804 weko_authors/utils.py:1038 +#: weko_authors/utils.py:728 weko_authors/utils.py:956 +#, python-brace-format msgid "Specified item does not consistency with DB item.
{}" msgstr "指定された項目とDBの項目が一致しません。
{}" -#: weko_authors/utils.py:820 weko_authors/utils.py:1064 +#: weko_authors/utils.py:744 weko_authors/utils.py:982 +#, python-brace-format msgid "Cannot read {} file correctly." msgstr "{}ファイルが正しく読み込めません。" -#: weko_authors/utils.py:836 +#: weko_authors/utils.py:760 msgid "There is no data to import." msgstr " インポートのデータがありません。" -#: weko_authors/utils.py:839 weko_authors/utils.py:1068 +#: weko_authors/utils.py:764 weko_authors/utils.py:986 +#, python-brace-format msgid "" "{} could not be read. Make sure the file format is {} and that the file " "is UTF-8 encoded." msgstr "{}を読み込めませんでした。ファイル形式が{}であること、またそのファイルがUTF-8でエンコードされているかを確認してください。" -#: weko_authors/utils.py:908 +#: weko_authors/utils.py:826 +#, python-brace-format msgid "There is duplicated data in the {} file." msgstr "{}ファイルの中に重複するデータがあります。" -#: weko_authors/utils.py:946 +#: weko_authors/utils.py:864 +#, python-brace-format msgid "{} is required item." msgstr "{}は必須項目です。" -#: weko_authors/utils.py:955 +#: weko_authors/utils.py:873 +#, python-brace-format msgid "{} should be set by one of {}." msgstr "{}は{}のいずれかを設定してください。" -#: weko_authors/utils.py:1142 +#: weko_authors/utils.py:1060 msgid "Scheme is required item." msgstr "" -#: weko_authors/utils.py:1145 +#: weko_authors/utils.py:1063 msgid "The scheme WEKO cannot be used." msgstr "" -#: weko_authors/utils.py:1148 +#: weko_authors/utils.py:1066 msgid "Name is required item." msgstr "" -#: weko_authors/utils.py:1151 +#: weko_authors/utils.py:1069 msgid "URL is not URL format." msgstr "" -#: weko_authors/utils.py:1155 +#: weko_authors/utils.py:1073 msgid "The specified scheme does not exist." msgstr "" -#: weko_authors/utils.py:1159 +#: weko_authors/utils.py:1077 msgid "The specified scheme is used in the author ID." msgstr "" -#: weko_authors/utils.py:1174 +#: weko_authors/utils.py:1092 msgid "The specified scheme is duplicated." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:98 -#: weko_authors/utils.py:1281 +#: weko_authors/templates/weko_authors/admin/author_import.html:95 +#: weko_authors/utils.py:1197 msgid "Register" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:99 -#: weko_authors/utils.py:1283 +#: weko_authors/templates/weko_authors/admin/author_import.html:96 +#: weko_authors/utils.py:1199 msgid "Update" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:100 -#: weko_authors/utils.py:1285 +#: weko_authors/templates/weko_authors/admin/author_import.html:97 +#: weko_authors/utils.py:1201 msgid "Delete" msgstr "" -#: weko_authors/utils.py:1364 +#: weko_authors/utils.py:1280 msgid "Specified WEKO ID does not exist." msgstr "指定されたWEKO IDが存在していません。" -#: weko_authors/utils.py:1376 +#: weko_authors/utils.py:1292 +#, python-brace-format msgid "" "The specified author has been deleted. Update author information with {} " "content, but author remains deleted as it is." msgstr "指定された著者は削除済です。csvの内容で著者情報を更新しますが、著者は削除されたままです。" -#: weko_authors/utils.py:1837 +#: weko_authors/utils.py:1688 +#, python-brace-format msgid "Invalid community ID format: {}" msgstr "無効なコミュニティID形式です。" -#: weko_authors/utils.py:1843 +#: weko_authors/utils.py:1694 +#, python-brace-format msgid "Community ID(s) {} does not exist." 
msgstr "指定されたコミュニティID「{}」は存在しません。" -#: weko_authors/utils.py:1865 weko_authors/utils.py:1881 -#: weko_authors/utils.py:1887 weko_authors/utils.py:1950 +#: weko_authors/utils.py:1716 weko_authors/utils.py:1732 +#: weko_authors/utils.py:1738 weko_authors/utils.py:1801 +#, python-brace-format msgid "You do not have permission for this {}’s communities: {}." msgstr "{}に紐づく、コミュニティ「{}」の管理権限がありません。" -#: weko_authors/templates/weko_authors/admin/author_import.html:46 #: weko_authors/templates/weko_authors/macros/tabs_selector.html:22 -#: weko_authors/utils.py:1865 weko_authors/utils.py:1881 -#: weko_authors/utils.py:1887 weko_authors/utils.py:1957 +#: weko_authors/utils.py:1716 weko_authors/utils.py:1732 +#: weko_authors/utils.py:1738 weko_authors/utils.py:1808 msgid "Author ID" msgstr "著者ID" -#: weko_authors/utils.py:1867 weko_authors/utils.py:1873 +#: weko_authors/utils.py:1718 weko_authors/utils.py:1724 msgid "You must include at least one managed community." msgstr "少なくとも1つの管理対象コミュニティを含める必要があります。" -#: weko_authors/utils.py:1871 weko_authors/utils.py:1943 +#: weko_authors/utils.py:1722 weko_authors/utils.py:1794 msgid "You cannot manage this record." msgstr "このレコードを操作することはできません。" -#: weko_authors/utils.py:1932 +#: weko_authors/utils.py:1783 +#, python-brace-format msgid "{} not found." msgstr "{}が見つかりません。" #: weko_authors/templates/weko_authors/admin/author_export.html:42 -#: weko_authors/templates/weko_authors/admin/author_import.html:67 +#: weko_authors/templates/weko_authors/admin/author_import.html:64 #: weko_authors/templates/weko_authors/macros/tabs_selector.html:23 -#: weko_authors/utils.py:1961 +#: weko_authors/utils.py:1812 msgid "ID Prefix" msgstr "著者識別子" #: weko_authors/templates/weko_authors/admin/author_export.html:43 -#: weko_authors/templates/weko_authors/admin/author_import.html:68 +#: weko_authors/templates/weko_authors/admin/author_import.html:65 #: weko_authors/templates/weko_authors/macros/tabs_selector.html:24 -#: weko_authors/utils.py:1965 +#: weko_authors/utils.py:1816 msgid "Affiliation ID" msgstr "機関識別子" -#: weko_authors/views.py:72 weko_authors/views.py:139 weko_authors/views.py:195 +#: weko_authors/views.py:72 weko_authors/views.py:116 weko_authors/views.py:154 msgid "Header Error" msgstr "ヘッダエラー" -#: weko_authors/views.py:84 weko_authors/views.py:150 -msgid "Please set WEKO ID." -msgstr "" - -#: weko_authors/views.py:91 weko_authors/views.py:159 -msgid "Please set the WEKOID in the half digit." -msgstr "" - -#: weko_authors/views.py:94 weko_authors/views.py:162 -msgid "The value is already in use as WEKO ID." -msgstr "" - -#: weko_authors/views.py:104 weko_authors/views.py:167 +#: weko_authors/views.py:81 weko_authors/views.py:126 msgid "" "Please set the affiliation start date and end date in the format yyyy-MM-" "dd." msgstr "" -#: weko_authors/views.py:106 weko_authors/views.py:169 +#: weko_authors/views.py:83 weko_authors/views.py:128 msgid "The end date must be after the start date." 
msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:88 -#: weko_authors/views.py:129 weko_authors/views.py:186 -#: weko_authors/views.py:237 weko_authors/views.py:765 -#: weko_authors/views.py:834 +#: weko_authors/templates/weko_authors/admin/author_import.html:85 +#: weko_authors/views.py:106 weko_authors/views.py:145 +#: weko_authors/views.py:196 weko_authors/views.py:686 +#: weko_authors/views.py:755 msgid "Success" msgstr "成功" -#: weko_authors/contrib/validation.py:111 -msgid "WEKO ID is Half-width digits only" -msgstr "" - -#: weko_authors/contrib/validation.py:112 -msgid "WEKO ID is required item." -msgstr "" - -#: weko_authors/contrib/validation.py:133 +#: weko_authors/contrib/validation.py:107 +#, python-brace-format msgid "Specified Identifier Scheme '{}' does not exist." msgstr "指定された外部著者ID 識別子'{}'が存在していません。" -#: weko_authors/contrib/validation.py:154 +#: weko_authors/contrib/validation.py:128 +#, python-brace-format msgid "Specified Affiliation Identifier Scheme '{}' does not exist." msgstr "" -#: weko_authors/contrib/validation.py:179 -#: weko_authors/contrib/validation.py:204 +#: weko_authors/contrib/validation.py:153 +#: weko_authors/contrib/validation.py:178 +#, python-brace-format msgid "External Affiliation Period must be in the format: yyyy-MM-dd, blank. {}" msgstr "" -#: weko_authors/contrib/validation.py:205 +#: weko_authors/contrib/validation.py:179 msgid "Period end must be after Period start." msgstr "" -#: weko_authors/contrib/validation.py:243 +#: weko_authors/contrib/validation.py:217 +#, python-brace-format msgid "External author identifier exists in DB.
{}" msgstr "外部著者識別子がDBに存在しています。
{}" @@ -310,7 +305,7 @@ msgstr "全件エクスポート" #: weko_authors/templates/weko_authors/admin/author_export.html:41 #: weko_authors/templates/weko_authors/admin/author_export.html:49 -#: weko_authors/templates/weko_authors/admin/author_import.html:66 +#: weko_authors/templates/weko_authors/admin/author_import.html:63 msgid "Author DB" msgstr "著者DB" @@ -335,7 +330,7 @@ msgid "Execute" msgstr "実行" #: weko_authors/templates/weko_authors/admin/author_export.html:51 -#: weko_authors/templates/weko_authors/admin/author_import.html:56 +#: weko_authors/templates/weko_authors/admin/author_import.html:53 msgid "Cancel" msgstr "キャンセル" @@ -371,163 +366,155 @@ msgstr "ダウンロード" msgid "No." msgstr "No." -#: weko_authors/templates/weko_authors/admin/author_import.html:47 -msgid "Current WEKO ID" -msgstr "" - -#: weko_authors/templates/weko_authors/admin/author_import.html:48 -msgid "New WEKO ID" -msgstr "" - -#: weko_authors/templates/weko_authors/admin/author_import.html:49 -msgid "Previous WEKO ID" +#: weko_authors/templates/weko_authors/admin/author_import.html:46 +msgid "WEKO ID" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:50 +#: weko_authors/templates/weko_authors/admin/author_import.html:47 msgid "full_name" msgstr "姓名" -#: weko_authors/templates/weko_authors/admin/author_import.html:53 +#: weko_authors/templates/weko_authors/admin/author_import.html:50 msgid "Select File" msgstr "ファイル選択" -#: weko_authors/templates/weko_authors/admin/author_import.html:54 +#: weko_authors/templates/weko_authors/admin/author_import.html:51 msgid "Selected file name" msgstr "選択したファイル名" -#: weko_authors/templates/weko_authors/admin/author_import.html:55 +#: weko_authors/templates/weko_authors/admin/author_import.html:52 msgid "Next" msgstr "次へ" -#: weko_authors/templates/weko_authors/admin/author_import.html:57 +#: weko_authors/templates/weko_authors/admin/author_import.html:54 msgid "Force Change Mode" msgstr "強制変更モード" -#: weko_authors/templates/weko_authors/admin/author_import.html:58 +#: weko_authors/templates/weko_authors/admin/author_import.html:55 msgid "Disclaimer:" msgstr "免責事項:" -#: weko_authors/templates/weko_authors/admin/author_import.html:59 +#: weko_authors/templates/weko_authors/admin/author_import.html:56 msgid "" "- When using this function, all metadata using author information is " "forcibly changed to match the author DB when updating." msgstr "・本機能を使用する場合、更新時に著者情報を使ったメタデータを著者DBに合わせて強制的に全て変更します。" -#: weko_authors/templates/weko_authors/admin/author_import.html:60 +#: weko_authors/templates/weko_authors/admin/author_import.html:57 msgid "" "- If not used, only the author and affiliation identifiers are changed " "when updating." msgstr "・使用しない場合、更新時に著者識別子、機関識別子のみ変更します。" -#: weko_authors/templates/weko_authors/admin/author_import.html:61 +#: weko_authors/templates/weko_authors/admin/author_import.html:58 msgid "" "- Before starting this operation, you need fully understand the contents " "and metadata using AuthorDB at your institution." msgstr "・本機能は内容及び自機関で登録されている著者情報を使ったメタデータについて十分に理解した上で作業を行なってください。" -#: weko_authors/templates/weko_authors/admin/author_import.html:62 +#: weko_authors/templates/weko_authors/admin/author_import.html:59 msgid "- Use this function on your own responsibility." 
msgstr "・本機能の利用は、自機関の責任で行なってください。" -#: weko_authors/templates/weko_authors/admin/author_import.html:63 +#: weko_authors/templates/weko_authors/admin/author_import.html:60 msgid "" "- National Institute of Informatics (NII) does not take any " "responsibility for damages caused by using this function." msgstr "・本機能の利用により負った損害などについては、国立情報学研究所は一切の責任を追いません。" -#: weko_authors/templates/weko_authors/admin/author_import.html:64 +#: weko_authors/templates/weko_authors/admin/author_import.html:61 msgid "I agree to the terms of use." msgstr "利用規約に同意します。" -#: weko_authors/templates/weko_authors/admin/author_import.html:65 +#: weko_authors/templates/weko_authors/admin/author_import.html:62 msgid "Import target" msgstr "インポート対象" -#: weko_authors/templates/weko_authors/admin/author_import.html:71 +#: weko_authors/templates/weko_authors/admin/author_import.html:68 msgid "Summary" msgstr "サマリー" -#: weko_authors/templates/weko_authors/admin/author_import.html:72 +#: weko_authors/templates/weko_authors/admin/author_import.html:69 msgid "Total" msgstr "総計" -#: weko_authors/templates/weko_authors/admin/author_import.html:73 +#: weko_authors/templates/weko_authors/admin/author_import.html:70 msgid "New Creator" msgstr "新規登録の著者" -#: weko_authors/templates/weko_authors/admin/author_import.html:74 +#: weko_authors/templates/weko_authors/admin/author_import.html:71 msgid "Update Creator" msgstr "更新の著者" -#: weko_authors/templates/weko_authors/admin/author_import.html:75 +#: weko_authors/templates/weko_authors/admin/author_import.html:72 msgid "Delete Creator" msgstr "削除する著者" -#: weko_authors/templates/weko_authors/admin/author_import.html:76 +#: weko_authors/templates/weko_authors/admin/author_import.html:73 msgid "Result Error" msgstr "チェックエラー" -#: weko_authors/templates/weko_authors/admin/author_import.html:77 +#: weko_authors/templates/weko_authors/admin/author_import.html:74 msgid "Mail Address" msgstr "メールアドレス" -#: weko_authors/templates/weko_authors/admin/author_import.html:78 +#: weko_authors/templates/weko_authors/admin/author_import.html:75 msgid "Check Result" msgstr "チェック結果" -#: weko_authors/templates/weko_authors/admin/author_import.html:79 +#: weko_authors/templates/weko_authors/admin/author_import.html:76 msgid "Scheme" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:80 +#: weko_authors/templates/weko_authors/admin/author_import.html:77 msgid "Scheme Name" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:81 +#: weko_authors/templates/weko_authors/admin/author_import.html:78 msgid "URL" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:84 +#: weko_authors/templates/weko_authors/admin/author_import.html:81 msgid "Start Date" msgstr "開始日" -#: weko_authors/templates/weko_authors/admin/author_import.html:85 +#: weko_authors/templates/weko_authors/admin/author_import.html:82 msgid "End Date" msgstr "終了日" -#: weko_authors/templates/weko_authors/admin/author_import.html:86 +#: weko_authors/templates/weko_authors/admin/author_import.html:83 msgid "Status" msgstr "ステータス" -#: weko_authors/templates/weko_authors/admin/author_import.html:87 +#: weko_authors/templates/weko_authors/admin/author_import.html:84 msgid "To Do" msgstr "To Do" -#: weko_authors/templates/weko_authors/admin/author_import.html:89 +#: weko_authors/templates/weko_authors/admin/author_import.html:86 msgid "Failure" msgstr "失敗" -#: weko_authors/templates/weko_authors/admin/author_import.html:90 +#: weko_authors/templates/weko_authors/admin/author_import.html:87 msgid 
"Pending" msgstr "処理待ち" -#: weko_authors/templates/weko_authors/admin/author_import.html:97 +#: weko_authors/templates/weko_authors/admin/author_import.html:94 msgid "Warning" msgstr "警告" -#: weko_authors/templates/weko_authors/admin/author_import.html:103 +#: weko_authors/templates/weko_authors/admin/author_import.html:100 msgid "The selected target table and the import file format are different." msgstr "選択された登録対象テーブルとインポートファイルの形式が違います。" -#: weko_authors/templates/weko_authors/admin/author_import.html:107 +#: weko_authors/templates/weko_authors/admin/author_import.html:104 msgid "Celery is not running." msgstr "Celeryは動いていません。" -#: weko_authors/templates/weko_authors/admin/author_import.html:108 +#: weko_authors/templates/weko_authors/admin/author_import.html:105 msgid "Import is in progress." msgstr "インポートを実行中です。" -#: weko_authors/templates/weko_authors/admin/author_import.html:109 +#: weko_authors/templates/weko_authors/admin/author_import.html:106 msgid "Import is in progress on another device." msgstr "他の端末でインポートを実行中です。" diff --git a/modules/weko-authors/weko_authors/translations/messages.pot b/modules/weko-authors/weko_authors/translations/messages.pot index dd190bc9c4..9d5abdb00f 100644 --- a/modules/weko-authors/weko_authors/translations/messages.pot +++ b/modules/weko-authors/weko_authors/translations/messages.pot @@ -9,36 +9,36 @@ msgid "" msgstr "" "Project-Id-Version: weko-authors 0.1.0.dev20170000\n" "Report-Msgid-Bugs-To: wekosoftware@nii.ac.jp\n" -"POT-Creation-Date: 2025-10-03 00:38+0900\n" +"POT-Creation-Date: 2025-11-25 18:35+0900\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.5.1\n" +"Generated-By: Babel 2.17.0\n" -#: weko_authors/admin.py:422 weko_authors/admin.py:651 weko_authors/views.py:98 -#: weko_authors/views.py:128 weko_authors/views.py:184 -#: weko_authors/views.py:235 +#: weko_authors/admin.py:422 weko_authors/admin.py:649 +#: weko_authors/views.py:105 weko_authors/views.py:143 +#: weko_authors/views.py:194 msgid "Failed" msgstr "" -#: weko_authors/admin.py:657 weko_authors/admin.py:666 -#: weko_authors/admin.py:675 +#: weko_authors/admin.py:655 weko_authors/admin.py:664 +#: weko_authors/admin.py:673 msgid "Author Management" msgstr "" -#: weko_authors/admin.py:658 +#: weko_authors/admin.py:656 msgid "Edit" msgstr "" -#: weko_authors/admin.py:667 +#: weko_authors/admin.py:665 #: weko_authors/templates/weko_authors/admin/author_export.html:45 msgid "Export" msgstr "" -#: weko_authors/admin.py:676 +#: weko_authors/admin.py:674 #: weko_authors/templates/weko_authors/admin/author_import.html:42 msgid "Import" msgstr "" @@ -59,239 +59,234 @@ msgstr "" msgid "Allow delete authors." msgstr "" -#: weko_authors/tasks.py:359 +#: weko_authors/tasks.py:350 msgid "TimeOut" msgstr "" -#: weko_authors/tasks.py:458 -#: weko_authors/templates/weko_authors/admin/author_import.html:96 +#: weko_authors/tasks.py:446 +#: weko_authors/templates/weko_authors/admin/author_import.html:93 msgid "Error" msgstr "" -#: weko_authors/tasks.py:459 -#: weko_authors/templates/weko_authors/admin/author_import.html:105 -#: weko_authors/utils.py:1371 weko_authors/views.py:200 +#: weko_authors/tasks.py:447 +#: weko_authors/templates/weko_authors/admin/author_import.html:102 +#: weko_authors/utils.py:1287 weko_authors/views.py:159 msgid "The author is linked to items and cannot be deleted." 
msgstr "" -#: weko_authors/tasks.py:460 -#: weko_authors/templates/weko_authors/admin/author_import.html:104 +#: weko_authors/tasks.py:448 +#: weko_authors/templates/weko_authors/admin/author_import.html:101 msgid "Failed to import." msgstr "" -#: weko_authors/tasks.py:473 -#: weko_authors/templates/weko_authors/admin/author_import.html:91 +#: weko_authors/tasks.py:461 +#: weko_authors/templates/weko_authors/admin/author_import.html:88 msgid "Register Success" msgstr "" -#: weko_authors/tasks.py:474 -#: weko_authors/templates/weko_authors/admin/author_import.html:92 +#: weko_authors/tasks.py:462 +#: weko_authors/templates/weko_authors/admin/author_import.html:89 msgid "Update Success" msgstr "" -#: weko_authors/tasks.py:475 -#: weko_authors/templates/weko_authors/admin/author_import.html:93 +#: weko_authors/tasks.py:463 +#: weko_authors/templates/weko_authors/admin/author_import.html:90 msgid "Delete Success" msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:52 -#: weko_authors/templates/weko_authors/admin/author_import.html:106 -#: weko_authors/utils.py:663 weko_authors/utils.py:704 +#: weko_authors/templates/weko_authors/admin/author_import.html:103 +#: weko_authors/utils.py:587 weko_authors/utils.py:628 msgid "Internal server error" msgstr "" -#: weko_authors/utils.py:792 weko_authors/utils.py:1027 +#: weko_authors/utils.py:717 weko_authors/utils.py:946 +#, python-brace-format msgid "The following metadata keys are duplicated.
{}" msgstr "" -#: weko_authors/utils.py:804 weko_authors/utils.py:1038 +#: weko_authors/utils.py:728 weko_authors/utils.py:956 +#, python-brace-format msgid "Specified item does not consistency with DB item.
{}" msgstr "" -#: weko_authors/utils.py:820 weko_authors/utils.py:1064 +#: weko_authors/utils.py:744 weko_authors/utils.py:982 +#, python-brace-format msgid "Cannot read {} file correctly." msgstr "" -#: weko_authors/utils.py:836 +#: weko_authors/utils.py:760 msgid "There is no data to import." msgstr "" -#: weko_authors/utils.py:839 weko_authors/utils.py:1068 +#: weko_authors/utils.py:764 weko_authors/utils.py:986 +#, python-brace-format msgid "" "{} could not be read. Make sure the file format is {} and that the file " "is UTF-8 encoded." msgstr "" -#: weko_authors/utils.py:908 +#: weko_authors/utils.py:826 +#, python-brace-format msgid "There is duplicated data in the {} file." msgstr "" -#: weko_authors/utils.py:946 +#: weko_authors/utils.py:864 +#, python-brace-format msgid "{} is required item." msgstr "" -#: weko_authors/utils.py:955 +#: weko_authors/utils.py:873 +#, python-brace-format msgid "{} should be set by one of {}." msgstr "" -#: weko_authors/utils.py:1142 +#: weko_authors/utils.py:1060 msgid "Scheme is required item." msgstr "" -#: weko_authors/utils.py:1145 +#: weko_authors/utils.py:1063 msgid "The scheme WEKO cannot be used." msgstr "" -#: weko_authors/utils.py:1148 +#: weko_authors/utils.py:1066 msgid "Name is required item." msgstr "" -#: weko_authors/utils.py:1151 +#: weko_authors/utils.py:1069 msgid "URL is not URL format." msgstr "" -#: weko_authors/utils.py:1155 +#: weko_authors/utils.py:1073 msgid "The specified scheme does not exist." msgstr "" -#: weko_authors/utils.py:1159 +#: weko_authors/utils.py:1077 msgid "The specified scheme is used in the author ID." msgstr "" -#: weko_authors/utils.py:1174 +#: weko_authors/utils.py:1092 msgid "The specified scheme is duplicated." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:98 -#: weko_authors/utils.py:1281 +#: weko_authors/templates/weko_authors/admin/author_import.html:95 +#: weko_authors/utils.py:1197 msgid "Register" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:99 -#: weko_authors/utils.py:1283 +#: weko_authors/templates/weko_authors/admin/author_import.html:96 +#: weko_authors/utils.py:1199 msgid "Update" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:100 -#: weko_authors/utils.py:1285 +#: weko_authors/templates/weko_authors/admin/author_import.html:97 +#: weko_authors/utils.py:1201 msgid "Delete" msgstr "" -#: weko_authors/utils.py:1364 +#: weko_authors/utils.py:1280 msgid "Specified WEKO ID does not exist." msgstr "" -#: weko_authors/utils.py:1376 +#: weko_authors/utils.py:1292 +#, python-brace-format msgid "" "The specified author has been deleted. Update author information with {} " "content, but author remains deleted as it is." msgstr "" -#: weko_authors/utils.py:1837 +#: weko_authors/utils.py:1688 +#, python-brace-format msgid "Invalid community ID format: {}" msgstr "" -#: weko_authors/utils.py:1843 +#: weko_authors/utils.py:1694 +#, python-brace-format msgid "Community ID(s) {} does not exist." msgstr "" -#: weko_authors/utils.py:1865 weko_authors/utils.py:1881 -#: weko_authors/utils.py:1887 weko_authors/utils.py:1950 +#: weko_authors/utils.py:1716 weko_authors/utils.py:1732 +#: weko_authors/utils.py:1738 weko_authors/utils.py:1801 +#, python-brace-format msgid "You do not have permission for this {}’s communities: {}." 
msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:46 #: weko_authors/templates/weko_authors/macros/tabs_selector.html:22 -#: weko_authors/utils.py:1865 weko_authors/utils.py:1881 -#: weko_authors/utils.py:1887 weko_authors/utils.py:1957 +#: weko_authors/utils.py:1716 weko_authors/utils.py:1732 +#: weko_authors/utils.py:1738 weko_authors/utils.py:1808 msgid "Author ID" msgstr "" -#: weko_authors/utils.py:1867 weko_authors/utils.py:1873 +#: weko_authors/utils.py:1718 weko_authors/utils.py:1724 msgid "You must include at least one managed community." msgstr "" -#: weko_authors/utils.py:1871 weko_authors/utils.py:1943 +#: weko_authors/utils.py:1722 weko_authors/utils.py:1794 msgid "You cannot manage this record." msgstr "" -#: weko_authors/utils.py:1932 +#: weko_authors/utils.py:1783 +#, python-brace-format msgid "{} not found." msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:42 -#: weko_authors/templates/weko_authors/admin/author_import.html:67 +#: weko_authors/templates/weko_authors/admin/author_import.html:64 #: weko_authors/templates/weko_authors/macros/tabs_selector.html:23 -#: weko_authors/utils.py:1961 +#: weko_authors/utils.py:1812 msgid "ID Prefix" msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:43 -#: weko_authors/templates/weko_authors/admin/author_import.html:68 +#: weko_authors/templates/weko_authors/admin/author_import.html:65 #: weko_authors/templates/weko_authors/macros/tabs_selector.html:24 -#: weko_authors/utils.py:1965 +#: weko_authors/utils.py:1816 msgid "Affiliation ID" msgstr "" -#: weko_authors/views.py:72 weko_authors/views.py:139 weko_authors/views.py:195 +#: weko_authors/views.py:72 weko_authors/views.py:116 weko_authors/views.py:154 msgid "Header Error" msgstr "" -#: weko_authors/views.py:84 weko_authors/views.py:150 -msgid "Please set WEKO ID." -msgstr "" - -#: weko_authors/views.py:91 weko_authors/views.py:159 -msgid "Please set the WEKOID in the half digit." -msgstr "" - -#: weko_authors/views.py:94 weko_authors/views.py:162 -msgid "The value is already in use as WEKO ID." -msgstr "" - -#: weko_authors/views.py:104 weko_authors/views.py:167 +#: weko_authors/views.py:81 weko_authors/views.py:126 msgid "" "Please set the affiliation start date and end date in the format yyyy-MM-" "dd." msgstr "" -#: weko_authors/views.py:106 weko_authors/views.py:169 +#: weko_authors/views.py:83 weko_authors/views.py:128 msgid "The end date must be after the start date." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:88 -#: weko_authors/views.py:129 weko_authors/views.py:186 -#: weko_authors/views.py:237 weko_authors/views.py:765 -#: weko_authors/views.py:834 +#: weko_authors/templates/weko_authors/admin/author_import.html:85 +#: weko_authors/views.py:106 weko_authors/views.py:145 +#: weko_authors/views.py:196 weko_authors/views.py:686 +#: weko_authors/views.py:755 msgid "Success" msgstr "" -#: weko_authors/contrib/validation.py:111 -msgid "WEKO ID is Half-width digits only" -msgstr "" - -#: weko_authors/contrib/validation.py:112 -msgid "WEKO ID is required item." -msgstr "" - -#: weko_authors/contrib/validation.py:133 +#: weko_authors/contrib/validation.py:107 +#, python-brace-format msgid "Specified Identifier Scheme '{}' does not exist." msgstr "" -#: weko_authors/contrib/validation.py:154 +#: weko_authors/contrib/validation.py:128 +#, python-brace-format msgid "Specified Affiliation Identifier Scheme '{}' does not exist." 
msgstr "" -#: weko_authors/contrib/validation.py:179 -#: weko_authors/contrib/validation.py:204 +#: weko_authors/contrib/validation.py:153 +#: weko_authors/contrib/validation.py:178 +#, python-brace-format msgid "External Affiliation Period must be in the format: yyyy-MM-dd, blank. {}" msgstr "" -#: weko_authors/contrib/validation.py:205 +#: weko_authors/contrib/validation.py:179 msgid "Period end must be after Period start." msgstr "" -#: weko_authors/contrib/validation.py:243 +#: weko_authors/contrib/validation.py:217 +#, python-brace-format msgid "External author identifier exists in DB.
{}" msgstr "" @@ -309,7 +304,7 @@ msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:41 #: weko_authors/templates/weko_authors/admin/author_export.html:49 -#: weko_authors/templates/weko_authors/admin/author_import.html:66 +#: weko_authors/templates/weko_authors/admin/author_import.html:63 msgid "Author DB" msgstr "" @@ -334,7 +329,7 @@ msgid "Execute" msgstr "" #: weko_authors/templates/weko_authors/admin/author_export.html:51 -#: weko_authors/templates/weko_authors/admin/author_import.html:56 +#: weko_authors/templates/weko_authors/admin/author_import.html:53 msgid "Cancel" msgstr "" @@ -370,163 +365,155 @@ msgstr "" msgid "No." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:47 -msgid "Current WEKO ID" -msgstr "" - -#: weko_authors/templates/weko_authors/admin/author_import.html:48 -msgid "New WEKO ID" -msgstr "" - -#: weko_authors/templates/weko_authors/admin/author_import.html:49 -msgid "Previous WEKO ID" +#: weko_authors/templates/weko_authors/admin/author_import.html:46 +msgid "WEKO ID" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:50 +#: weko_authors/templates/weko_authors/admin/author_import.html:47 msgid "full_name" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:53 +#: weko_authors/templates/weko_authors/admin/author_import.html:50 msgid "Select File" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:54 +#: weko_authors/templates/weko_authors/admin/author_import.html:51 msgid "Selected file name" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:55 +#: weko_authors/templates/weko_authors/admin/author_import.html:52 msgid "Next" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:57 +#: weko_authors/templates/weko_authors/admin/author_import.html:54 msgid "Force Change Mode" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:58 +#: weko_authors/templates/weko_authors/admin/author_import.html:55 msgid "Disclaimer:" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:59 +#: weko_authors/templates/weko_authors/admin/author_import.html:56 msgid "" "- When using this function, all metadata using author information is " "forcibly changed to match the author DB when updating." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:60 +#: weko_authors/templates/weko_authors/admin/author_import.html:57 msgid "" "- If not used, only the author and affiliation identifiers are changed " "when updating." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:61 +#: weko_authors/templates/weko_authors/admin/author_import.html:58 msgid "" "- Before starting this operation, you need fully understand the contents " "and metadata using AuthorDB at your institution." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:62 +#: weko_authors/templates/weko_authors/admin/author_import.html:59 msgid "- Use this function on your own responsibility." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:63 +#: weko_authors/templates/weko_authors/admin/author_import.html:60 msgid "" "- National Institute of Informatics (NII) does not take any " "responsibility for damages caused by using this function." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:64 +#: weko_authors/templates/weko_authors/admin/author_import.html:61 msgid "I agree to the terms of use." 
msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:65 +#: weko_authors/templates/weko_authors/admin/author_import.html:62 msgid "Import target" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:71 +#: weko_authors/templates/weko_authors/admin/author_import.html:68 msgid "Summary" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:72 +#: weko_authors/templates/weko_authors/admin/author_import.html:69 msgid "Total" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:73 +#: weko_authors/templates/weko_authors/admin/author_import.html:70 msgid "New Creator" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:74 +#: weko_authors/templates/weko_authors/admin/author_import.html:71 msgid "Update Creator" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:75 +#: weko_authors/templates/weko_authors/admin/author_import.html:72 msgid "Delete Creator" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:76 +#: weko_authors/templates/weko_authors/admin/author_import.html:73 msgid "Result Error" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:77 +#: weko_authors/templates/weko_authors/admin/author_import.html:74 msgid "Mail Address" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:78 +#: weko_authors/templates/weko_authors/admin/author_import.html:75 msgid "Check Result" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:79 +#: weko_authors/templates/weko_authors/admin/author_import.html:76 msgid "Scheme" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:80 +#: weko_authors/templates/weko_authors/admin/author_import.html:77 msgid "Scheme Name" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:81 +#: weko_authors/templates/weko_authors/admin/author_import.html:78 msgid "URL" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:84 +#: weko_authors/templates/weko_authors/admin/author_import.html:81 msgid "Start Date" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:85 +#: weko_authors/templates/weko_authors/admin/author_import.html:82 msgid "End Date" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:86 +#: weko_authors/templates/weko_authors/admin/author_import.html:83 msgid "Status" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:87 +#: weko_authors/templates/weko_authors/admin/author_import.html:84 msgid "To Do" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:89 +#: weko_authors/templates/weko_authors/admin/author_import.html:86 msgid "Failure" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:90 +#: weko_authors/templates/weko_authors/admin/author_import.html:87 msgid "Pending" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:97 +#: weko_authors/templates/weko_authors/admin/author_import.html:94 msgid "Warning" msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:103 +#: weko_authors/templates/weko_authors/admin/author_import.html:100 msgid "The selected target table and the import file format are different." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:107 +#: weko_authors/templates/weko_authors/admin/author_import.html:104 msgid "Celery is not running." 
msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:108 +#: weko_authors/templates/weko_authors/admin/author_import.html:105 msgid "Import is in progress." msgstr "" -#: weko_authors/templates/weko_authors/admin/author_import.html:109 +#: weko_authors/templates/weko_authors/admin/author_import.html:106 msgid "Import is in progress on another device." msgstr "" From 7d932a92b88b15d56defe02ce868371fb167e3e1 Mon Sep 17 00:00:00 2001 From: ayumi-nishida Date: Wed, 26 Nov 2025 08:50:31 +0900 Subject: [PATCH 6/9] =?UTF-8?q?=E6=9C=AA=E5=AE=9A=E7=BE=A9=E3=81=AE?= =?UTF-8?q?=E3=82=AF=E3=83=A9=E3=82=B9=E6=8C=87=E5=AE=9A=E3=82=92=E4=BF=AE?= =?UTF-8?q?=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/weko-authors/weko_authors/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/weko-authors/weko_authors/schema.py b/modules/weko-authors/weko_authors/schema.py index b3ff845aa8..1c2316b0b8 100644 --- a/modules/weko-authors/weko_authors/schema.py +++ b/modules/weko-authors/weko_authors/schema.py @@ -224,7 +224,7 @@ class AuthorUpdateRequestSchema(Schema): force_change = fields.Bool() """Flag to force change""" - author = fields.Nested(AuthorUpdateSchema, required=True) + author = fields.Nested(AuthorSchema, required=True) """Updated author information.""" class Meta: From 3adbc080780c3da92e768f1f731e311297fbb410 Mon Sep 17 00:00:00 2001 From: ayumi-nishida Date: Thu, 27 Nov 2025 21:11:24 +0900 Subject: [PATCH 7/9] =?UTF-8?q?=E5=BC=95=E6=95=B0=E8=AA=A4=E3=82=8A?= =?UTF-8?q?=E3=82=92=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/weko-authors/weko_authors/tasks.py | 4 ++-- modules/weko-authors/weko_authors/utils.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/weko-authors/weko_authors/tasks.py b/modules/weko-authors/weko_authors/tasks.py index 41632197c3..df222c6b02 100644 --- a/modules/weko-authors/weko_authors/tasks.py +++ b/modules/weko-authors/weko_authors/tasks.py @@ -70,7 +70,7 @@ def export_all(export_target, user_id): @shared_task -def import_author(author, request_info): +def import_author(author, force_change_mode, request_info): """Import Author. 
Args: @@ -90,7 +90,7 @@ def import_author(author, request_info): # Retry processing in case of connection error for attempt in range(retrys): try: - import_author_to_system(author, status) + import_author_to_system(author, status, force_change_mode) result['status'] = states.SUCCESS break except SQLAlchemyError as ex: diff --git a/modules/weko-authors/weko_authors/utils.py b/modules/weko-authors/weko_authors/utils.py index 498a17e642..8d0d167526 100644 --- a/modules/weko-authors/weko_authors/utils.py +++ b/modules/weko-authors/weko_authors/utils.py @@ -833,11 +833,11 @@ def validate_import_data(file_format, file_data, mapping_ids, mapping, list_impo if item.get('status') == 'new': item["communityIds"] = validate_community_ids(community_ids, is_create=True) elif item.get('status') == 'update': - old = Authors.query.get('pk_id') + old = Authors.query.get(weko_id) old_community_ids = [c.id for c in old.communities] item["communityIds"] = validate_community_ids(community_ids, old_ids=old_community_ids) elif item.get('status') == 'deleted': - check, message = check_delete_author('pk_id') + check, message = check_delete_author(weko_id) if not check: errors.append(message) except AuthorsValidationError as e: From 26ee8f805653dae6fccc735a57093dcd072cccc8 Mon Sep 17 00:00:00 2001 From: ayumi-nishida Date: Thu, 27 Nov 2025 21:23:01 +0900 Subject: [PATCH 8/9] =?UTF-8?q?react=E3=80=81anguler=E3=81=8B=E3=82=89?= =?UTF-8?q?=E3=81=AE=E3=83=93=E3=83=AB=E3=83=89=E3=83=95=E3=82=A1=E3=82=A4?= =?UTF-8?q?=E3=83=AB=E3=82=92=E4=B8=8A=E6=9B=B8=E3=81=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../js/invenio_communities/inline.community.bundle.js | 2 +- .../static/js/invenio_communities/main.bundle.js | 2 +- .../static/js/invenio_communities/main.community.bundle.js | 2 +- .../static/js/weko_authors/app-author-export.chunk.js | 6 +++--- .../static/js/weko_authors/app-author-export.main.chunk.js | 4 ++-- .../js/weko_authors/app-author-export.runtime-main.js | 2 +- .../static/js/weko_authors/app-author-import.chunk.js | 6 +++--- .../static/js/weko_authors/app-author-import.main.chunk.js | 4 ++-- .../js/weko_authors/app-author-import.runtime-main.js | 2 +- .../static/js/weko_authors/inline.affiliation.bundle.js | 2 +- .../weko_authors/static/js/weko_authors/inline.bundle.js | 2 +- .../static/js/weko_authors/inline.prefix.bundle.js | 2 +- .../static/js/weko_authors/main.affiliation.bundle.js | 2 +- .../weko_authors/static/js/weko_authors/main.bundle.js | 2 +- .../static/js/weko_authors/main.prefix.bundle.js | 2 +- .../static/js/weko_index_tree/inline.bundle.js | 2 +- .../static/js/weko_index_tree/main.bundle.js | 2 +- .../static/js/weko_indextree_journal/inline.bundle.js | 2 +- .../static/js/weko_indextree_journal/main.bundle.js | 2 +- .../js/weko_items_ui/inline.items.authorSearch.bundle.js | 2 +- .../weko_items_ui/static/js/weko_items_ui/main.bundle.js | 2 +- .../js/weko_items_ui/main.items.authorSearch.bundle.js | 2 +- .../static/css/weko_search_ui/facet_chunk.css | 5 ++--- .../weko_search_ui/static/js/weko_search_ui/facet.chunk.js | 4 +++- .../static/js/weko_search_ui/facet.main.chunk.js | 5 ++--- .../static/js/weko_search_ui/facet.runtime-main.js | 3 ++- .../weko_theme/static/js/weko_theme/main.bundle.js | 2 +- 27 files changed, 38 insertions(+), 37 deletions(-) diff --git a/modules/invenio-communities/invenio_communities/static/js/invenio_communities/inline.community.bundle.js 
b/modules/invenio-communities/invenio_communities/static/js/invenio_communities/inline.community.bundle.js index 74fc91cba3..8677a81436 100644 --- a/modules/invenio-communities/invenio_communities/static/js/invenio_communities/inline.community.bundle.js +++ b/modules/invenio-communities/invenio_communities/static/js/invenio_communities/inline.community.bundle.js @@ -1 +1 @@ -!function(e){var n=window.webpackJsonp;window.webpackJsonp=function(r,c,u){for(var a,i,f,l=0,s=[];l>>((3&t)<<3)&255;return o}}},"5NFG":function(e,t){},EVdn:function(e,t,n){var r;!function(t,n){"use strict";"object"==typeof e.exports?e.exports=t.document?n(t,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return n(e)}:n(t)}("undefined"!=typeof window?window:this,function(n,o){"use strict";var i=[],s=Object.getPrototypeOf,a=i.slice,u=i.flat?function(e){return i.flat.call(e)}:function(e){return i.concat.apply([],e)},l=i.push,c=i.indexOf,d={},p=d.toString,f=d.hasOwnProperty,h=f.toString,y=h.call(Object),g={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},m=function(e){return null!=e&&e===e.window},b=n.document,w={type:!0,src:!0,nonce:!0,noModule:!0};function _(e,t,n){var r,o,i=(n=n||b).createElement("script");if(i.text=e,t)for(r in w)(o=t[r]||t.getAttribute&&t.getAttribute(r))&&i.setAttribute(r,o);n.head.appendChild(i).parentNode.removeChild(i)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?d[p.call(e)]||"object":typeof e}var C=/HTML$/i,E=function(e,t){return new E.fn.init(e,t)};function T(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!m(e)&&("array"===n||0===t||"number"==typeof t&&t>0&&t-1 in e)}function S(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}E.fn=E.prototype={jquery:"3.7.1",constructor:E,length:0,toArray:function(){return a.call(this)},get:function(e){return null==e?a.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=E.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return E.each(this,e)},map:function(e){return this.pushStack(E.map(this,function(t,n){return e.call(t,n,t)}))},slice:function(){return this.pushStack(a.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},even:function(){return this.pushStack(E.grep(this,function(e,t){return(t+1)%2}))},odd:function(){return this.pushStack(E.grep(this,function(e,t){return t%2}))},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(n>=0&&n+~]|"+I+")"+I+"*"),B=new RegExp(I+"|>"),q=new RegExp(L),U=new RegExp("^"+M+"$"),z={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),TAG:new RegExp("^("+M+"|[*])"),ATTR:new RegExp("^"+j),PSEUDO:new RegExp("^"+L),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+I+"*(even|odd|(([+-]|)(\\d*)n|)"+I+"*(?:([+-]|)"+I+"*(\\d+)|))"+I+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+I+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+I+"*((?:-\\d)?\\d*)"+I+"*\\)|)(?=[^-]|$)","i")},$=/^(?:input|select|textarea|button)$/i,W=/^h\d$/i,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,G=/[+~]/,Q=new RegExp("\\\\[\\da-fA-F]{1,6}"+I+"?|\\\\([^\\r\\n\\f])","g"),X=function(e,t){var n="0x"+e.slice(1)-65536;return 
t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},Y=function(){ue()},J=pe(function(e){return!0===e.disabled&&S(e,"fieldset")},{dir:"parentNode",next:"legend"});try{y.apply(i=a.call(D.childNodes),D.childNodes)}catch(e){y={apply:function(e,t){P.apply(e,a.call(t))},call:function(e){P.apply(e,a.call(arguments,1))}}}function K(e,t,n,r){var o,i,s,a,l,c,f,h=t&&t.ownerDocument,m=t?t.nodeType:9;if(n=n||[],"string"!=typeof e||!e||1!==m&&9!==m&&11!==m)return n;if(!r&&(ue(t),t=t||u,d)){if(11!==m&&(l=Z.exec(e)))if(o=l[1]){if(9===m){if(!(s=t.getElementById(o)))return n;if(s.id===o)return y.call(n,s),n}else if(h&&(s=h.getElementById(o))&&K.contains(t,s)&&s.id===o)return y.call(n,s),n}else{if(l[2])return y.apply(n,t.getElementsByTagName(e)),n;if((o=l[3])&&t.getElementsByClassName)return y.apply(n,t.getElementsByClassName(o)),n}if(!(C[e+" "]||p&&p.test(e))){if(f=e,h=t,1===m&&(B.test(e)||H.test(e))){for((h=G.test(e)&&ae(t.parentNode)||t)==t&&g.scope||((a=t.getAttribute("id"))?a=E.escapeSelector(a):t.setAttribute("id",a=v)),i=(c=ce(e)).length;i--;)c[i]=(a?"#"+a:":scope")+" "+de(c[i]);f=c.join(",")}try{return y.apply(n,h.querySelectorAll(f)),n}catch(t){C(e,!0)}finally{a===v&&t.removeAttribute("id")}}}return me(e.replace(O,"$1"),t,n,r)}function ee(){var e=[];return function n(r,o){return e.push(r+" ")>t.cacheLength&&delete n[e.shift()],n[r+" "]=o}}function te(e){return e[v]=!0,e}function ne(e){var t=u.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function re(e){return function(t){return S(t,"input")&&t.type===e}}function oe(e){return function(t){return(S(t,"input")||S(t,"button"))&&t.type===e}}function ie(e){return function(t){return"form"in t?t.parentNode&&!1===t.disabled?"label"in t?"label"in t.parentNode?t.parentNode.disabled===e:t.disabled===e:t.isDisabled===e||t.isDisabled!==!e&&J(t)===e:t.disabled===e:"label"in t&&t.disabled===e}}function se(e){return te(function(t){return t=+t,te(function(n,r){for(var o,i=e([],n.length,t),s=i.length;s--;)n[o=i[s]]&&(n[o]=!(r[o]=n[o]))})})}function ae(e){return e&&void 0!==e.getElementsByTagName&&e}function ue(e){var n,r=e?e.ownerDocument||e:D;return r!=u&&9===r.nodeType&&r.documentElement?(l=(u=r).documentElement,d=!E.isXMLDoc(u),h=l.matches||l.webkitMatchesSelector||l.msMatchesSelector,l.msMatchesSelector&&D!=u&&(n=u.defaultView)&&n.top!==n&&n.addEventListener("unload",Y),g.getById=ne(function(e){return l.appendChild(e).id=E.expando,!u.getElementsByName||!u.getElementsByName(E.expando).length}),g.disconnectedMatch=ne(function(e){return h.call(e,"*")}),g.scope=ne(function(){return u.querySelectorAll(":scope")}),g.cssHas=ne(function(){try{return u.querySelector(":has(*,:jqfake)"),!1}catch(e){return!0}}),g.getById?(t.filter.ID=function(e){var t=e.replace(Q,X);return function(e){return e.getAttribute("id")===t}},t.find.ID=function(e,t){if(void 0!==t.getElementById&&d){var n=t.getElementById(e);return n?[n]:[]}}):(t.filter.ID=function(e){var t=e.replace(Q,X);return function(e){var n=void 0!==e.getAttributeNode&&e.getAttributeNode("id");return n&&n.value===t}},t.find.ID=function(e,t){if(void 0!==t.getElementById&&d){var n,r,o,i=t.getElementById(e);if(i){if((n=i.getAttributeNode("id"))&&n.value===e)return[i];for(o=t.getElementsByName(e),r=0;i=o[r++];)if((n=i.getAttributeNode("id"))&&n.value===e)return[i]}return[]}}),t.find.TAG=function(e,t){return void 0!==t.getElementsByTagName?t.getElementsByTagName(e):t.querySelectorAll(e)},t.find.CLASS=function(e,t){if(void 
0!==t.getElementsByClassName&&d)return t.getElementsByClassName(e)},p=[],ne(function(e){var t;l.appendChild(e).innerHTML="",e.querySelectorAll("[selected]").length||p.push("\\["+I+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+v+"-]").length||p.push("~="),e.querySelectorAll("a#"+v+"+*").length||p.push(".#.+[+~]"),e.querySelectorAll(":checked").length||p.push(":checked"),(t=u.createElement("input")).setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),l.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&p.push(":enabled",":disabled"),(t=u.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||p.push("\\["+I+"*name"+I+"*="+I+"*(?:''|\"\")")}),g.cssHas||p.push(":has"),p=p.length&&new RegExp(p.join("|")),T=function(e,t){if(e===t)return s=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!g.sortDetached&&t.compareDocumentPosition(e)===n?e===u||e.ownerDocument==D&&K.contains(D,e)?-1:t===u||t.ownerDocument==D&&K.contains(D,t)?1:o?c.call(o,e)-c.call(o,t):0:4&n?-1:1)},u):u}for(e in K.matches=function(e,t){return K(e,null,null,t)},K.matchesSelector=function(e,t){if(ue(e),d&&!C[t+" "]&&(!p||!p.test(t)))try{var n=h.call(e,t);if(n||g.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){C(t,!0)}return K(t,u,null,[e]).length>0},K.contains=function(e,t){return(e.ownerDocument||e)!=u&&ue(e),E.contains(e,t)},K.attr=function(e,n){(e.ownerDocument||e)!=u&&ue(e);var r=t.attrHandle[n.toLowerCase()],o=r&&f.call(t.attrHandle,n.toLowerCase())?r(e,n,!d):void 0;return void 0!==o?o:e.getAttribute(n)},K.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},E.uniqueSort=function(e){var t,n=[],r=0,i=0;if(s=!g.sortStable,o=!g.sortStable&&a.call(e,0),N.call(e,T),s){for(;t=e[i++];)t===e[i]&&(r=n.push(i));for(;r--;)A.call(e,n[r],1)}return o=null,e},E.fn.uniqueSort=function(){return this.pushStack(E.uniqueSort(a.apply(this)))},(t=E.expr={cacheLength:50,createPseudo:te,match:z,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(Q,X),e[3]=(e[3]||e[4]||e[5]||"").replace(Q,X),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||K.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&K.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return z.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&q.test(n)&&(t=ce(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(Q,X).toLowerCase();return"*"===e?function(){return!0}:function(e){return S(e,t)}},CLASS:function(e){var t=w[e+" "];return t||(t=new RegExp("(^|"+I+")"+e+"("+I+"|$)"))&&w(e,function(e){return t.test("string"==typeof e.className&&e.className||void 0!==e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r){var o=K.attr(r,e);return null==o?"!="===t:!t||(o+="","="===t?o===n:"!="===t?o!==n:"^="===t?n&&0===o.indexOf(n):"*="===t?n&&o.indexOf(n)>-1:"$="===t?n&&o.slice(-n.length)===n:"~="===t?(" "+o.replace(V," ")+" ").indexOf(n)>-1:"|="===t&&(o===n||o.slice(0,n.length+1)===n+"-"))}},CHILD:function(e,t,n,r,o){var 
i="nth"!==e.slice(0,3),s="last"!==e.slice(-4),a="of-type"===t;return 1===r&&0===o?function(e){return!!e.parentNode}:function(t,n,u){var l,c,d,p,f,h=i!==s?"nextSibling":"previousSibling",y=t.parentNode,g=a&&t.nodeName.toLowerCase(),b=!u&&!a,w=!1;if(y){if(i){for(;h;){for(d=t;d=d[h];)if(a?S(d,g):1===d.nodeType)return!1;f=h="only"===e&&!f&&"nextSibling"}return!0}if(f=[s?y.firstChild:y.lastChild],s&&b){for(w=(p=(l=(c=y[v]||(y[v]={}))[e]||[])[0]===m&&l[1])&&l[2],d=p&&y.childNodes[p];d=++p&&d&&d[h]||(w=p=0)||f.pop();)if(1===d.nodeType&&++w&&d===t){c[e]=[m,p,w];break}}else if(b&&(w=p=(l=(c=t[v]||(t[v]={}))[e]||[])[0]===m&&l[1]),!1===w)for(;(d=++p&&d&&d[h]||(w=p=0)||f.pop())&&((a?!S(d,g):1!==d.nodeType)||!++w||(b&&((c=d[v]||(d[v]={}))[e]=[m,w]),d!==t)););return(w-=o)===r||w%r==0&&w/r>=0}}},PSEUDO:function(e,n){var r,o=t.pseudos[e]||t.setFilters[e.toLowerCase()]||K.error("unsupported pseudo: "+e);return o[v]?o(n):o.length>1?(r=[e,e,"",n],t.setFilters.hasOwnProperty(e.toLowerCase())?te(function(e,t){for(var r,i=o(e,n),s=i.length;s--;)e[r=c.call(e,i[s])]=!(t[r]=i[s])}):function(e){return o(e,0,r)}):o}},pseudos:{not:te(function(e){var t=[],n=[],r=ve(e.replace(O,"$1"));return r[v]?te(function(e,t,n,o){for(var i,s=r(e,null,o,[]),a=e.length;a--;)(i=s[a])&&(e[a]=!(t[a]=i))}):function(e,o,i){return t[0]=e,r(t,null,i,n),t[0]=null,!n.pop()}}),has:te(function(e){return function(t){return K(e,t).length>0}}),contains:te(function(e){return e=e.replace(Q,X),function(t){return(t.textContent||E.text(t)).indexOf(e)>-1}}),lang:te(function(e){return U.test(e||"")||K.error("unsupported lang: "+e),e=e.replace(Q,X).toLowerCase(),function(t){var n;do{if(n=d?t.lang:t.getAttribute("xml:lang")||t.getAttribute("lang"))return(n=n.toLowerCase())===e||0===n.indexOf(e+"-")}while((t=t.parentNode)&&1===t.nodeType);return!1}}),target:function(e){var t=n.location&&n.location.hash;return t&&t.slice(1)===e.id},root:function(e){return e===l},focus:function(e){return e===function(){try{return u.activeElement}catch(e){}}()&&u.hasFocus()&&!!(e.type||e.href||~e.tabIndex)},enabled:ie(!1),disabled:ie(!0),checked:function(e){return S(e,"input")&&!!e.checked||S(e,"option")&&!!e.selected},selected:function(e){return!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!t.pseudos.empty(e)},header:function(e){return W.test(e.nodeName)},input:function(e){return $.test(e.nodeName)},button:function(e){return S(e,"input")&&"button"===e.type||S(e,"button")},text:function(e){var t;return S(e,"input")&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:se(function(){return[0]}),last:se(function(e,t){return[t-1]}),eq:se(function(e,t,n){return[n<0?n+t:n]}),even:se(function(e,t){for(var n=0;nt?t:n;--r>=0;)e.push(r);return e}),gt:se(function(e,t,n){for(var r=n<0?n+t:n;++r1?function(t,n,r){for(var o=e.length;o--;)if(!e[o](t,n,r))return!1;return!0}:e[0]}function he(e,t,n,r,o){for(var i,s=[],a=0,u=e.length,l=null!=t;a-1&&(i[l]=!(s[l]=p))}}else f=he(f===s?f.splice(v,f.length):f),o?o(null,s,f,u):y.apply(s,f)})}function ge(e){for(var n,o,i,s=e.length,a=t.relative[e[0].type],u=a||t.relative[" "],l=a?1:0,d=pe(function(e){return e===n},u,!0),p=pe(function(e){return c.call(n,e)>-1},u,!0),f=[function(e,t,o){var i=!a&&(o||t!=r)||((n=t).nodeType?d(e,t,o):p(e,t,o));return n=null,i}];l1&&fe(f),l>1&&de(e.slice(0,l-1).concat({value:" "===e[l-2].type?"*":""})).replace(O,"$1"),o,l0,i=e.length>0,s=function(s,a,l,c,p){var 
f,h,g,v=0,b="0",w=s&&[],_=[],x=r,C=s||i&&t.find.TAG("*",p),T=m+=null==x?1:Math.random()||.1,S=C.length;for(p&&(r=a==u||a||p);b!==S&&null!=(f=C[b]);b++){if(i&&f){for(h=0,a||f.ownerDocument==u||(ue(f),l=!d);g=e[h++];)if(g(f,a||u,l)){y.call(c,f);break}p&&(m=T)}o&&((f=!g&&f)&&v--,s&&w.push(f))}if(v+=b,o&&b!==v){for(h=0;g=n[h++];)g(w,_,a,l);if(s){if(v>0)for(;b--;)w[b]||_[b]||(_[b]=k.call(c));_=he(_)}y.apply(c,_),p&&!s&&_.length>0&&v+n.length>1&&E.uniqueSort(c)}return p&&(m=T,r=x),w};return o?te(s):s}(s,i))).selector=e}return a}function me(e,n,r,o){var i,s,a,u,l,c="function"==typeof e&&e,p=!o&&ce(e=c.selector||e);if(r=r||[],1===p.length){if((s=p[0]=p[0].slice(0)).length>2&&"ID"===(a=s[0]).type&&9===n.nodeType&&d&&t.relative[s[1].type]){if(!(n=(t.find.ID(a.matches[0].replace(Q,X),n)||[])[0]))return r;c&&(n=n.parentNode),e=e.slice(s.shift().value.length)}for(i=z.needsContext.test(e)?0:s.length;i--&&!t.relative[u=(a=s[i]).type];)if((l=t.find[u])&&(o=l(a.matches[0].replace(Q,X),G.test(s[0].type)&&ae(n.parentNode)||n))){if(s.splice(i,1),!(e=o.length&&de(s)))return y.apply(r,o),r;break}}return(c||ve(e,p))(o,n,!d,r,!n||G.test(e)&&ae(n.parentNode)||n),r}le.prototype=t.filters=t.pseudos,t.setFilters=new le,g.sortStable=v.split("").sort(T).join("")===v,ue(),g.sortDetached=ne(function(e){return 1&e.compareDocumentPosition(u.createElement("fieldset"))}),E.find=K,E.expr[":"]=E.expr.pseudos,E.unique=E.uniqueSort,K.compile=ve,K.select=me,K.setDocument=ue,K.tokenize=ce,K.escape=E.escapeSelector,K.getText=E.text,K.isXML=E.isXMLDoc,K.selectors=E.expr,K.support=E.support,K.uniqueSort=E.uniqueSort}();var j=function(e,t,n){for(var r=[],o=void 0!==n;(e=e[t])&&9!==e.nodeType;)if(1===e.nodeType){if(o&&E(e).is(n))break;r.push(e)}return r},L=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},V=E.expr.match.needsContext,F=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function H(e,t,n){return v(t)?E.grep(e,function(e,r){return!!t.call(e,r,e)!==n}):t.nodeType?E.grep(e,function(e){return e===t!==n}):"string"!=typeof t?E.grep(e,function(e){return c.call(t,e)>-1!==n}):E.filter(t,e,n)}E.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?E.find.matchesSelector(r,e)?[r]:[]:E.find.matches(e,E.grep(t,function(e){return 1===e.nodeType}))},E.fn.extend({find:function(e){var t,n,r=this.length,o=this;if("string"!=typeof e)return this.pushStack(E(e).filter(function(){for(t=0;t1?E.uniqueSort(n):n},filter:function(e){return this.pushStack(H(this,e||[],!1))},not:function(e){return this.pushStack(H(this,e||[],!0))},is:function(e){return!!H(this,"string"==typeof e&&V.test(e)?E(e):e||[],!1).length}});var B,q=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(E.fn.init=function(e,t,n){var r,o;if(!e)return this;if(n=n||B,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&e.length>=3?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(E.merge(this,E.parseHTML(r[1],(t=t instanceof E?t[0]:t)&&t.nodeType?t.ownerDocument||t:b,!0)),F.test(r[1])&&E.isPlainObject(t))for(r in t)v(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(o=b.getElementById(r[2]))&&(this[0]=o,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):v(e)?void 0!==n.ready?n.ready(e):e(E):E.makeArray(e,this)}).prototype=E.fn,B=E(b);var U=/^(?:parents|prev(?:Until|All))/,z={children:!0,contents:!0,next:!0,prev:!0};function $(e,t){for(;(e=e[t])&&1!==e.nodeType;);return e}E.fn.extend({has:function(e){var 
t=E(e,this),n=t.length;return this.filter(function(){for(var e=0;e-1:1===n.nodeType&&E.find.matchesSelector(n,e))){i.push(n);break}return this.pushStack(i.length>1?E.uniqueSort(i):i)},index:function(e){return e?"string"==typeof e?c.call(E(e),this[0]):c.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(E.uniqueSort(E.merge(this.get(),E(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),E.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return j(e,"parentNode")},parentsUntil:function(e,t,n){return j(e,"parentNode",n)},next:function(e){return $(e,"nextSibling")},prev:function(e){return $(e,"previousSibling")},nextAll:function(e){return j(e,"nextSibling")},prevAll:function(e){return j(e,"previousSibling")},nextUntil:function(e,t,n){return j(e,"nextSibling",n)},prevUntil:function(e,t,n){return j(e,"previousSibling",n)},siblings:function(e){return L((e.parentNode||{}).firstChild,e)},children:function(e){return L(e.firstChild)},contents:function(e){return null!=e.contentDocument&&s(e.contentDocument)?e.contentDocument:(S(e,"template")&&(e=e.content||e),E.merge([],e.childNodes))}},function(e,t){E.fn[e]=function(n,r){var o=E.map(this,t,n);return"Until"!==e.slice(-5)&&(r=n),r&&"string"==typeof r&&(o=E.filter(r,o)),this.length>1&&(z[e]||E.uniqueSort(o),U.test(e)&&o.reverse()),this.pushStack(o)}});var W=/[^\x20\t\r\n\f]+/g;function Z(e){return e}function G(e){throw e}function Q(e,t,n,r){var o;try{e&&v(o=e.promise)?o.call(e).done(t).fail(n):e&&v(o=e.then)?o.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}E.Callbacks=function(e){e="string"==typeof e?function(e){var t={};return E.each(e.match(W)||[],function(e,n){t[n]=!0}),t}(e):E.extend({},e);var t,n,r,o,i=[],s=[],a=-1,u=function(){for(o=o||e.once,r=t=!0;s.length;a=-1)for(n=s.shift();++a-1;)i.splice(n,1),n<=a&&a--}),this},has:function(e){return e?E.inArray(e,i)>-1:i.length>0},empty:function(){return i&&(i=[]),this},disable:function(){return o=s=[],i=n="",this},disabled:function(){return!i},lock:function(){return o=s=[],n||t||(i=n=""),this},locked:function(){return!!o},fireWith:function(e,n){return o||(n=[e,(n=n||[]).slice?n.slice():n],s.push(n),t||u()),this},fire:function(){return l.fireWith(this,arguments),this},fired:function(){return!!r}};return l},E.extend({Deferred:function(e){var t=[["notify","progress",E.Callbacks("memory"),E.Callbacks("memory"),2],["resolve","done",E.Callbacks("once memory"),E.Callbacks("once memory"),0,"resolved"],["reject","fail",E.Callbacks("once memory"),E.Callbacks("once memory"),1,"rejected"]],r="pending",o={state:function(){return r},always:function(){return i.done(arguments).fail(arguments),this},catch:function(e){return o.then(null,e)},pipe:function(){var e=arguments;return E.Deferred(function(n){E.each(t,function(t,r){var o=v(e[r[4]])&&e[r[4]];i[r[1]](function(){var e=o&&o.apply(this,arguments);e&&v(e.promise)?e.promise().progress(n.notify).done(n.resolve).fail(n.reject):n[r[0]+"With"](this,o?[e]:arguments)})}),e=null}).promise()},then:function(e,r,o){var i=0;function s(e,t,r,o){return function(){var a=this,u=arguments,l=function(){var n,l;if(!(e=i&&(r!==G&&(a=void 0,u=[n]),t.rejectWith(a,u))}};e?c():(E.Deferred.getErrorHook?c.error=E.Deferred.getErrorHook():E.Deferred.getStackHook&&(c.error=E.Deferred.getStackHook()),n.setTimeout(c))}}return 