|
1 | | -from unittest.mock import MagicMock, patch |
| 1 | +from unittest.mock import MagicMock, patch, ANY |
2 | 2 |
|
3 | 3 | import pytest |
4 | 4 | import torch |
@@ -37,12 +37,17 @@ def test_device_for_torch(): |
37 | 37 | assert dev.type == "cuda" |
38 | 38 | assert dev.index == 0 |
39 | 39 |
|
| 40 | + # Test invalid device fallback |
| 41 | + with patch("torch.cuda.is_available", return_value=False): |
| 42 | + # Even if we ask for cuda, if not available it might raise or fallback depending on implementation |
| 43 | + # _device_for_torch calls _parse_device_string which checks availability. |
| 44 | + # If _parse_device_string returns cpu, _device_for_torch returns cpu device. |
| 45 | + dev = _device_for_torch("cuda") |
| 46 | + assert dev.type == "cpu" |
| 47 | + |
40 | 48 | def test_load_model_fairchem_logic(mock_hf_token): |
41 | 49 | config = Config({"optimization": {"model_name": "test_model"}}) |
42 | 50 |
|
43 | | - # We need to patch fairchem.core inside the function or pre-import it |
44 | | - # Since it is a local import, patching the module where it lives (fairchem.core) works |
45 | | - # if we can import it first. |
46 | 51 | try: |
47 | 52 | import fairchem.core as _ # noqa: F401 |
48 | 53 | except ImportError: |
@@ -75,3 +80,93 @@ def test_load_model_torchsim_logic(mock_hf_token): |
75 | 80 |
|
76 | 81 | mock_model_cls.assert_called() |
77 | 82 | assert model is mock_model_cls.return_value |
| 83 | + |
def test_load_model_fairchem_path(mock_hf_token, tmp_path):
    """Loading via an explicit ``model_path`` hands that path to the predict unit."""
    checkpoint = tmp_path / "model.pt"
    checkpoint.touch()

    cfg = Config({"optimization": {"model_path": str(checkpoint)}})

    # fairchem is an optional dependency; skip rather than fail when absent.
    try:
        import fairchem.core  # noqa: F401
    except ImportError:
        pytest.skip("fairchem.core not installed")

    with patch("fairchem.core.pretrained_mlip.load_predict_unit") as load_unit:
        with patch("fairchem.core.FAIRChemCalculator") as calc_cls:
            load_unit.return_value = MagicMock()
            calc_cls.return_value = MagicMock()

            load_model_fairchem(cfg)

            # The file path (not a model name) must reach the loader.
            load_unit.assert_called_with(path=checkpoint, device=ANY)
            calc_cls.assert_called()
| 105 | + |
def test_load_model_torchsim_path(mock_hf_token, tmp_path):
    """An explicit ``model_path`` is forwarded as the torch-sim ``model`` kwarg."""
    checkpoint = tmp_path / "model.pt"
    checkpoint.touch()

    cfg = Config({"optimization": {"model_path": str(checkpoint)}})

    # torch_sim is optional; skip the test when it is not importable.
    try:
        import torch_sim.models.fairchem  # noqa: F401
    except ImportError:
        pytest.skip("torch_sim not installed")

    with patch("torch_sim.models.fairchem.FairChemModel") as model_cls:
        model_cls.return_value = MagicMock()

        load_model_torchsim(cfg)

        # The path object should appear verbatim in the constructor kwargs.
        assert model_cls.call_args.kwargs["model"] == checkpoint
def test_model_cache_creation_failure(mock_hf_token, caplog):
    """When the cache directory cannot be created, loading proceeds without a
    ``cache_dir`` argument and a warning is logged."""
    cfg = Config(
        {"optimization": {"model_cache_dir": "/invalid/path/cache", "model_name": "uma"}}
    )

    try:
        import fairchem.core  # noqa: F401
    except ImportError:
        pytest.skip("fairchem.core not installed")

    with patch("os.makedirs", side_effect=OSError("Permission denied")):
        with patch("fairchem.core.pretrained_mlip.get_predict_unit") as get_unit:
            with patch("fairchem.core.FAIRChemCalculator"):
                load_model_fairchem(cfg)

                # cache_dir must be dropped once directory creation failed.
                assert "cache_dir" not in get_unit.call_args.kwargs

                # The failure should also have been surfaced in the logs.
                assert "Could not create model cache directory" in caplog.text
| 146 | + |
def test_model_path_not_exists(mock_hf_token):
    """A ``model_path`` that does not exist falls back to loading by name."""
    cfg = Config(
        {"optimization": {"model_path": "/non/existent/path", "model_name": "fallback"}}
    )

    try:
        import fairchem.core  # noqa: F401
    except ImportError:
        pytest.skip("fairchem.core not installed")

    with patch("fairchem.core.pretrained_mlip.get_predict_unit") as get_unit:
        with patch("fairchem.core.FAIRChemCalculator"):
            load_model_fairchem(cfg)

            # Name-based loading path: first positional arg is the model name.
            get_unit.assert_called()
            positional, _kwargs = get_unit.call_args
            assert positional[0] == "fallback"
| 165 | + |
def test_missing_model_name(mock_hf_token):
    """An empty model name (and no usable path) must raise ``ValueError``."""
    # The Config defaults include a model_name, so blank it out explicitly.
    cfg = Config({"optimization": {"model_name": ""}})

    with pytest.raises(ValueError, match="Model name must be specified"):
        load_model_fairchem(cfg)
0 commit comments