Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/fabric_cli/commands/fs/export/fab_fs_export_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from fabric_cli.core.fab_types import ItemType, definition_format_mapping
from fabric_cli.core.hiearchy.fab_folder import Folder
from fabric_cli.core.hiearchy.fab_hiearchy import Item, Workspace
from fabric_cli.errors import ErrorMessages
from fabric_cli.errors.common import CommonErrors
from fabric_cli.utils import fab_cmd_export_utils as utils_export
from fabric_cli.utils import fab_item_util, fab_mem_store, fab_storage, fab_ui

Expand Down Expand Up @@ -117,7 +117,7 @@ def export_single_item(
# Non-empty format param but not supported
available_formats = [k for k in valid_export_formats.keys() if k != "default"]
raise FabricCLIError(
ErrorMessages.Export.invalid_export_format(available_formats),
CommonErrors.invalid_format(available_formats),
fab_constant.ERROR_INVALID_INPUT,
)
else:
Expand Down
23 changes: 17 additions & 6 deletions src/fabric_cli/commands/fs/impor/fab_fs_import_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@
from fabric_cli.client.fab_api_types import ApiResponse
from fabric_cli.core import fab_constant, fab_logger
from fabric_cli.core.fab_exceptions import FabricCLIError
from fabric_cli.core.fab_types import ItemType
from fabric_cli.core.fab_types import ItemType, definition_format_mapping
from fabric_cli.core.hiearchy.fab_hiearchy import Item
from fabric_cli.errors.common import CommonErrors
from fabric_cli.utils import fab_cmd_import_utils as utils_import
from fabric_cli.utils import fab_mem_store as utils_mem_store
from fabric_cli.utils import fab_storage as utils_storage
Expand All @@ -20,11 +21,21 @@ def import_single_item(item: Item, args: Namespace) -> None:
_input_format = None
if args.format:
_input_format = args.format
if _input_format not in (".py", ".ipynb"):
raise FabricCLIError(
"Invalid format. Only '.py' and '.ipynb' are supported.",
fab_constant.ERROR_INVALID_INPUT,
)
# Validate format against item type's supported formats
valid_import_formats = definition_format_mapping.get(item.item_type, {})
if _input_format not in valid_import_formats:
# Format not in definition_format_mapping
available_formats = [k for k in valid_import_formats.keys() if k != "default"]
if available_formats:
raise FabricCLIError(
CommonErrors.invalid_format(available_formats),
fab_constant.ERROR_INVALID_INPUT,
)
else:
raise FabricCLIError(
f"Format not supported for {item.item_type}. No import formats are available.",
fab_constant.ERROR_INVALID_INPUT,
)

args.ws_id = item.workspace.id
input_path = utils_storage.get_import_path(args.input)
Expand Down
13 changes: 12 additions & 1 deletion src/fabric_cli/core/fab_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -560,10 +560,21 @@ class MirroredDatabaseFolders(Enum):
# Item Payload definition

definition_format_mapping = {
ItemType.SPARK_JOB_DEFINITION: {"default": "?format=SparkJobDefinitionV1"},
ItemType.SPARK_JOB_DEFINITION: {
"default": "?format=SparkJobDefinitionV1",
"SparkJobDefinitionV1": "?format=SparkJobDefinitionV1",
"SparkJobDefinitionV2": "?format=SparkJobDefinitionV2",
},
ItemType.NOTEBOOK: {
"default": "?format=ipynb",
".py": "?format=fabricGitSource",
".ipynb": "?format=ipynb",
"ipynb": "?format=ipynb",
"fabricGitSource": "?format=fabricGitSource",
},
ItemType.SEMANTIC_MODEL: {
"default": "?format=TMDL",
"TMDL": "?format=TMDL",
"TMSL": "?format=TMSL",
},
}
39 changes: 30 additions & 9 deletions src/fabric_cli/core/hiearchy/fab_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,33 +78,54 @@ def get_payload(self, definition, input_format=None) -> dict:
match self.item_type:

case ItemType.SPARK_JOB_DEFINITION:
# Determine format: use input_format if provided, otherwise default to V1
format_value = input_format if input_format else "SparkJobDefinitionV1"
return {
"type": str(self.item_type),
"description": "Imported from fab",
"folderId": self.folder_id,
"displayName": self.short_name,
"definition": {
"format": "SparkJobDefinitionV1",
"format": format_value,
"parts": definition["parts"],
},
}
case ItemType.NOTEBOOK:
# Determine notebook format
if input_format == ".py":
# .py format: no format field in definition
notebook_definition = {"parts": definition["parts"]}
else:
# Determine format value for non-.py formats
if input_format and input_format != ".ipynb":
format_value = input_format
else:
format_value = "ipynb"
notebook_definition = {"format": format_value, "parts": definition["parts"]}

return {
"type": str(self.item_type),
"description": "Imported from fab",
"folderId": self.folder_id,
"displayName": self.short_name,
"definition": {
**(
{"parts": definition["parts"]}
if input_format == ".py"
else {"format": "ipynb", "parts": definition["parts"]}
)
},
"definition": notebook_definition,
}
case ItemType.SEMANTIC_MODEL:
# Determine format: use input_format if provided, otherwise default to TMDL
format_value = input_format if input_format else "TMDL"
payload = {
"type": str(self.item_type),
"description": "Imported from fab",
"folderId": self.folder_id,
"displayName": self.short_name,
"definition": definition,
}
# Add format to definition if it has parts
if "parts" in definition:
payload["definition"]["format"] = format_value
return payload
case (
ItemType.REPORT
| ItemType.SEMANTIC_MODEL
| ItemType.KQL_DASHBOARD
| ItemType.DATA_PIPELINE
| ItemType.KQL_QUERYSET
Expand Down
5 changes: 5 additions & 0 deletions src/fabric_cli/errors/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,3 +244,8 @@ def gateway_property_not_supported_for_type(
property_name: str, gateway_type: str
) -> str:
return f"Setting '{property_name}' is not supported for Gateway type '{gateway_type}'"

@staticmethod
def invalid_format(valid_formats: list[str]) -> str:
message = "Only the following formats are supported: " + ", ".join(valid_formats) if len(valid_formats) else "No formats are supported"
return f"Invalid format. {message}"
16 changes: 11 additions & 5 deletions src/fabric_cli/parsers/fab_fs_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,11 @@ def register_export_parser(subparsers: _SubParsersAction) -> None:
"# export multiple items from a workspace to local",
"$ export ws1.Workspace -o /tmp\n",
"# export a single item to a lakehouse",
"$ export ws1.Workspace/rep1.Report -o /ws1.Workspace/lh1.Lakehouse/Files/export -f",
"$ export ws1.Workspace/rep1.Report -o /ws1.Workspace/lh1.Lakehouse/Files/export -f\n",
"# export a spark job definition with V2 format",
"$ export ws1.Workspace/sjd1.SparkJobDefinition -o /tmp --format SparkJobDefinitionV2\n",
"# export a semantic model in TMSL format",
"$ export ws1.Workspace/sm1.SemanticModel -o /tmp --format TMSL",
]

export_parser = subparsers.add_parser(
Expand Down Expand Up @@ -319,7 +323,7 @@ def register_export_parser(subparsers: _SubParsersAction) -> None:
export_parser.add_argument(
"--format",
metavar="",
help="Export format. Optional",
help="Export format. Supported formats vary by item type (e.g., SparkJobDefinition: SparkJobDefinitionV1/V2, SemanticModel: TMDL/TMSL, Notebook: ipynb/fabricGitSource). Optional",
)
export_parser.add_argument(
"-f",
Expand Down Expand Up @@ -392,8 +396,10 @@ def register_import_parser(subparsers: _SubParsersAction) -> None:
import_examples = [
"# import a notebook from a local directory",
"$ import imp.Notebook -i /tmp/nb1.Notebook\n",
"# import a pipeline from a local directory",
"$ import pip.Notebook -i /tmp/pip1.DataPipeline -f",
"# import a notebook in .py format",
"$ import nb1.Notebook -i /tmp/nb1.Notebook --format .py -f\n",
"# import a spark job definition with V2 format",
"$ import sjd1.SparkJobDefinition -i /tmp/sjd1 --format SparkJobDefinitionV2 -f",
]

import_parser = subparsers.add_parser(
Expand All @@ -415,7 +421,7 @@ def register_import_parser(subparsers: _SubParsersAction) -> None:
import_parser.add_argument(
"--format",
metavar="",
help="Input format. Optional, supported for notebooks (.ipynb, .py)",
help="Input format. Supported formats vary by item type (e.g., SparkJobDefinition: SparkJobDefinitionV1/V2, SemanticModel: TMDL/TMSL, Notebook: .ipynb/.py/ipynb/fabricGitSource). Optional",
)
import_parser.add_argument(
"-f", "--force", required=False, action="store_true", help="Force. Optional"
Expand Down
73 changes: 73 additions & 0 deletions tests/test_core/test_format_mapping.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

"""
Unit tests for format mapping in export/import functionality
"""

import pytest
from fabric_cli.core.fab_types import ItemType, definition_format_mapping


class TestFormatMapping:
    """Test the definition_format_mapping for various item types."""

    def test_spark_job_definition_formats(self):
        """SparkJobDefinition maps default/V1/V2 keys to their query strings."""
        formats = definition_format_mapping.get(ItemType.SPARK_JOB_DEFINITION, {})

        # Check all expected formats exist
        assert "default" in formats
        assert "SparkJobDefinitionV1" in formats
        assert "SparkJobDefinitionV2" in formats

        # Check format values
        assert formats["default"] == "?format=SparkJobDefinitionV1"
        assert formats["SparkJobDefinitionV1"] == "?format=SparkJobDefinitionV1"
        assert formats["SparkJobDefinitionV2"] == "?format=SparkJobDefinitionV2"

    def test_semantic_model_formats(self):
        """SemanticModel maps default/TMDL/TMSL keys to their query strings."""
        formats = definition_format_mapping.get(ItemType.SEMANTIC_MODEL, {})

        # Check all expected formats exist
        assert "default" in formats
        assert "TMDL" in formats
        assert "TMSL" in formats

        # Check format values
        assert formats["default"] == "?format=TMDL"
        assert formats["TMDL"] == "?format=TMDL"
        assert formats["TMSL"] == "?format=TMSL"

    def test_notebook_formats(self):
        """Notebook accepts extension and format-name keys with correct values."""
        formats = definition_format_mapping.get(ItemType.NOTEBOOK, {})

        # Check all expected formats exist
        assert "default" in formats
        assert ".py" in formats
        assert ".ipynb" in formats
        assert "ipynb" in formats
        assert "fabricGitSource" in formats

        # Check format values
        assert formats["default"] == "?format=ipynb"
        assert formats[".py"] == "?format=fabricGitSource"
        assert formats[".ipynb"] == "?format=ipynb"
        assert formats["ipynb"] == "?format=ipynb"
        assert formats["fabricGitSource"] == "?format=fabricGitSource"

    def test_unsupported_item_type_has_no_formats(self):
        """Item types without format support have no mapping entry."""
        # Report doesn't have format support
        formats = definition_format_mapping.get(ItemType.REPORT, {})
        assert formats == {}

    def test_all_formats_have_query_string_prefix(self):
        """Every format value — including "default" — starts with ?format=.

        Bug fix: the original guarded the assertion with
        ``if format_key != "default"`` while its comment claimed the default
        was also checked; the guard silently skipped every "default" entry.
        All values must be validated, so the guard is removed.
        """
        for item_type, formats in definition_format_mapping.items():
            for format_key, format_value in formats.items():
                assert format_value.startswith("?format="), \
                    f"Format value '{format_value}' for {item_type}.{format_key} should start with '?format='"
67 changes: 67 additions & 0 deletions tests/test_core/test_format_validation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

"""
Integration test to verify format validation logic
"""

import pytest
from fabric_cli.core.fab_exceptions import FabricCLIError
from fabric_cli.core.fab_types import ItemType, definition_format_mapping


class TestFormatValidation:
    """Test format validation logic in export/import."""

    def test_invalid_format_for_spark_job_definition(self):
        """Test that invalid format for SparkJobDefinition raises error"""
        supported = definition_format_mapping.get(ItemType.SPARK_JOB_DEFINITION, {})

        # An unknown key must be absent from the mapping.
        assert "InvalidFormat" not in supported

        # Both Spark job definition versions must be present.
        for expected_key in ("SparkJobDefinitionV1", "SparkJobDefinitionV2"):
            assert expected_key in supported

    def test_invalid_format_for_semantic_model(self):
        """Test that invalid format for SemanticModel raises error"""
        supported = definition_format_mapping.get(ItemType.SEMANTIC_MODEL, {})

        # An unknown key must be absent from the mapping.
        assert "InvalidFormat" not in supported

        # Both semantic-model representations must be present.
        for expected_key in ("TMDL", "TMSL"):
            assert expected_key in supported

    def test_invalid_format_for_notebook(self):
        """Test that invalid format for Notebook raises error"""
        supported = definition_format_mapping.get(ItemType.NOTEBOOK, {})

        # An unknown key must be absent from the mapping.
        assert "InvalidFormat" not in supported

        # Extension-style and name-style keys must all be present.
        for expected_key in (".py", ".ipynb", "ipynb", "fabricGitSource"):
            assert expected_key in supported

    def test_format_mapping_keys_consistency(self):
        """Test that all format mappings have consistent structure"""
        query_prefix = "?format="
        for item_type, format_table in definition_format_mapping.items():
            # Every mapping must define a fallback entry.
            assert "default" in format_table, f"{item_type} should have a 'default' format"

            for key, value in format_table.items():
                # Each value must be a query string...
                assert value.startswith(query_prefix), \
                    f"{item_type}.{key} format value should start with '?format='"
                # ...carrying a non-empty format name after the prefix.
                assert value.replace(query_prefix, ""), \
                    f"{item_type}.{key} should have a non-empty format name"