Skip to content

Commit ce503f6

Browse files
committed
remove some debug log
Signed-off-by: jaogoy <jaogoy@gmail.com>
1 parent 259d009 commit ce503f6

File tree

1 file changed

+30
-146
lines changed

1 file changed

+30
-146
lines changed

sqlmesh/core/engine_adapter/starrocks.py

Lines changed: 30 additions & 146 deletions
Original file line numberDiff line numberDiff line change
@@ -1476,7 +1476,7 @@ def validate_and_normalize_property(
14761476
>>> validated = PropertyValidator.validate_and_normalize_property("distributed_by", "RANDOM")
14771477
>>> # Result: "RANDOM" (string from EnumType)
14781478
"""
1479-
logger.debug("validate_and_normalize_property. value: %s, type: %s", value, type(value))
1479+
# logger.debug("validate_and_normalize_property. value: %s, type: %s", value, type(value))
14801480

14811481
# Step 1: Optionally preprocess string with parentheses
14821482
if preprocess_parentheses:
@@ -2178,25 +2178,16 @@ def _create_table_from_columns(
21782178
# Use setdefault to simplify table_properties access
21792179
table_properties = kwargs.setdefault("table_properties", {})
21802180

2181-
# Log entry point
2182-
logger.debug(
2183-
"_create_table_from_columns: table=%s, primary_key=%s (from model param), "
2184-
"table_properties.keys=%s",
2185-
table_name,
2186-
primary_key,
2187-
list(table_properties.keys()),
2188-
)
2189-
21902181
# Extract and validate key columns from table_properties
21912182
# Priority: parameter primary_key > table_properties (already handled above)
21922183
key_type, key_columns = self._extract_and_validate_key_columns(
21932184
table_properties, primary_key
21942185
)
2195-
logger.debug(
2196-
"_create_table_from_columns: extracted key_type=%s, key_columns=%s",
2197-
key_type,
2198-
key_columns,
2199-
)
2186+
# logger.debug(
2187+
# "_create_table_from_columns: extracted key_type=%s, key_columns=%s",
2188+
# key_type,
2189+
# key_columns,
2190+
# )
22002191

22012192
# IMPORTANT: Normalize parameter primary_key into table_properties for unified handling
22022193
# This ensures _build_table_properties_exp() can access primary_key even when
@@ -2205,24 +2196,24 @@ def _create_table_from_columns(
22052196
table_properties["primary_key"] = primary_key
22062197
logger.debug("_create_table_from_columns: unified primary_key into table_properties")
22072198
elif key_type:
2208-
logger.debug(
2209-
"table key type '%s' may be handled in _build_table_key_property", key_type
2210-
)
2199+
# logger.debug(
2200+
# "table key type '%s' may be handled in _build_table_key_property", key_type
2201+
# )
2202+
pass
22112203

22122204
# StarRocks key column ordering constraint: All key types need reordering
22132205
if key_columns:
22142206
target_columns_to_types = self._reorder_columns_for_key(
22152207
target_columns_to_types, key_columns, key_type or "key"
22162208
)
2217-
logger.debug("_create_table_from_columns: reordered columns for %s", key_type)
22182209

22192210
# IMPORTANT: Do NOT pass primary_key to base class!
22202211
# Unlike other databases, StarRocks requires PRIMARY KEY to be in POST_SCHEMA location
22212212
# (in properties section after columns), not inside schema (inside column definitions).
22222213
# We handle ALL key types (including PRIMARY KEY) in _build_table_key_property.
2223-
logger.debug(
2224-
"_create_table_from_columns: NOT passing primary_key to base class (handled in _build_table_key_property)"
2225-
)
2214+
# logger.debug(
2215+
# "_create_table_from_columns: NOT passing primary_key to base class (handled in _build_table_key_property)"
2216+
# )
22262217
super()._create_table_from_columns(
22272218
table_name=table_name,
22282219
target_columns_to_types=target_columns_to_types,
@@ -2275,11 +2266,11 @@ def create_view(
22752266
self.get_data_object(view_name), DataObjectType.MATERIALIZED_VIEW
22762267
)
22772268
self.drop_view(view_name, ignore_if_not_exists=True, materialized=True)
2278-
logger.debug(
2279-
f"Creating materialized view: {view_name}, materialized: {materialized}, "
2280-
f"materialized_properties: {materialized_properties}, "
2281-
f"view_properties: {view_properties}, create_kwargs: {create_kwargs}, "
2282-
)
2269+
# logger.debug(
2270+
# f"Creating materialized view: {view_name}, materialized: {materialized}, "
2271+
# f"materialized_properties: {materialized_properties}, "
2272+
# f"view_properties: {view_properties}, create_kwargs: {create_kwargs}, "
2273+
# )
22832274

22842275
return self._create_materialized_view(
22852276
view_name=view_name,
@@ -2359,12 +2350,12 @@ def _create_materialized_view(
23592350
partitioned_by = materialized_properties.get("partitioned_by")
23602351
clustered_by = materialized_properties.get("clustered_by")
23612352
partition_interval_unit = materialized_properties.get("partition_interval_unit")
2362-
logger.debug(
2363-
f"Get info from materialized_properties: {materialized_properties}, "
2364-
f"partitioned_by: {partitioned_by}, "
2365-
f"clustered_by: {clustered_by}, "
2366-
f"partition_interval_unit: {partition_interval_unit}"
2367-
)
2353+
# logger.debug(
2354+
# f"Get info from materialized_properties: {materialized_properties}, "
2355+
# f"partitioned_by: {partitioned_by}, "
2356+
# f"clustered_by: {clustered_by}, "
2357+
# f"partition_interval_unit: {partition_interval_unit}"
2358+
# )
23682359

23692360
properties_exp = self._build_table_properties_exp(
23702361
catalog_name=target_table.catalog,
@@ -2488,10 +2479,10 @@ def _build_table_properties_exp(
24882479
"""
24892480
properties: t.List[exp.Expression] = []
24902481
table_properties_copy = dict(table_properties) if table_properties else {}
2491-
logger.debug(
2492-
"_build_table_properties_exp: table_properties=%s",
2493-
table_properties.keys() if table_properties else [],
2494-
)
2482+
# logger.debug(
2483+
# "_build_table_properties_exp: table_properties=%s",
2484+
# table_properties.keys() if table_properties else [],
2485+
# )
24952486

24962487
is_mv = table_kind == "MATERIALIZED_VIEW"
24972488
if is_mv:
@@ -2508,7 +2499,6 @@ def _build_table_properties_exp(
25082499
property_description="key type",
25092500
table_properties=table_properties_copy,
25102501
)
2511-
logger.debug("_build_table_properties_exp: active_key_type='%s'", active_key_type)
25122502
if is_mv and active_key_type:
25132503
raise SQLMeshError(
25142504
f"You can't specify the table type when the table is a materialized view. "
@@ -2525,22 +2515,11 @@ def _build_table_properties_exp(
25252515
key_type, key_expr, preprocess_parentheses=True
25262516
)
25272517
key_columns = tuple(col.name for col in normalized)
2528-
logger.debug(
2529-
"_build_table_properties_exp: key_type=%s, key_columns=%s",
2530-
key_type,
2531-
key_columns,
2532-
)
25332518

25342519
# 1. Handle key constraints (ALL types including PRIMARY KEY)
25352520
key_prop = self._build_table_key_property(table_properties_copy, active_key_type)
25362521
if key_prop:
25372522
properties.append(key_prop)
2538-
logger.debug(
2539-
"_build_table_properties_exp: generated key_prop=%s",
2540-
type(key_prop).__name__,
2541-
)
2542-
else:
2543-
logger.debug("_build_table_properties_exp: key_prop skipped (not defined)")
25442523

25452524
# 2. Add table comment (it must be ahead of other properties except the table key/type)
25462525
if table_description:
@@ -2562,46 +2541,22 @@ def _build_table_properties_exp(
25622541
)
25632542
if partition_prop:
25642543
properties.append(partition_prop)
2565-
logger.debug(
2566-
"_build_table_properties_exp: generated partition_prop=%s",
2567-
type(partition_prop).__name__,
2568-
)
2569-
else:
2570-
logger.debug("_build_table_properties_exp: partition_prop skipped (not defined)")
25712544

25722545
# 4. Handle distributed_by (DISTRIBUTED BY HASH/RANDOM)
25732546
distributed_prop = self._build_distributed_by_property(table_properties_copy, key_columns)
25742547
if distributed_prop:
25752548
properties.append(distributed_prop)
2576-
logger.debug(
2577-
"_build_table_properties_exp: generated distributed_prop=%s",
2578-
type(distributed_prop).__name__,
2579-
)
2580-
else:
2581-
logger.debug("_build_table_properties_exp: distributed_prop skipped (not defined)")
25822549

25832550
# 5. Handle refresh_property (REFRESH ...)
25842551
if is_mv:
25852552
refresh_prop = self._build_refresh_property(table_properties_copy)
25862553
if refresh_prop:
25872554
properties.append(refresh_prop)
2588-
logger.debug(
2589-
"_build_table_properties_exp: generated refresh_prop=%s",
2590-
type(refresh_prop).__name__,
2591-
)
2592-
else:
2593-
logger.debug("_build_table_properties_exp: refresh_prop skipped (not defined)")
25942555

25952556
# 6. Handle order_by/clustered_by (ORDER BY ...)
25962557
order_prop = self._build_order_by_property(table_properties_copy, clustered_by or None)
25972558
if order_prop:
25982559
properties.append(order_prop)
2599-
logger.debug(
2600-
"_build_table_properties_exp: generated order_prop=%s",
2601-
type(order_prop).__name__,
2602-
)
2603-
else:
2604-
logger.debug("_build_table_properties_exp: order_prop skipped (not defined)")
26052560

26062561
# 5. Handle other properties (replication_num, storage_medium, etc.)
26072562
other_props = self._build_other_properties(table_properties_copy)
@@ -2670,11 +2625,8 @@ def _build_table_key_property(
26702625
Key property expression for the active key type, or None
26712626
"""
26722627
if not active_key_type:
2673-
logger.debug("_build_table_key_property: no active_key_type, skipped")
26742628
return None
26752629

2676-
logger.debug("_build_table_key_property: processing %s", active_key_type)
2677-
26782630
# Configuration: key_name -> Property class (excluding primary_key)
26792631
KEY_PROPERTY_CLASSES: t.Dict[str, t.Type[exp.Expression]] = {
26802632
"primary_key": exp.PrimaryKey,
@@ -2714,11 +2666,6 @@ def _build_table_key_property(
27142666
)
27152667
# normalized is List[exp.Column] as defined in TableKeyInputSpec
27162668
result = property_class(expressions=list(normalized))
2717-
logger.debug(
2718-
"_build_table_key_property: generated %s with columns=%s",
2719-
type(result).__name__,
2720-
[col.name for col in normalized],
2721-
)
27222669
return result
27232670

27242671
def _build_partition_property(
@@ -2761,22 +2708,10 @@ def _build_partition_property(
27612708
)
27622709

27632710
# If parameter was provided, it takes priority
2764-
if partitioned_by:
2765-
logger.debug(
2766-
"_build_partition_property: using partitioned_by from model param=%s",
2767-
partitioned_by,
2768-
)
2769-
elif not partitioned_by and partition_param_name:
2711+
if not partitioned_by and partition_param_name:
27702712
# Get from table_properties
27712713
partitioned_by = table_properties.pop(partition_param_name, None)
2772-
logger.debug(
2773-
"_build_partition_property: using partitioned_by from table_properties[%s]=%s",
2774-
partition_param_name,
2775-
partitioned_by,
2776-
)
2777-
27782714
if not partitioned_by:
2779-
logger.debug("_build_partition_property: no 'partitioned_by' defined, skipped")
27802715
return None
27812716

27822717
# Parse partition expressions to extract columns and kind (RANGE/LIST)
@@ -2809,7 +2744,6 @@ def extract_column_name(expr: exp.Expression) -> t.Optional[str]:
28092744
# Get partition definitions (RANGE/LIST partitions)
28102745
# Note: Expression-based partitioning (partition_kind=None) does not support pre-created partitions
28112746
if partitions := table_properties.pop("partitions", None):
2812-
logger.debug("Pre-created partitions: %s", partitions)
28132747
if partition_kind is None:
28142748
logger.warning(
28152749
"[StarRocks] 'partitions' parameter is ignored for expression-based partitioning. "
@@ -2831,7 +2765,6 @@ def extract_column_name(expr: exp.Expression) -> t.Optional[str]:
28312765
partitions=partitions,
28322766
partition_kind=partition_kind,
28332767
)
2834-
logger.debug("_build_partition_property: generated %s", result)
28352768
return result
28362769

28372770
def _parse_partition_expressions(
@@ -2930,12 +2863,6 @@ def _build_partitioned_by_exp(
29302863
"""
29312864
partition_kind = kwargs.get("partition_kind")
29322865
partitions: t.Optional[t.List[str]] = kwargs.get("partitions")
2933-
logger.debug(
2934-
"_build_partitioned_by_exp: partition_kind=%s, partitioned_by=%s, partitions=%s",
2935-
partition_kind,
2936-
partitioned_by,
2937-
partitions,
2938-
)
29392866

29402867
# Process partitions to create_expressions
29412868
# partitions is already List[str] after SPEC normalization
@@ -2989,20 +2916,10 @@ def _build_distributed_by_property(
29892916

29902917
# No default - if not set, return None
29912918
if distributed_by is None:
2992-
logger.debug("_build_distributed_by_property: no 'distributed_by' defined, skipped")
29932919
return None
29942920

2995-
logger.debug(
2996-
"_build_distributed_by_property: using distributed_by from table_properties=%s",
2997-
distributed_by,
2998-
)
2999-
30002921
# Try to parse complex string with BUCKETS first
30012922
unified = self._parse_distribution_with_buckets(distributed_by)
3002-
logger.debug(
3003-
"_build_distributed_by_property: parsed distribution with buckets: %s",
3004-
unified,
3005-
)
30062923
if unified is None:
30072924
# Fall back to SPEC-based parsing
30082925
normalized = PropertyValidator.validate_and_normalize_property(
@@ -3044,10 +2961,6 @@ def _build_distributed_by_property(
30442961
buckets=buckets_expr,
30452962
order=None,
30462963
)
3047-
logger.debug(
3048-
"_build_distributed_by_property: generated DistributedByProperty: %s",
3049-
result,
3050-
)
30512964
return result
30522965

30532966
def _build_refresh_property(
@@ -3171,11 +3084,6 @@ def _parse_distribution_with_buckets(
31713084
(The output function will still handle "HASH(id)" without BUCKETS)
31723085
"""
31733086
# Only handle string or Literal string values
3174-
logger.debug(
3175-
"_parse_distribution_with_buckets: distributed_by: %s, type: %s",
3176-
distributed_by,
3177-
type(distributed_by),
3178-
)
31793087
if isinstance(distributed_by, str):
31803088
text = distributed_by
31813089
elif isinstance(distributed_by, exp.Literal) and distributed_by.is_string:
@@ -3197,11 +3105,6 @@ def _parse_distribution_with_buckets(
31973105

31983106
# Parse the HASH/RANDOM part via SPEC
31993107
normalized = PropertyValidator.validate_and_normalize_property("distributed_by", hash_part)
3200-
logger.debug(
3201-
"_parse_distribution_with_buckets: parsed hash part: %s, type: %s",
3202-
normalized,
3203-
type(normalized),
3204-
)
32053108

32063109
return DistributionTupleOutputType.to_unified_dict(normalized, int(buckets_str))
32073110

@@ -3236,31 +3139,19 @@ def _build_order_by_property(
32363139
)
32373140

32383141
# If parameter was provided, it takes priority
3239-
if clustered_by:
3240-
logger.debug(
3241-
"_build_order_by_property: using clustered_by from model param=%s",
3242-
clustered_by,
3243-
)
3244-
elif clustered_by is None and order_by_param_name:
3142+
if clustered_by is None and order_by_param_name:
32453143
# Get order_by from table_properties (already validated by check_at_most_one)
32463144
order_by = table_properties.pop(order_by_param_name, None)
32473145
if order_by is not None:
32483146
normalized = PropertyValidator.validate_and_normalize_property(
32493147
"clustered_by", order_by, preprocess_parentheses=True
32503148
)
32513149
clustered_by = list(normalized)
3252-
logger.debug(
3253-
"_build_order_by_property: using clustered_by from table_properties[%s]=%s",
3254-
order_by_param_name,
3255-
clustered_by,
3256-
)
32573150

32583151
if clustered_by:
32593152
result = exp.Cluster(expressions=clustered_by)
3260-
logger.debug("_build_order_by_property: generated Cluster")
32613153
return result
32623154
else: # noqa: RET505
3263-
logger.debug("_build_order_by_property: no 'clustered_by' defined, skipped")
32643155
return None
32653156

32663157
def _build_other_properties(self, table_properties: t.Dict[str, t.Any]) -> t.List[exp.Property]:
@@ -3337,7 +3228,6 @@ def _extract_and_validate_key_columns(
33373228
table_properties=table_properties,
33383229
parameter_value=primary_key,
33393230
)
3340-
logger.debug("get table key: %s", {active_key_type})
33413231

33423232
# If parameter primary_key was provided, return it
33433233
if primary_key:
@@ -3355,12 +3245,6 @@ def _extract_and_validate_key_columns(
33553245
)
33563246
key_columns = tuple(col.name for col in normalized)
33573247

3358-
logger.debug(
3359-
"Extracted '%s' from table_properties, value=%s",
3360-
active_key_type,
3361-
key_columns,
3362-
)
3363-
33643248
return (active_key_type, key_columns)
33653249

33663250
def _reorder_columns_for_key(

0 commit comments

Comments
 (0)