@@ -301,3 +301,135 @@ def test_load_microbatch_required_only(
301301 )
302302 assert model .kind .batch_size == 1
303303 assert model .depends_on_self is False
304+
305+
@pytest.mark.slow
def test_load_microbatch_with_ref(
    tmp_path: Path, caplog, dbt_dummy_postgres_config: PostgresConfig, create_empty_project
) -> None:
    """A microbatch model that refs another microbatch model renders with time filters.

    Builds a project containing a source with an ``event_time`` column, one
    microbatch model selecting from that source, and a second microbatch model
    selecting from the first via ``ref()``. Rendering each model for a date
    range must inject the corresponding ``ds`` time-window predicate: a
    ``BETWEEN`` filter on the source scan, and upper/lower bound predicates on
    the ref'd upstream model.
    """
    yaml = YAML()
    project_dir, model_dir = create_empty_project()

    # Source table declares `event_time` so microbatch filtering applies to it.
    source_schema = {
        "version": 2,
        "sources": [
            {
                "name": "my_source",
                "tables": [{"name": "my_table", "config": {"event_time": "ds"}}],
            }
        ],
    }
    source_schema_file = model_dir / "source_schema.yml"
    with open(source_schema_file, "w", encoding="utf-8") as f:
        yaml.dump(source_schema, f)

    # First microbatch model: reads directly from the event_time-annotated source.
    microbatch_contents = """
{{
    config(
        materialized='incremental',
        incremental_strategy='microbatch',
        event_time='ds',
        begin='2020-01-01',
        batch_size='day'
    )
}}

SELECT cola, ds FROM {{ source('my_source', 'my_table') }}
"""
    microbatch_model_file = model_dir / "microbatch.sql"
    with open(microbatch_model_file, "w", encoding="utf-8") as f:
        f.write(microbatch_contents)

    # Second microbatch model: refs the first, so the upstream relation itself
    # must be time-filtered when rendered.
    microbatch_two_contents = """
{{
    config(
        materialized='incremental',
        incremental_strategy='microbatch',
        event_time='ds',
        begin='2020-01-05',
        batch_size='day'
    )
}}

SELECT cola, ds FROM {{ ref('microbatch') }}
"""
    microbatch_two_model_file = model_dir / "microbatch_two.sql"
    with open(microbatch_two_model_file, "w", encoding="utf-8") as f:
        f.write(microbatch_two_contents)

    microbatch_snapshot_fqn = '"local"."main"."microbatch"'
    microbatch_two_snapshot_fqn = '"local"."main"."microbatch_two"'
    context = Context(paths=project_dir)

    # Source scan is wrapped in a subquery carrying a BETWEEN window on `ds`.
    assert (
        context.render(microbatch_snapshot_fqn, start="2025-01-01", end="2025-01-10").sql()
        == 'SELECT "cola" AS "cola", "ds" AS "ds" FROM (SELECT * FROM "local"."my_source"."my_table" AS "my_table" WHERE "ds" BETWEEN \'2025-01-01 00:00:00+00:00\' AND \'2025-01-10 23:59:59.999999+00:00\') AS "_q_0"'
    )
    # Ref'd upstream model gets explicit upper/lower bound predicates on `ds`.
    assert (
        context.render(microbatch_two_snapshot_fqn, start="2025-01-01", end="2025-01-10").sql()
        == 'SELECT "_q_0"."cola" AS "cola", "_q_0"."ds" AS "ds" FROM (SELECT "microbatch"."cola" AS "cola", "microbatch"."ds" AS "ds" FROM "local"."main"."microbatch" AS "microbatch" WHERE "microbatch"."ds" <= \'2025-01-10 23:59:59.999999+00:00\' AND "microbatch"."ds" >= \'2025-01-01 00:00:00+00:00\') AS "_q_0"'
    )
370+
371+
@pytest.mark.slow
def test_load_microbatch_with_ref_no_filter(
    tmp_path: Path, caplog, dbt_dummy_postgres_config: PostgresConfig, create_empty_project
) -> None:
    """``.render()`` on source()/ref() opts a microbatch model out of time filtering.

    Mirrors ``test_load_microbatch_with_ref`` but calls ``.render()`` on both
    the ``source()`` and ``ref()`` relations. Rendering for a date range must
    then produce plain, unfiltered SELECTs — no ``ds`` predicates and no
    wrapping subquery around the source scan.
    """
    yaml = YAML()
    project_dir, model_dir = create_empty_project()

    # Source still declares `event_time`; the .render() call below is what
    # suppresses the filter, not the schema.
    source_schema = {
        "version": 2,
        "sources": [
            {
                "name": "my_source",
                "tables": [{"name": "my_table", "config": {"event_time": "ds"}}],
            }
        ],
    }
    source_schema_file = model_dir / "source_schema.yml"
    with open(source_schema_file, "w", encoding="utf-8") as f:
        yaml.dump(source_schema, f)

    # First microbatch model: `.render()` on source() bypasses microbatch filtering.
    microbatch_contents = """
{{
    config(
        materialized='incremental',
        incremental_strategy='microbatch',
        event_time='ds',
        begin='2020-01-01',
        batch_size='day'
    )
}}

SELECT cola, ds FROM {{ source('my_source', 'my_table').render() }}
"""
    microbatch_model_file = model_dir / "microbatch.sql"
    with open(microbatch_model_file, "w", encoding="utf-8") as f:
        f.write(microbatch_contents)

    # Second microbatch model: `.render()` on ref() likewise bypasses filtering.
    microbatch_two_contents = """
{{
    config(
        materialized='incremental',
        incremental_strategy='microbatch',
        event_time='ds',
        begin='2020-01-01',
        batch_size='day'
    )
}}

SELECT cola, ds FROM {{ ref('microbatch').render() }}
"""
    microbatch_two_model_file = model_dir / "microbatch_two.sql"
    with open(microbatch_two_model_file, "w", encoding="utf-8") as f:
        f.write(microbatch_two_contents)

    microbatch_snapshot_fqn = '"local"."main"."microbatch"'
    microbatch_two_snapshot_fqn = '"local"."main"."microbatch_two"'
    context = Context(paths=project_dir)

    # No WHERE clause and no subquery: the time window is ignored for both models.
    assert (
        context.render(microbatch_snapshot_fqn, start="2025-01-01", end="2025-01-10").sql()
        == 'SELECT "cola" AS "cola", "ds" AS "ds" FROM "local"."my_source"."my_table" AS "my_table"'
    )
    assert (
        context.render(microbatch_two_snapshot_fqn, start="2025-01-01", end="2025-01-10").sql()
        == 'SELECT "microbatch"."cola" AS "cola", "microbatch"."ds" AS "ds" FROM "local"."main"."microbatch" AS "microbatch"'
    )
0 commit comments