diff --git a/packages/dbml-core/__tests__/examples/exporter/mssql_exporter/input/insert_records.in.dbml b/packages/dbml-core/__tests__/examples/exporter/mssql_exporter/input/insert_records.in.dbml new file mode 100644 index 000000000..b9c190484 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/mssql_exporter/input/insert_records.in.dbml @@ -0,0 +1,27 @@ +Table users { + id integer [pk] + name varchar + email varchar + active boolean + created_at timestamp +} + +Table posts { + id integer [pk] + user_id integer + title varchar + content text +} + +Ref: users.id < posts.user_id + +Records users(id, name, email, active, created_at) { + 1, "Alice", "alice@example.com", true, "2024-01-15 10:30:00" + 2, "Bob", "bob@example.com", false, "2024-01-16 14:20:00" + 3, "Charlie", null, true, "2024-01-17 09:15:00" +} + +Records posts(id, user_id, title, content) { + 1, 1, "First Post", "Hello World" + 2, 1, "Second Post", "It's a beautiful day" +} diff --git a/packages/dbml-core/__tests__/examples/exporter/mssql_exporter/output/insert_records.out.sql b/packages/dbml-core/__tests__/examples/exporter/mssql_exporter/output/insert_records.out.sql new file mode 100644 index 000000000..0c884ed56 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/mssql_exporter/output/insert_records.out.sql @@ -0,0 +1,39 @@ +CREATE TABLE [users] ( + [id] integer PRIMARY KEY, + [name] nvarchar(255), + [email] nvarchar(255), + [active] boolean, + [created_at] timestamp +) +GO + +CREATE TABLE [posts] ( + [id] integer PRIMARY KEY, + [user_id] integer, + [title] nvarchar(255), + [content] text +) +GO + +ALTER TABLE [users] ADD FOREIGN KEY ([id]) REFERENCES [posts] ([user_id]) +GO + +-- Disable constraint checks for INSERT (SQL Server does not support DEFERRED) +EXEC sp_MSforeachtable "ALTER TABLE ? NOCHECK CONSTRAINT all"; +GO + +INSERT INTO [users] ([id], [name], [email], [active], [created_at]) +VALUES + (1, 'Alice', 'alice@example.com', 1, '2024-01-15 10:30:00'), + (2, 'Bob', 'bob@example.com', 0, '2024-01-16 14:20:00'), + (3, 'Charlie', NULL, 1, '2024-01-17 09:15:00'); +GO +INSERT INTO [posts] ([id], [user_id], [title], [content]) +VALUES + (1, 1, 'First Post', 'Hello World'), + (2, 1, 'Second Post', 'It''s a beautiful day'); +GO + +-- Re-enable constraint checks +EXEC sp_MSforeachtable "ALTER TABLE ? WITH CHECK CHECK CONSTRAINT all"; +GO diff --git a/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/input/insert_records.in.dbml b/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/input/insert_records.in.dbml new file mode 100644 index 000000000..b9c190484 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/input/insert_records.in.dbml @@ -0,0 +1,27 @@ +Table users { + id integer [pk] + name varchar + email varchar + active boolean + created_at timestamp +} + +Table posts { + id integer [pk] + user_id integer + title varchar + content text +} + +Ref: users.id < posts.user_id + +Records users(id, name, email, active, created_at) { + 1, "Alice", "alice@example.com", true, "2024-01-15 10:30:00" + 2, "Bob", "bob@example.com", false, "2024-01-16 14:20:00" + 3, "Charlie", null, true, "2024-01-17 09:15:00" +} + +Records posts(id, user_id, title, content) { + 1, 1, "First Post", "Hello World" + 2, 1, "Second Post", "It's a beautiful day" +} diff --git a/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/input/sample_data_edge_cases.in.dbml b/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/input/sample_data_edge_cases.in.dbml new file mode 100644 index 000000000..6d543a255 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/input/sample_data_edge_cases.in.dbml @@ -0,0 +1,20 @@ +Table edge_cases { + id integer [pk] + scientific_notation_pos float + scientific_notation_neg float + signed_positive integer + signed_negative integer + sql_function_default varchar + dbml_expr_default integer + datetime_value timestamp + string_with_newline text + string_with_backslash varchar + string_with_escape_seq varchar + string_with_quotes varchar + null_value varchar +} + +Records edge_cases(id, scientific_notation_pos, scientific_notation_neg, signed_positive, signed_negative, sql_function_default, dbml_expr_default, datetime_value, string_with_newline, string_with_backslash, string_with_escape_seq, string_with_quotes, null_value) { + 1, 1.23e5, -4.56e-3, +42, -100, `NOW()`, `1 + 2 * 3`, "2024-01-15 10:30:00.123456", "Line 1\nLine 2\nLine 3", "C:\\Users\\path\\file.txt", "Tab:\tNewline:\nCarriage return:\r", "She said \"Hello\" and 'Hi'", null + 2, 9.99e10, -1.11e-10, +0, -0, `CURRENT_TIMESTAMP`, `LENGTH('test')`, "2023-12-31 23:59:59", "First line\n\nThird line", "Escaped backslash: \\\\", "Quote: \" Apostrophe: ' Backslash: \\", "O'Reilly's \"book\"", null +} diff --git a/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/output/insert_records.out.sql b/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/output/insert_records.out.sql new file mode 100644 index 000000000..6eee67148 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/output/insert_records.out.sql @@ -0,0 +1,32 @@ +CREATE TABLE `users` ( + `id` integer PRIMARY KEY, + `name` varchar(255), + `email` varchar(255), + `active` boolean, + `created_at` timestamp +); + +CREATE TABLE `posts` ( + `id` integer PRIMARY KEY, + `user_id` integer, + `title` varchar(255), + `content` text +); + +ALTER TABLE `users` ADD FOREIGN KEY (`id`) REFERENCES `posts` (`user_id`); + +-- Disable foreign key checks for INSERT (MySQL does not support DEFERRED) +SET FOREIGN_KEY_CHECKS = 0; + +INSERT INTO `users` (`id`, `name`, `email`, `active`, `created_at`) +VALUES + (1, 'Alice', 'alice@example.com', 1, '2024-01-15 10:30:00'), + (2, 'Bob', 'bob@example.com', 0, '2024-01-16 14:20:00'), + (3, 'Charlie', NULL, 1, '2024-01-17 09:15:00'); +INSERT INTO `posts` (`id`, `user_id`, `title`, `content`) +VALUES + (1, 1, 'First Post', 'Hello World'), + (2, 1, 'Second Post', 'It''s a beautiful day'); + +-- Re-enable foreign key checks +SET FOREIGN_KEY_CHECKS = 1; diff --git a/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/output/sample_data_edge_cases.out.sql b/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/output/sample_data_edge_cases.out.sql new file mode 100644 index 000000000..e23eb0407 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/mysql_exporter/output/sample_data_edge_cases.out.sql @@ -0,0 +1,31 @@ +CREATE TABLE `edge_cases` ( + `id` integer PRIMARY KEY, + `scientific_notation_pos` float, + `scientific_notation_neg` float, + `signed_positive` integer, + `signed_negative` integer, + `sql_function_default` varchar(255), + `dbml_expr_default` integer, + `datetime_value` timestamp, + `string_with_newline` text, + `string_with_backslash` varchar(255), + `string_with_escape_seq` varchar(255), + `string_with_quotes` varchar(255), + `null_value` varchar(255) +); + +-- Disable foreign key checks for INSERT (MySQL does not support DEFERRED) +SET FOREIGN_KEY_CHECKS = 0; + +INSERT INTO `edge_cases` (`id`, `scientific_notation_pos`, `scientific_notation_neg`, `signed_positive`, `signed_negative`, `sql_function_default`, `dbml_expr_default`, `datetime_value`, `string_with_newline`, `string_with_backslash`, `string_with_escape_seq`, `string_with_quotes`, `null_value`) +VALUES + (1, 123000, -0.00456, 42, -100, NOW(), 1 + 2 * 3, '2024-01-15 10:30:00.123456', 'Line 1 +Line 2 +Line 3', 'C:\\Users\\path\\file.txt', 'Tab: Newline: +Carriage return: ', 'She said "Hello" and ''Hi''', NULL), + (2, 99900000000, -1.11e-10, 0, 0, CURRENT_TIMESTAMP, LENGTH('test'), '2023-12-31 23:59:59', 'First line + +Third line', 'Escaped backslash: \\\\', 'Quote: " Apostrophe: '' Backslash: \\', 'O''Reilly''s "book"', NULL); + +-- Re-enable foreign key checks +SET FOREIGN_KEY_CHECKS = 1; \ No newline at end of file diff --git a/packages/dbml-core/__tests__/examples/exporter/oracle_exporter/input/insert_records.in.dbml b/packages/dbml-core/__tests__/examples/exporter/oracle_exporter/input/insert_records.in.dbml new file mode 100644 index 000000000..b9c190484 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/oracle_exporter/input/insert_records.in.dbml @@ -0,0 +1,27 @@ +Table users { + id integer [pk] + name varchar + email varchar + active boolean + created_at timestamp +} + +Table posts { + id integer [pk] + user_id integer + title varchar + content text +} + +Ref: users.id < posts.user_id + +Records users(id, name, email, active, created_at) { + 1, "Alice", "alice@example.com", true, "2024-01-15 10:30:00" + 2, "Bob", "bob@example.com", false, "2024-01-16 14:20:00" + 3, "Charlie", null, true, "2024-01-17 09:15:00" +} + +Records posts(id, user_id, title, content) { + 1, 1, "First Post", "Hello World" + 2, 1, "Second Post", "It's a beautiful day" +} diff --git a/packages/dbml-core/__tests__/examples/exporter/oracle_exporter/output/insert_records.out.sql b/packages/dbml-core/__tests__/examples/exporter/oracle_exporter/output/insert_records.out.sql new file mode 100644 index 000000000..0cc54d376 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/oracle_exporter/output/insert_records.out.sql @@ -0,0 +1,31 @@ +CREATE TABLE "users" ( + "id" integer PRIMARY KEY, + "name" varchar, + "email" varchar, + "active" boolean, + "created_at" timestamp +); + +CREATE TABLE "posts" ( + "id" integer PRIMARY KEY, + "user_id" integer, + "title" varchar, + "content" text +); + +ALTER TABLE "users" ADD FOREIGN KEY ("id") REFERENCES "posts" ("user_id"); + +-- Use deferred constraints for INSERT +SET CONSTRAINTS ALL DEFERRED; + +INSERT ALL + INTO "users" ("id", "name", "email", "active", "created_at") VALUES (1, 'Alice', 'alice@example.com', 1, '2024-01-15 10:30:00') + INTO "users" ("id", "name", "email", "active", "created_at") VALUES (2, 'Bob', 'bob@example.com', 0, '2024-01-16 14:20:00') + INTO "users" ("id", "name", "email", "active", "created_at") VALUES (3, 'Charlie', NULL, 1, '2024-01-17 09:15:00') +SELECT * FROM dual; +INSERT ALL + INTO "posts" ("id", "user_id", "title", "content") VALUES (1, 1, 'First Post', 'Hello World') + INTO "posts" ("id", "user_id", "title", "content") VALUES (2, 1, 'Second Post', 'It''s a beautiful day') +SELECT * FROM dual; + +COMMIT; diff --git a/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/input/insert_records.in.dbml b/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/input/insert_records.in.dbml new file mode 100644 index 000000000..b9c190484 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/input/insert_records.in.dbml @@ -0,0 +1,27 @@ +Table users { + id integer [pk] + name varchar + email varchar + active boolean + created_at timestamp +} + +Table posts { + id integer [pk] + user_id integer + title varchar + content text +} + +Ref: users.id < posts.user_id + +Records users(id, name, email, active, created_at) { + 1, "Alice", "alice@example.com", true, "2024-01-15 10:30:00" + 2, "Bob", "bob@example.com", false, "2024-01-16 14:20:00" + 3, "Charlie", null, true, "2024-01-17 09:15:00" +} + +Records posts(id, user_id, title, content) { + 1, 1, "First Post", "Hello World" + 2, 1, "Second Post", "It's a beautiful day" +} diff --git a/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/input/sample_data_edge_cases.in.dbml b/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/input/sample_data_edge_cases.in.dbml new file mode 100644 index 000000000..6d543a255 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/input/sample_data_edge_cases.in.dbml @@ -0,0 +1,20 @@ +Table edge_cases { + id integer [pk] + scientific_notation_pos float + scientific_notation_neg float + signed_positive integer + signed_negative integer + sql_function_default varchar + dbml_expr_default integer + datetime_value timestamp + string_with_newline text + string_with_backslash varchar + string_with_escape_seq varchar + string_with_quotes varchar + null_value varchar +} + +Records edge_cases(id, scientific_notation_pos, scientific_notation_neg, signed_positive, signed_negative, sql_function_default, dbml_expr_default, datetime_value, string_with_newline, string_with_backslash, string_with_escape_seq, string_with_quotes, null_value) { + 1, 1.23e5, -4.56e-3, +42, -100, `NOW()`, `1 + 2 * 3`, "2024-01-15 10:30:00.123456", "Line 1\nLine 2\nLine 3", "C:\\Users\\path\\file.txt", "Tab:\tNewline:\nCarriage return:\r", "She said \"Hello\" and 'Hi'", null + 2, 9.99e10, -1.11e-10, +0, -0, `CURRENT_TIMESTAMP`, `LENGTH('test')`, "2023-12-31 23:59:59", "First line\n\nThird line", "Escaped backslash: \\\\", "Quote: \" Apostrophe: ' Backslash: \\", "O'Reilly's \"book\"", null +} diff --git a/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/output/insert_records.out.sql b/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/output/insert_records.out.sql new file mode 100644 index 000000000..db4f3da38 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/output/insert_records.out.sql @@ -0,0 +1,32 @@ +CREATE TABLE "users" ( + "id" integer PRIMARY KEY, + "name" varchar, + "email" varchar, + "active" boolean, + "created_at" timestamp +); + +CREATE TABLE "posts" ( + "id" integer PRIMARY KEY, + "user_id" integer, + "title" varchar, + "content" text +); + +ALTER TABLE "users" ADD FOREIGN KEY ("id") REFERENCES "posts" ("user_id"); + +-- Use deferred constraints for INSERT +BEGIN; +SET CONSTRAINTS ALL DEFERRED; + +INSERT INTO "users" ("id", "name", "email", "active", "created_at") +VALUES + (1, 'Alice', 'alice@example.com', TRUE, '2024-01-15 10:30:00'), + (2, 'Bob', 'bob@example.com', FALSE, '2024-01-16 14:20:00'), + (3, 'Charlie', NULL, TRUE, '2024-01-17 09:15:00'); +INSERT INTO "posts" ("id", "user_id", "title", "content") +VALUES + (1, 1, 'First Post', 'Hello World'), + (2, 1, 'Second Post', 'It''s a beautiful day'); + +COMMIT; diff --git a/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/output/sample_data_edge_cases.out.sql b/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/output/sample_data_edge_cases.out.sql new file mode 100644 index 000000000..65b60274d --- /dev/null +++ b/packages/dbml-core/__tests__/examples/exporter/postgres_exporter/output/sample_data_edge_cases.out.sql @@ -0,0 +1,31 @@ +CREATE TABLE "edge_cases" ( + "id" integer PRIMARY KEY, + "scientific_notation_pos" float, + "scientific_notation_neg" float, + "signed_positive" integer, + "signed_negative" integer, + "sql_function_default" varchar, + "dbml_expr_default" integer, + "datetime_value" timestamp, + "string_with_newline" text, + "string_with_backslash" varchar, + "string_with_escape_seq" varchar, + "string_with_quotes" varchar, + "null_value" varchar +); + +-- Use deferred constraints for INSERT +BEGIN; +SET CONSTRAINTS ALL DEFERRED; + +INSERT INTO "edge_cases" ("id", "scientific_notation_pos", "scientific_notation_neg", "signed_positive", "signed_negative", "sql_function_default", "dbml_expr_default", "datetime_value", "string_with_newline", "string_with_backslash", "string_with_escape_seq", "string_with_quotes", "null_value") +VALUES + (1, 123000, -0.00456, 42, -100, NOW(), 1 + 2 * 3, '2024-01-15 10:30:00.123456', 'Line 1 +Line 2 +Line 3', 'C:\Users\path\file.txt', 'Tab: Newline: +Carriage return: ', 'She said "Hello" and ''Hi''', NULL), + (2, 99900000000, -1.11e-10, 0, 0, CURRENT_TIMESTAMP, LENGTH('test'), '2023-12-31 23:59:59', 'First line + +Third line', 'Escaped backslash: \\', 'Quote: " Apostrophe: '' Backslash: \', 'O''Reilly''s "book"', NULL); + +COMMIT; \ No newline at end of file diff --git a/packages/dbml-core/__tests__/examples/importer/mssql_importer/input/sample_data_edge_cases.in.sql b/packages/dbml-core/__tests__/examples/importer/mssql_importer/input/sample_data_edge_cases.in.sql new file mode 100644 index 000000000..ea270d394 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/importer/mssql_importer/input/sample_data_edge_cases.in.sql @@ -0,0 +1,12 @@ +-- Test edge cases for data types: scientific notation in defaults, signed numbers, datetime +CREATE TABLE [sample_data_test] ( + [id] int, + [scientific_num] decimal(20,10) DEFAULT 1.23e-5, + [signed_positive] int DEFAULT +42, + [signed_negative] int DEFAULT -99, + [sql_func_default] datetime DEFAULT (GETDATE()), + [datetime_val] datetime DEFAULT '2024-01-15 10:30:00', + [string_simple] nvarchar(200) DEFAULT 'test value', + [computed_expr] AS ([id] + 10) PERSISTED +) +GO diff --git a/packages/dbml-core/__tests__/examples/importer/mssql_importer/output/sample_data_edge_cases.out.dbml b/packages/dbml-core/__tests__/examples/importer/mssql_importer/output/sample_data_edge_cases.out.dbml new file mode 100644 index 000000000..6dcc36d5f --- /dev/null +++ b/packages/dbml-core/__tests__/examples/importer/mssql_importer/output/sample_data_edge_cases.out.dbml @@ -0,0 +1,10 @@ +Table "sample_data_test" { + "id" int + "scientific_num" decimal(20,10) [default: 1.23e-5] + "signed_positive" int [default: +42] + "signed_negative" int [default: -99] + "sql_func_default" datetime [default: `GETDATE()`] + "datetime_val" datetime [default: '2024-01-15 10:30:00'] + "string_simple" nvarchar(200) [default: 'test value'] + "computed_expr" "AS [id] + 10 PERSISTED" +} diff --git a/packages/dbml-core/__tests__/examples/importer/mysql_importer/input/sample_data_edge_cases.in.sql b/packages/dbml-core/__tests__/examples/importer/mysql_importer/input/sample_data_edge_cases.in.sql new file mode 100644 index 000000000..f89f8c038 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/importer/mysql_importer/input/sample_data_edge_cases.in.sql @@ -0,0 +1,25 @@ +CREATE TABLE `edge_cases` ( + `id` integer PRIMARY KEY, + `scientific_notation_pos` float, + `scientific_notation_neg` float, + `signed_positive` integer, + `signed_negative` integer, + `sql_function_default` varchar(255), + `dbml_expr_default` integer, + `datetime_value` timestamp, + `string_with_newline` text, + `string_with_backslash` varchar(255), + `string_with_escape_seq` varchar(255), + `string_with_quotes` varchar(255), + `null_value` varchar(255) +); + +INSERT INTO `edge_cases` (`id`, `scientific_notation_pos`, `scientific_notation_neg`, `signed_positive`, `signed_negative`, `sql_function_default`, `dbml_expr_default`, `datetime_value`, `string_with_newline`, `string_with_backslash`, `string_with_escape_seq`, `string_with_quotes`, `null_value`) +VALUES + (1, 123000, -0.00456, 42, -100, NOW(), 1 + 2 * 3, '2024-01-15 10:30:00.123456', 'Line 1 +Line 2 +Line 3', 'C:\\Users\\path\\file.txt', 'Tab: Newline: +Carriage return:', 'She said "Hello" and ''Hi''', NULL), + (2, 9.99e10, -1.11e-10, 0, 0, CURRENT_TIMESTAMP, LENGTH('test'), '2023-12-31 23:59:59', 'First line + +Third line', 'Escaped backslash: \\\\', 'Quote: " Apostrophe: '' Backslash: \\', 'O''Reilly''s "book"', NULL); diff --git a/packages/dbml-core/__tests__/examples/importer/mysql_importer/output/sample_data_edge_cases.out.dbml b/packages/dbml-core/__tests__/examples/importer/mysql_importer/output/sample_data_edge_cases.out.dbml new file mode 100644 index 000000000..01d3ff570 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/importer/mysql_importer/output/sample_data_edge_cases.out.dbml @@ -0,0 +1,25 @@ +Table "edge_cases" { + "id" integer [pk] + "scientific_notation_pos" float + "scientific_notation_neg" float + "signed_positive" integer + "signed_negative" integer + "sql_function_default" varchar(255) + "dbml_expr_default" integer + "datetime_value" timestamp + "string_with_newline" text + "string_with_backslash" varchar(255) + "string_with_escape_seq" varchar(255) + "string_with_quotes" varchar(255) + "null_value" varchar(255) +} + +records "edge_cases"("id", "scientific_notation_pos", "scientific_notation_neg", "signed_positive", "signed_negative", "sql_function_default", "dbml_expr_default", "datetime_value", "string_with_newline", "string_with_backslash", "string_with_escape_seq", "string_with_quotes", "null_value") { + 1, 123000, `-0.00456`, 42, `-100`, `NOW()`, null, null, null, '2024-01-15 10:30:00.123456', '''Line 1 +Line 2 +Line 3''', 'C:\\\\Users\\\\path\\\\file.txt', '''Tab: Newline: +Carriage return:''', 'She said "Hello" and \'\'Hi\'\'', 'NULL' + 2, 99900000000, `-1.11e-10`, 0, 0, null, `LENGTH('test')`, '2023-12-31 23:59:59', '''First line + +Third line''', 'Escaped backslash: \\\\\\\\', 'Quote: " Apostrophe: \'\' Backslash: \\\\', 'O\'\'Reilly\'\'s "book"', 'NULL' +} diff --git a/packages/dbml-core/__tests__/examples/importer/postgres_importer/input/sample_data_edge_cases.in.sql b/packages/dbml-core/__tests__/examples/importer/postgres_importer/input/sample_data_edge_cases.in.sql new file mode 100644 index 000000000..1367208ad --- /dev/null +++ b/packages/dbml-core/__tests__/examples/importer/postgres_importer/input/sample_data_edge_cases.in.sql @@ -0,0 +1,25 @@ +CREATE TABLE "edge_cases" ( + "id" integer PRIMARY KEY, + "scientific_notation_pos" float, + "scientific_notation_neg" float, + "signed_positive" integer, + "signed_negative" integer, + "sql_function_default" varchar, + "dbml_expr_default" integer, + "datetime_value" timestamp, + "string_with_newline" text, + "string_with_backslash" varchar, + "string_with_escape_seq" varchar, + "string_with_quotes" varchar, + "null_value" varchar +); + +INSERT INTO "edge_cases" ("id", "scientific_notation_pos", "scientific_notation_neg", "signed_positive", "signed_negative", "sql_function_default", "dbml_expr_default", "datetime_value", "string_with_newline", "string_with_backslash", "string_with_escape_seq", "string_with_quotes", "null_value") +VALUES + (1, 123000, -0.00456, 42, -100, NOW(), 1 + 2 * 3, '2024-01-15 10:30:00.123456', 'Line 1 +Line 2 +Line 3', 'C:\Users\path\file.txt', 'Tab: Newline: +Carriage return:', 'She said "Hello" and ''Hi''', NULL), + (2, 9.99e10, -1.11e-10, 0, 0, CURRENT_TIMESTAMP, LENGTH('test'), '2023-12-31 23:59:59', 'First line + +Third line', 'Escaped backslash: \\', 'Quote: " Apostrophe: '' Backslash: \', 'O''Reilly''s "book"', NULL); diff --git a/packages/dbml-core/__tests__/examples/importer/postgres_importer/output/sample_data_edge_cases.out.dbml b/packages/dbml-core/__tests__/examples/importer/postgres_importer/output/sample_data_edge_cases.out.dbml new file mode 100644 index 000000000..061fc3a57 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/importer/postgres_importer/output/sample_data_edge_cases.out.dbml @@ -0,0 +1,25 @@ +Table "edge_cases" { + "id" integer [pk] + "scientific_notation_pos" float + "scientific_notation_neg" float + "signed_positive" integer + "signed_negative" integer + "sql_function_default" varchar + "dbml_expr_default" integer + "datetime_value" timestamp + "string_with_newline" text + "string_with_backslash" varchar + "string_with_escape_seq" varchar + "string_with_quotes" varchar + "null_value" varchar +} + +records "edge_cases"("id", "scientific_notation_pos", "scientific_notation_neg", "signed_positive", "signed_negative", "sql_function_default", "dbml_expr_default", "datetime_value", "string_with_newline", "string_with_backslash", "string_with_escape_seq", "string_with_quotes", "null_value") { + 1, 123000, 0.00456, 42, 100, `NOW()`, 1, 2, 3, '2024-01-15 10:30:00.123456', '''Line 1 +Line 2 +Line 3''', 'C:\\Users\\path\\file.txt', '''Tab: Newline: +Carriage return:''', 'She said "Hello" and \'\'Hi\'\'', `NULL` + 2, 99900000000, 1.11e-10, 0, 0, `CURRENT_TIMESTAMP`, `LENGTH('test')`, '2023-12-31 23:59:59', '''First line + +Third line''', 'Escaped backslash: \\\\', 'Quote: " Apostrophe: \'\' Backslash: \\', 'O\'\'Reilly\'\'s "book"', `NULL` +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/input/records.in.json b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/input/records.in.json new file mode 100644 index 000000000..883c38438 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/input/records.in.json @@ -0,0 +1,92 @@ +{ + "schemas": [], + "tables": [ + { + "name": "users", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { + "start": { "offset": 0, "line": 1, "column": 1 }, + "end": { "offset": 10, "line": 1, "column": 11 } + }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { + "start": { "offset": 0, "line": 2, "column": 1 }, + "end": { "offset": 10, "line": 2, "column": 11 } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "active", + "type": { + "schemaName": null, + "type_name": "boolean", + "args": null + }, + "token": { + "start": { "offset": 0, "line": 3, "column": 1 }, + "end": { "offset": 10, "line": 3, "column": 11 } + }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { + "start": { "offset": 0, "line": 1, "column": 1 }, + "end": { "offset": 100, "line": 5, "column": 2 } + }, + "indexes": [] + } + ], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": null, + "tableName": "users", + "columns": ["id", "name", "active"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "Alice", "type": "string" }, + { "value": true, "type": "bool" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "Bob", "type": "string" }, + { "value": false, "type": "bool" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": null, "type": "string" }, + { "value": true, "type": "bool" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/input/records_advanced.in.json b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/input/records_advanced.in.json new file mode 100644 index 000000000..f40d6f794 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/input/records_advanced.in.json @@ -0,0 +1,122 @@ +{ + "schemas": [ + { + "name": "myschema", + "token": { + "start": { "offset": 0, "line": 1, "column": 1 }, + "end": { "offset": 10, "line": 1, "column": 11 } + }, + "tables": [ + { + "name": "products", + "schemaName": "myschema", + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { + "start": { "offset": 0, "line": 1, "column": 1 }, + "end": { "offset": 10, "line": 1, "column": 11 } + }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { + "start": { "offset": 0, "line": 2, "column": 1 }, + "end": { "offset": 10, "line": 2, "column": 11 } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "price", + "type": { + "schemaName": null, + "type_name": "decimal", + "args": null + }, + "token": { + "start": { "offset": 0, "line": 3, "column": 1 }, + "end": { "offset": 10, "line": 3, "column": 11 } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "created_at", + "type": { + "schemaName": null, + "type_name": "timestamp", + "args": null + }, + "token": { + "start": { "offset": 0, "line": 4, "column": 1 }, + "end": { "offset": 10, "line": 4, "column": 11 } + }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { + "start": { "offset": 0, "line": 1, "column": 1 }, + "end": { "offset": 100, "line": 5, "column": 2 } + }, + "indexes": [] + } + ], + "enums": [], + "tableGroups": [], + "refs": [] + } + ], + "tables": [], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": "myschema", + "tableName": "products", + "columns": ["id", "name", "price", "created_at"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "Widget", "type": "string" }, + { "value": 9.99, "type": "real" }, + { "value": "2024-01-15T10:30:00Z", "type": "datetime" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "Gadget's \"Pro\"", "type": "string" }, + { "value": 19.99, "type": "real" }, + { "value": "now()", "type": "expression" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": "Item", "type": "string" }, + { "value": 0, "type": "real" }, + { "value": null, "type": "datetime" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/input/records_enum.in.json b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/input/records_enum.in.json new file mode 100644 index 000000000..4c7464116 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/input/records_enum.in.json @@ -0,0 +1,106 @@ +{ + "schemas": [], + "tables": [ + { + "name": "orders", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { + "start": { "offset": 0, "line": 1, "column": 1 }, + "end": { "offset": 10, "line": 1, "column": 11 } + }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "status", + "type": { + "schemaName": null, + "type_name": "status_enum", + "args": null + }, + "token": { + "start": { "offset": 0, "line": 2, "column": 1 }, + "end": { "offset": 10, "line": 2, "column": 11 } + }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { + "start": { "offset": 0, "line": 1, "column": 1 }, + "end": { "offset": 100, "line": 5, "column": 2 } + }, + "indexes": [] + } + ], + "notes": [], + "refs": [], + "enums": [ + { + "name": "status_enum", + "schemaName": null, + "token": { + "start": { "offset": 0, "line": 1, "column": 1 }, + "end": { "offset": 50, "line": 5, "column": 2 } + }, + "values": [ + { + "name": "pending", + "token": { + "start": { "offset": 0, "line": 2, "column": 1 }, + "end": { "offset": 10, "line": 2, "column": 11 } + } + }, + { + "name": "active", + "token": { + "start": { "offset": 0, "line": 3, "column": 1 }, + "end": { "offset": 10, "line": 3, "column": 11 } + } + }, + { + "name": "completed", + "token": { + "start": { "offset": 0, "line": 4, "column": 1 }, + "end": { "offset": 10, "line": 4, "column": 11 } + } + } + ] + } + ], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": null, + "tableName": "orders", + "columns": ["id", "status"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "status_enum.pending", "type": "status_enum" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "status_enum.active", "type": "status_enum" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": "status_enum.completed", "type": "status_enum" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/output/records.out.dbml b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/output/records.out.dbml new file mode 100644 index 000000000..30f798432 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/output/records.out.dbml @@ -0,0 +1,11 @@ +Table "users" { + "id" integer [pk] + "name" varchar + "active" boolean +} + +records "users"("id", "name", "active") { + 1, 'Alice', true + 2, 'Bob', false + 3, null, true +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/output/records_advanced.out.dbml b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/output/records_advanced.out.dbml new file mode 100644 index 000000000..0d19c7e89 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/output/records_advanced.out.dbml @@ -0,0 +1,12 @@ +Table "myschema"."products" { + "id" integer [pk] + "name" varchar + "price" decimal + "created_at" timestamp +} + +records "myschema"."products"("id", "name", "price", "created_at") { + 1, 'Widget', 9.99, '2024-01-15T10:30:00Z' + 2, "Gadget's \"Pro\"", 19.99, `now()` + 3, 'Item', 0, null +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/output/records_enum.out.dbml b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/output/records_enum.out.dbml new file mode 100644 index 000000000..871d7466c --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/dbml_exporter/output/records_enum.out.dbml @@ -0,0 +1,16 @@ +Enum "status_enum" { + "pending" + "active" + "completed" +} + +Table "orders" { + "id" integer [pk] + "status" status_enum +} + +records "orders"("id", "status") { + 1, status_enum.pending + 2, status_enum.active + 3, status_enum.completed +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/input/insert_records.in.json b/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/input/insert_records.in.json new file mode 100644 index 000000000..3c0975edc --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/input/insert_records.in.json @@ -0,0 +1,204 @@ +{ + "schemas": [], + "tables": [ + { + "name": "users", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "email", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "active", + "type": { + "schemaName": null, + "type_name": "boolean", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "created_at", + "type": { + "schemaName": null, + "type_name": "timestamp", + "args": null + }, + "token": { "start": { "offset": 0, "line": 5, "column": 1 }, "end": { "offset": 10, "line": 5, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 6, "column": 2 } }, + "indexes": [] + }, + { + "name": "posts", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "user_id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "title", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "content", + "type": { + "schemaName": null, + "type_name": "text", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 5, "column": 2 } }, + "indexes": [] + } + ], + "notes": [], + "refs": [ + { + "name": null, + "endpoints": [ + { + "schemaName": null, + "tableName": "posts", + "fieldNames": ["user_id"], + "relation": "1" + }, + { + "schemaName": null, + "tableName": "users", + "fieldNames": ["id"], + "relation": "*" + } + ] + } + ], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": null, + "tableName": "users", + "columns": ["id", "name", "email", "active", "created_at"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "Alice", "type": "string" }, + { "value": "alice@example.com", "type": "string" }, + { "value": true, "type": "bool" }, + { "value": "2024-01-15 10:30:00", "type": "timestamp" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "Bob", "type": "string" }, + { "value": "bob@example.com", "type": "string" }, + { "value": false, "type": "bool" }, + { "value": "2024-01-16 14:20:00", "type": "timestamp" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": "Charlie", "type": "string" }, + { "value": null, "type": "string" }, + { "value": true, "type": "bool" }, + { "value": "2024-01-17 09:15:00", "type": "timestamp" } + ] + ] + }, + { + "schemaName": null, + "tableName": "posts", + "columns": ["id", "user_id", "title", "content"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": 1, "type": "integer" }, + { "value": "First Post", "type": "string" }, + { "value": "Hello World", "type": "text" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": 1, "type": "integer" }, + { "value": "Second Post", "type": "string" }, + { "value": "It's a beautiful day", "type": "text" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/input/sample_data_edge_cases.in.json b/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/input/sample_data_edge_cases.in.json new file mode 100644 index 000000000..99425ae62 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/input/sample_data_edge_cases.in.json @@ -0,0 +1,185 @@ +{ + "schemas": [], + "tables": [ + { + "name": "sample_data_test", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "scientific_num", + "type": { + "schemaName": null, + "type_name": "decimal(20,10)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "signed_positive", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "signed_negative", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "sql_func", + "type": { + "schemaName": null, + "type_name": "datetime", + "args": null + }, + "token": { "start": { "offset": 0, "line": 5, "column": 1 }, "end": { "offset": 10, "line": 5, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "datetime_val", + "type": { + "schemaName": null, + "type_name": "datetime", + "args": null + }, + "token": { "start": { "offset": 0, "line": 6, "column": 1 }, "end": { "offset": 10, "line": 6, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "string_newline", + "type": { + "schemaName": null, + "type_name": "varchar(200)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 7, "column": 1 }, "end": { "offset": 10, "line": 7, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "string_backslash", + "type": { + "schemaName": null, + "type_name": "varchar(200)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 8, "column": 1 }, "end": { "offset": 10, "line": 8, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "string_escape", + "type": { + "schemaName": null, + "type_name": "varchar(200)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 9, "column": 1 }, "end": { "offset": 10, "line": 9, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "dbml_expr", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 10, "column": 1 }, "end": { "offset": 10, "line": 10, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 11, "column": 2 } }, + "indexes": [] + } + ], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": null, + "tableName": "sample_data_test", + "columns": ["id", "scientific_num", "signed_positive", "signed_negative", "sql_func", "datetime_val", "string_newline", "string_backslash", "string_escape", "dbml_expr"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "1.23e-5", "type": "real" }, + { "value": "+42", "type": "integer" }, + { "value": "-99", "type": "integer" }, + { "value": "NOW()", "type": "expression" }, + { "value": "2024-01-15 10:30:00", "type": "datetime" }, + { "value": "line1\\nline2\\nline3", "type": "string" }, + { "value": "path\\\\to\\\\file", "type": "string" }, + { "value": "tab\\there\\nquote\\'end", "type": "string" }, + { "value": "[id] + 10", "type": "expression" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "-3.14E+2", "type": "real" }, + { "value": "+0", "type": "integer" }, + { "value": "-0", "type": "integer" }, + { "value": "CURRENT_TIMESTAMP()", "type": "expression" }, + { "value": "2024-12-31 23:59:59", "type": "datetime" }, + { "value": "multi\\nline\\ntext\\nhere", "type": "string" }, + { "value": "C:\\\\Users\\\\test", "type": "string" }, + { "value": "quote\\\"double", "type": "string" }, + { "value": "[id] * 2", "type": "expression" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": "6.022e23", "type": "real" }, + { "value": "+123", "type": "integer" }, + { "value": "-456", "type": "integer" }, + { "value": "UTC_TIMESTAMP()", "type": "expression" }, + { "value": "2024-06-15 12:00:00", "type": "datetime" }, + { "value": "simple text", "type": "string" }, + { "value": "double\\\\\\\\backslash", "type": "string" }, + { "value": "mixed\\ttab\\nand\\rnewline", "type": "string" }, + { "value": "[scientific_num] / 100", "type": "expression" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/output/insert_records.out.sql b/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/output/insert_records.out.sql new file mode 100644 index 000000000..a7507d42e --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/output/insert_records.out.sql @@ -0,0 +1,39 @@ +CREATE TABLE [users] ( + [id] integer PRIMARY KEY, + [name] nvarchar(255), + [email] nvarchar(255), + [active] boolean, + [created_at] timestamp +) +GO + +CREATE TABLE [posts] ( + [id] integer PRIMARY KEY, + [user_id] integer, + [title] nvarchar(255), + [content] text +) +GO + +ALTER TABLE [users] ADD FOREIGN KEY ([id]) REFERENCES [posts] ([user_id]) +GO + +-- Disable constraint checks for INSERT (SQL Server does not support DEFERRED) +EXEC sp_MSforeachtable "ALTER TABLE ? NOCHECK CONSTRAINT all"; +GO + +INSERT INTO [users] ([id], [name], [email], [active], [created_at]) +VALUES + (1, 'Alice', 'alice@example.com', 1, '2024-01-15 10:30:00'), + (2, 'Bob', 'bob@example.com', 0, '2024-01-16 14:20:00'), + (3, 'Charlie', NULL, 1, '2024-01-17 09:15:00'); +GO +INSERT INTO [posts] ([id], [user_id], [title], [content]) +VALUES + (1, 1, 'First Post', 'Hello World'), + (2, 1, 'Second Post', 'It''s a beautiful day'); +GO + +-- Re-enable constraint checks +EXEC sp_MSforeachtable "ALTER TABLE ? WITH CHECK CHECK CONSTRAINT all"; +GO \ No newline at end of file diff --git a/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/output/sample_data_edge_cases.out.sql b/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/output/sample_data_edge_cases.out.sql new file mode 100644 index 000000000..35e4b7511 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/mssql_exporter/output/sample_data_edge_cases.out.sql @@ -0,0 +1,28 @@ +CREATE TABLE [sample_data_test] ( + [id] int, + [scientific_num] decimal(20,10), + [signed_positive] int, + [signed_negative] int, + [sql_func] datetime, + [datetime_val] datetime, + [string_newline] varchar(200), + [string_backslash] varchar(200), + [string_escape] varchar(200), + [dbml_expr] int +) +GO + +-- Disable constraint checks for INSERT (SQL Server does not support DEFERRED) +EXEC sp_MSforeachtable "ALTER TABLE ? NOCHECK CONSTRAINT all"; +GO + +INSERT INTO [sample_data_test] ([id], [scientific_num], [signed_positive], [signed_negative], [sql_func], [datetime_val], [string_newline], [string_backslash], [string_escape], [dbml_expr]) +VALUES + (1, 1.23e-5, +42, -99, NOW(), '2024-01-15 10:30:00', 'line1\nline2\nline3', 'path\\to\\file', 'tab\there\nquote\''end', [id] + 10), + (2, -3.14E+2, +0, -0, CURRENT_TIMESTAMP(), '2024-12-31 23:59:59', 'multi\nline\ntext\nhere', 'C:\\Users\\test', 'quote\"double', [id] * 2), + (3, 6.022e23, +123, -456, UTC_TIMESTAMP(), '2024-06-15 12:00:00', 'simple text', 'double\\\\backslash', 'mixed\ttab\nand\rnewline', [scientific_num] / 100); +GO + +-- Re-enable constraint checks +EXEC sp_MSforeachtable "ALTER TABLE ? WITH CHECK CHECK CONSTRAINT all"; +GO \ No newline at end of file diff --git a/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/input/insert_records.in.json b/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/input/insert_records.in.json new file mode 100644 index 000000000..3c0975edc --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/input/insert_records.in.json @@ -0,0 +1,204 @@ +{ + "schemas": [], + "tables": [ + { + "name": "users", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "email", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "active", + "type": { + "schemaName": null, + "type_name": "boolean", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "created_at", + "type": { + "schemaName": null, + "type_name": "timestamp", + "args": null + }, + "token": { "start": { "offset": 0, "line": 5, "column": 1 }, "end": { "offset": 10, "line": 5, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 6, "column": 2 } }, + "indexes": [] + }, + { + "name": "posts", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "user_id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "title", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "content", + "type": { + "schemaName": null, + "type_name": "text", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 5, "column": 2 } }, + "indexes": [] + } + ], + "notes": [], + "refs": [ + { + "name": null, + "endpoints": [ + { + "schemaName": null, + "tableName": "posts", + "fieldNames": ["user_id"], + "relation": "1" + }, + { + "schemaName": null, + "tableName": "users", + "fieldNames": ["id"], + "relation": "*" + } + ] + } + ], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": null, + "tableName": "users", + "columns": ["id", "name", "email", "active", "created_at"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "Alice", "type": "string" }, + { "value": "alice@example.com", "type": "string" }, + { "value": true, "type": "bool" }, + { "value": "2024-01-15 10:30:00", "type": "timestamp" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "Bob", "type": "string" }, + { "value": "bob@example.com", "type": "string" }, + { "value": false, "type": "bool" }, + { "value": "2024-01-16 14:20:00", "type": "timestamp" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": "Charlie", "type": "string" }, + { "value": null, "type": "string" }, + { "value": true, "type": "bool" }, + { "value": "2024-01-17 09:15:00", "type": "timestamp" } + ] + ] + }, + { + "schemaName": null, + "tableName": "posts", + "columns": ["id", "user_id", "title", "content"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": 1, "type": "integer" }, + { "value": "First Post", "type": "string" }, + { "value": "Hello World", "type": "text" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": 1, "type": "integer" }, + { "value": "Second Post", "type": "string" }, + { "value": "It's a beautiful day", "type": "text" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/input/sample_data_edge_cases.in.json b/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/input/sample_data_edge_cases.in.json new file mode 100644 index 000000000..a61b56d5a --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/input/sample_data_edge_cases.in.json @@ -0,0 +1,185 @@ +{ + "schemas": [], + "tables": [ + { + "name": "sample_data_test", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "scientific_num", + "type": { + "schemaName": null, + "type_name": "decimal(20,10)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "signed_positive", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "signed_negative", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "sql_func", + "type": { + "schemaName": null, + "type_name": "datetime", + "args": null + }, + "token": { "start": { "offset": 0, "line": 5, "column": 1 }, "end": { "offset": 10, "line": 5, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "datetime_val", + "type": { + "schemaName": null, + "type_name": "datetime", + "args": null + }, + "token": { "start": { "offset": 0, "line": 6, "column": 1 }, "end": { "offset": 10, "line": 6, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "string_newline", + "type": { + "schemaName": null, + "type_name": "varchar(200)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 7, "column": 1 }, "end": { "offset": 10, "line": 7, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "string_backslash", + "type": { + "schemaName": null, + "type_name": "varchar(200)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 8, "column": 1 }, "end": { "offset": 10, "line": 8, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "string_escape", + "type": { + "schemaName": null, + "type_name": "varchar(200)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 9, "column": 1 }, "end": { "offset": 10, "line": 9, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "dbml_expr", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 10, "column": 1 }, "end": { "offset": 10, "line": 10, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 11, "column": 2 } }, + "indexes": [] + } + ], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": null, + "tableName": "sample_data_test", + "columns": ["id", "scientific_num", "signed_positive", "signed_negative", "sql_func", "datetime_val", "string_newline", "string_backslash", "string_escape", "dbml_expr"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "1.23e-5", "type": "real" }, + { "value": "+42", "type": "integer" }, + { "value": "-99", "type": "integer" }, + { "value": "NOW()", "type": "expression" }, + { "value": "2024-01-15 10:30:00", "type": "datetime" }, + { "value": "line1\\nline2\\nline3", "type": "string" }, + { "value": "path\\\\to\\\\file", "type": "string" }, + { "value": "tab\\there\\nquote\\'end", "type": "string" }, + { "value": "`id` + 10", "type": "expression" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "-3.14E+2", "type": "real" }, + { "value": "+0", "type": "integer" }, + { "value": "-0", "type": "integer" }, + { "value": "CURRENT_TIMESTAMP()", "type": "expression" }, + { "value": "2024-12-31 23:59:59", "type": "datetime" }, + { "value": "multi\\nline\\ntext\\nhere", "type": "string" }, + { "value": "C:\\\\Users\\\\test", "type": "string" }, + { "value": "quote\\\"double", "type": "string" }, + { "value": "`id` * 2", "type": "expression" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": "6.022e23", "type": "real" }, + { "value": "+123", "type": "integer" }, + { "value": "-456", "type": "integer" }, + { "value": "UTC_TIMESTAMP()", "type": "expression" }, + { "value": "2024-06-15 12:00:00", "type": "datetime" }, + { "value": "simple text", "type": "string" }, + { "value": "double\\\\\\\\backslash", "type": "string" }, + { "value": "mixed\\ttab\\nand\\rnewline", "type": "string" }, + { "value": "`scientific_num` / 100", "type": "expression" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/output/insert_records.out.sql b/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/output/insert_records.out.sql new file mode 100644 index 000000000..26c58f594 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/output/insert_records.out.sql @@ -0,0 +1,32 @@ +CREATE TABLE `users` ( + `id` integer PRIMARY KEY, + `name` varchar(255), + `email` varchar(255), + `active` boolean, + `created_at` timestamp +); + +CREATE TABLE `posts` ( + `id` integer PRIMARY KEY, + `user_id` integer, + `title` varchar(255), + `content` text +); + +ALTER TABLE `users` ADD FOREIGN KEY (`id`) REFERENCES `posts` (`user_id`); + +-- Disable foreign key checks for INSERT (MySQL does not support DEFERRED) +SET FOREIGN_KEY_CHECKS = 0; + +INSERT INTO `users` (`id`, `name`, `email`, `active`, `created_at`) +VALUES + (1, 'Alice', 'alice@example.com', 1, '2024-01-15 10:30:00'), + (2, 'Bob', 'bob@example.com', 0, '2024-01-16 14:20:00'), + (3, 'Charlie', NULL, 1, '2024-01-17 09:15:00'); +INSERT INTO `posts` (`id`, `user_id`, `title`, `content`) +VALUES + (1, 1, 'First Post', 'Hello World'), + (2, 1, 'Second Post', 'It''s a beautiful day'); + +-- Re-enable foreign key checks +SET FOREIGN_KEY_CHECKS = 1; \ No newline at end of file diff --git a/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/output/sample_data_edge_cases.out.sql b/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/output/sample_data_edge_cases.out.sql new file mode 100644 index 000000000..34232df8e --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/mysql_exporter/output/sample_data_edge_cases.out.sql @@ -0,0 +1,24 @@ +CREATE TABLE `sample_data_test` ( + `id` int, + `scientific_num` decimal(20,10), + `signed_positive` int, + `signed_negative` int, + `sql_func` datetime, + `datetime_val` datetime, + `string_newline` varchar(200), + `string_backslash` varchar(200), + `string_escape` varchar(200), + `dbml_expr` int +); + +-- Disable foreign key checks for INSERT (MySQL does not support DEFERRED) +SET FOREIGN_KEY_CHECKS = 0; + +INSERT INTO `sample_data_test` (`id`, `scientific_num`, `signed_positive`, `signed_negative`, `sql_func`, `datetime_val`, `string_newline`, `string_backslash`, `string_escape`, `dbml_expr`) +VALUES + (1, 1.23e-5, +42, -99, NOW(), '2024-01-15 10:30:00', 'line1\\nline2\\nline3', 'path\\\\to\\\\file', 'tab\\there\\nquote\\''end', `id` + 10), + (2, -3.14E+2, +0, -0, CURRENT_TIMESTAMP(), '2024-12-31 23:59:59', 'multi\\nline\\ntext\\nhere', 'C:\\\\Users\\\\test', 'quote\\"double', `id` * 2), + (3, 6.022e23, +123, -456, UTC_TIMESTAMP(), '2024-06-15 12:00:00', 'simple text', 'double\\\\\\\\backslash', 'mixed\\ttab\\nand\\rnewline', `scientific_num` / 100); + +-- Re-enable foreign key checks +SET FOREIGN_KEY_CHECKS = 1; \ No newline at end of file diff --git a/packages/dbml-core/__tests__/examples/model_exporter/oracle_exporter/input/insert_records.in.json b/packages/dbml-core/__tests__/examples/model_exporter/oracle_exporter/input/insert_records.in.json new file mode 100644 index 000000000..3c0975edc --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/oracle_exporter/input/insert_records.in.json @@ -0,0 +1,204 @@ +{ + "schemas": [], + "tables": [ + { + "name": "users", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "email", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "active", + "type": { + "schemaName": null, + "type_name": "boolean", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "created_at", + "type": { + "schemaName": null, + "type_name": "timestamp", + "args": null + }, + "token": { "start": { "offset": 0, "line": 5, "column": 1 }, "end": { "offset": 10, "line": 5, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 6, "column": 2 } }, + "indexes": [] + }, + { + "name": "posts", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "user_id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "title", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "content", + "type": { + "schemaName": null, + "type_name": "text", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 5, "column": 2 } }, + "indexes": [] + } + ], + "notes": [], + "refs": [ + { + "name": null, + "endpoints": [ + { + "schemaName": null, + "tableName": "posts", + "fieldNames": ["user_id"], + "relation": "1" + }, + { + "schemaName": null, + "tableName": "users", + "fieldNames": ["id"], + "relation": "*" + } + ] + } + ], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": null, + "tableName": "users", + "columns": ["id", "name", "email", "active", "created_at"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "Alice", "type": "string" }, + { "value": "alice@example.com", "type": "string" }, + { "value": true, "type": "bool" }, + { "value": "2024-01-15 10:30:00", "type": "timestamp" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "Bob", "type": "string" }, + { "value": "bob@example.com", "type": "string" }, + { "value": false, "type": "bool" }, + { "value": "2024-01-16 14:20:00", "type": "timestamp" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": "Charlie", "type": "string" }, + { "value": null, "type": "string" }, + { "value": true, "type": "bool" }, + { "value": "2024-01-17 09:15:00", "type": "timestamp" } + ] + ] + }, + { + "schemaName": null, + "tableName": "posts", + "columns": ["id", "user_id", "title", "content"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": 1, "type": "integer" }, + { "value": "First Post", "type": "string" }, + { "value": "Hello World", "type": "text" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": 1, "type": "integer" }, + { "value": "Second Post", "type": "string" }, + { "value": "It's a beautiful day", "type": "text" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/oracle_exporter/output/insert_records.out.sql b/packages/dbml-core/__tests__/examples/model_exporter/oracle_exporter/output/insert_records.out.sql new file mode 100644 index 000000000..77a6612d5 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/oracle_exporter/output/insert_records.out.sql @@ -0,0 +1,31 @@ +CREATE TABLE "users" ( + "id" integer PRIMARY KEY, + "name" varchar, + "email" varchar, + "active" boolean, + "created_at" timestamp +); + +CREATE TABLE "posts" ( + "id" integer PRIMARY KEY, + "user_id" integer, + "title" varchar, + "content" text +); + +ALTER TABLE "users" ADD FOREIGN KEY ("id") REFERENCES "posts" ("user_id"); + +-- Use deferred constraints for INSERT +SET CONSTRAINTS ALL DEFERRED; + +INSERT ALL + INTO "users" ("id", "name", "email", "active", "created_at") VALUES (1, 'Alice', 'alice@example.com', 1, '2024-01-15 10:30:00') + INTO "users" ("id", "name", "email", "active", "created_at") VALUES (2, 'Bob', 'bob@example.com', 0, '2024-01-16 14:20:00') + INTO "users" ("id", "name", "email", "active", "created_at") VALUES (3, 'Charlie', NULL, 1, '2024-01-17 09:15:00') +SELECT * FROM dual; +INSERT ALL + INTO "posts" ("id", "user_id", "title", "content") VALUES (1, 1, 'First Post', 'Hello World') + INTO "posts" ("id", "user_id", "title", "content") VALUES (2, 1, 'Second Post', 'It''s a beautiful day') +SELECT * FROM dual; + +COMMIT; \ No newline at end of file diff --git a/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/input/insert_records.in.json b/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/input/insert_records.in.json new file mode 100644 index 000000000..3c0975edc --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/input/insert_records.in.json @@ -0,0 +1,204 @@ +{ + "schemas": [], + "tables": [ + { + "name": "users", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "email", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "active", + "type": { + "schemaName": null, + "type_name": "boolean", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "created_at", + "type": { + "schemaName": null, + "type_name": "timestamp", + "args": null + }, + "token": { "start": { "offset": 0, "line": 5, "column": 1 }, "end": { "offset": 10, "line": 5, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 6, "column": 2 } }, + "indexes": [] + }, + { + "name": "posts", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": true, + "unique": false + }, + { + "name": "user_id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "title", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "content", + "type": { + "schemaName": null, + "type_name": "text", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 5, "column": 2 } }, + "indexes": [] + } + ], + "notes": [], + "refs": [ + { + "name": null, + "endpoints": [ + { + "schemaName": null, + "tableName": "posts", + "fieldNames": ["user_id"], + "relation": "1" + }, + { + "schemaName": null, + "tableName": "users", + "fieldNames": ["id"], + "relation": "*" + } + ] + } + ], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": null, + "tableName": "users", + "columns": ["id", "name", "email", "active", "created_at"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "Alice", "type": "string" }, + { "value": "alice@example.com", "type": "string" }, + { "value": true, "type": "bool" }, + { "value": "2024-01-15 10:30:00", "type": "timestamp" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "Bob", "type": "string" }, + { "value": "bob@example.com", "type": "string" }, + { "value": false, "type": "bool" }, + { "value": "2024-01-16 14:20:00", "type": "timestamp" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": "Charlie", "type": "string" }, + { "value": null, "type": "string" }, + { "value": true, "type": "bool" }, + { "value": "2024-01-17 09:15:00", "type": "timestamp" } + ] + ] + }, + { + "schemaName": null, + "tableName": "posts", + "columns": ["id", "user_id", "title", "content"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": 1, "type": "integer" }, + { "value": "First Post", "type": "string" }, + { "value": "Hello World", "type": "text" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": 1, "type": "integer" }, + { "value": "Second Post", "type": "string" }, + { "value": "It's a beautiful day", "type": "text" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/input/sample_data_edge_cases.in.json b/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/input/sample_data_edge_cases.in.json new file mode 100644 index 000000000..19dff4f5e --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/input/sample_data_edge_cases.in.json @@ -0,0 +1,185 @@ +{ + "schemas": [], + "tables": [ + { + "name": "sample_data_test", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 10, "line": 1, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "scientific_num", + "type": { + "schemaName": null, + "type_name": "decimal(20,10)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 2, "column": 1 }, "end": { "offset": 10, "line": 2, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "signed_positive", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 3, "column": 1 }, "end": { "offset": 10, "line": 3, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "signed_negative", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 4, "column": 1 }, "end": { "offset": 10, "line": 4, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "sql_func", + "type": { + "schemaName": null, + "type_name": "datetime", + "args": null + }, + "token": { "start": { "offset": 0, "line": 5, "column": 1 }, "end": { "offset": 10, "line": 5, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "datetime_val", + "type": { + "schemaName": null, + "type_name": "datetime", + "args": null + }, + "token": { "start": { "offset": 0, "line": 6, "column": 1 }, "end": { "offset": 10, "line": 6, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "string_newline", + "type": { + "schemaName": null, + "type_name": "varchar(200)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 7, "column": 1 }, "end": { "offset": 10, "line": 7, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "string_backslash", + "type": { + "schemaName": null, + "type_name": "varchar(200)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 8, "column": 1 }, "end": { "offset": 10, "line": 8, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "string_escape", + "type": { + "schemaName": null, + "type_name": "varchar(200)", + "args": null + }, + "token": { "start": { "offset": 0, "line": 9, "column": 1 }, "end": { "offset": 10, "line": 9, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "dbml_expr", + "type": { + "schemaName": null, + "type_name": "int", + "args": null + }, + "token": { "start": { "offset": 0, "line": 10, "column": 1 }, "end": { "offset": 10, "line": 10, "column": 11 } }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { "start": { "offset": 0, "line": 1, "column": 1 }, "end": { "offset": 100, "line": 11, "column": 2 } }, + "indexes": [] + } + ], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "schemaName": null, + "tableName": "sample_data_test", + "columns": ["id", "scientific_num", "signed_positive", "signed_negative", "sql_func", "datetime_val", "string_newline", "string_backslash", "string_escape", "dbml_expr"], + "values": [ + [ + { "value": 1, "type": "integer" }, + { "value": "1.23e-5", "type": "real" }, + { "value": "+42", "type": "integer" }, + { "value": "-99", "type": "integer" }, + { "value": "NOW()", "type": "expression" }, + { "value": "2024-01-15 10:30:00", "type": "datetime" }, + { "value": "line1\\nline2\\nline3", "type": "string" }, + { "value": "path\\\\to\\\\file", "type": "string" }, + { "value": "tab\\there\\nquote\\'end", "type": "string" }, + { "value": "\"id\" + 10", "type": "expression" } + ], + [ + { "value": 2, "type": "integer" }, + { "value": "-3.14E+2", "type": "real" }, + { "value": "+0", "type": "integer" }, + { "value": "-0", "type": "integer" }, + { "value": "CURRENT_TIMESTAMP()", "type": "expression" }, + { "value": "2024-12-31 23:59:59", "type": "datetime" }, + { "value": "multi\\nline\\ntext\\nhere", "type": "string" }, + { "value": "C:\\\\Users\\\\test", "type": "string" }, + { "value": "quote\\\"double", "type": "string" }, + { "value": "\"id\" * 2", "type": "expression" } + ], + [ + { "value": 3, "type": "integer" }, + { "value": "6.022e23", "type": "real" }, + { "value": "+123", "type": "integer" }, + { "value": "-456", "type": "integer" }, + { "value": "UTC_TIMESTAMP()", "type": "expression" }, + { "value": "2024-06-15 12:00:00", "type": "datetime" }, + { "value": "simple text", "type": "string" }, + { "value": "double\\\\\\\\backslash", "type": "string" }, + { "value": "mixed\\ttab\\nand\\rnewline", "type": "string" }, + { "value": "\"scientific_num\" / 100", "type": "expression" } + ] + ] + } + ] +} diff --git a/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/output/insert_records.out.sql b/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/output/insert_records.out.sql new file mode 100644 index 000000000..3ce0a236d --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/output/insert_records.out.sql @@ -0,0 +1,32 @@ +CREATE TABLE "users" ( + "id" integer PRIMARY KEY, + "name" varchar, + "email" varchar, + "active" boolean, + "created_at" timestamp +); + +CREATE TABLE "posts" ( + "id" integer PRIMARY KEY, + "user_id" integer, + "title" varchar, + "content" text +); + +ALTER TABLE "users" ADD FOREIGN KEY ("id") REFERENCES "posts" ("user_id"); + +-- Use deferred constraints for INSERT +BEGIN; +SET CONSTRAINTS ALL DEFERRED; + +INSERT INTO "users" ("id", "name", "email", "active", "created_at") +VALUES + (1, 'Alice', 'alice@example.com', TRUE, '2024-01-15 10:30:00'), + (2, 'Bob', 'bob@example.com', FALSE, '2024-01-16 14:20:00'), + (3, 'Charlie', NULL, TRUE, '2024-01-17 09:15:00'); +INSERT INTO "posts" ("id", "user_id", "title", "content") +VALUES + (1, 1, 'First Post', 'Hello World'), + (2, 1, 'Second Post', 'It''s a beautiful day'); + +COMMIT; \ No newline at end of file diff --git a/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/output/sample_data_edge_cases.out.sql b/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/output/sample_data_edge_cases.out.sql new file mode 100644 index 000000000..900722971 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/model_exporter/postgres_exporter/output/sample_data_edge_cases.out.sql @@ -0,0 +1,24 @@ +CREATE TABLE "sample_data_test" ( + "id" int, + "scientific_num" decimal(20,10), + "signed_positive" int, + "signed_negative" int, + "sql_func" datetime, + "datetime_val" datetime, + "string_newline" varchar(200), + "string_backslash" varchar(200), + "string_escape" varchar(200), + "dbml_expr" int +); + +-- Use deferred constraints for INSERT +BEGIN; +SET CONSTRAINTS ALL DEFERRED; + +INSERT INTO "sample_data_test" ("id", "scientific_num", "signed_positive", "signed_negative", "sql_func", "datetime_val", "string_newline", "string_backslash", "string_escape", "dbml_expr") +VALUES + (1, 1.23e-5, +42, -99, NOW(), '2024-01-15 10:30:00', 'line1\nline2\nline3', 'path\\to\\file', 'tab\there\nquote\''end', "id" + 10), + (2, -3.14E+2, +0, -0, CURRENT_TIMESTAMP(), '2024-12-31 23:59:59', 'multi\nline\ntext\nhere', 'C:\\Users\\test', 'quote\"double', "id" * 2), + (3, 6.022e23, +123, -456, UTC_TIMESTAMP(), '2024-06-15 12:00:00', 'simple text', 'double\\\\backslash', 'mixed\ttab\nand\rnewline', "scientific_num" / 100); + +COMMIT; \ No newline at end of file diff --git a/packages/dbml-core/__tests__/examples/parser/parser.spec.ts b/packages/dbml-core/__tests__/examples/parser/parser.spec.ts index 64a599eb1..197f24760 100644 --- a/packages/dbml-core/__tests__/examples/parser/parser.spec.ts +++ b/packages/dbml-core/__tests__/examples/parser/parser.spec.ts @@ -39,5 +39,9 @@ describe('@dbml/core', () => { test.each(scanTestNames(__dirname, 'oracle-parse/input'))('oracle-parse/%s', (name) => { runTest(name, 'oracle-parse', 'oracle', 'parseOracleToJSON'); }); + + test.each(scanTestNames(__dirname, 'snowflake-parse/input'))('snowflake-parse/%s', (name) => { + runTest(name, 'snowflake-parse', 'snowflake', 'parseSnowflakeToJSON'); + }); }); }); diff --git a/packages/dbml-core/__tests__/examples/parser/snowflake-parse/input/insert_into.in.sql b/packages/dbml-core/__tests__/examples/parser/snowflake-parse/input/insert_into.in.sql new file mode 100644 index 000000000..368db8efb --- /dev/null +++ b/packages/dbml-core/__tests__/examples/parser/snowflake-parse/input/insert_into.in.sql @@ -0,0 +1,10 @@ +-- Simple insert with columns +INSERT INTO users (id, name, email) VALUES (1, 'Alice', 'alice@example.com'); + +-- Bulk insert +INSERT INTO users (id, name, email) VALUES + (2, 'Bob', 'bob@example.com'), + (3, 'Charlie', 'charlie@example.com'); + +-- Insert into schema.table +INSERT INTO test_schema.products (product_id, product_name, price) VALUES (100, 'Widget', 9.99); diff --git a/packages/dbml-core/__tests__/examples/parser/snowflake-parse/output/insert_into.out.json b/packages/dbml-core/__tests__/examples/parser/snowflake-parse/output/insert_into.out.json new file mode 100644 index 000000000..e2e46a616 --- /dev/null +++ b/packages/dbml-core/__tests__/examples/parser/snowflake-parse/output/insert_into.out.json @@ -0,0 +1,98 @@ +{ + "schemas": [], + "tables": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "records": [ + { + "tableName": "users", + "columns": [ + "id", + "name", + "email" + ], + "values": [ + [ + { + "value": "1", + "type": "number" + }, + { + "value": "Alice", + "type": "string" + }, + { + "value": "alice@example.com", + "type": "string" + } + ] + ] + }, + { + "tableName": "users", + "columns": [ + "id", + "name", + "email" + ], + "values": [ + [ + { + "value": "2", + "type": "number" + }, + { + "value": "Bob", + "type": "string" + }, + { + "value": "bob@example.com", + "type": "string" + } + ], + [ + { + "value": "3", + "type": "number" + }, + { + "value": "Charlie", + "type": "string" + }, + { + "value": "charlie@example.com", + "type": "string" + } + ] + ] + }, + { + "tableName": "products", + "schemaName": "test_schema", + "columns": [ + "product_id", + "product_name", + "price" + ], + "values": [ + [ + { + "value": "100", + "type": "number" + }, + { + "value": "Widget", + "type": "string" + }, + { + "value": "9.99", + "type": "number" + } + ] + ] + } + ] +} diff --git a/packages/dbml-core/eslint.config.ts b/packages/dbml-core/eslint.config.ts index d248e8ed5..402d31b2f 100644 --- a/packages/dbml-core/eslint.config.ts +++ b/packages/dbml-core/eslint.config.ts @@ -32,7 +32,8 @@ export default defineConfig( files: ['**/*.js'], languageOptions: { globals: { - ...globals.browser, + // This globals has a key "AudioWorkletGlobalScope " with a trailing space, causing eslint to crash + // ...globals.browser, ...globals.jest, ...globals.node, ...globals.es2022, @@ -64,7 +65,8 @@ export default defineConfig( }, languageOptions: { globals: { - ...globals.browser, + // This globals has a key "AudioWorkletGlobalScope " with a trailing space, causing eslint to crash + // ...globals.browser, ...globals.jest, ...globals.node, ...globals.es2022, diff --git a/packages/dbml-core/src/export/DbmlExporter.js b/packages/dbml-core/src/export/DbmlExporter.js index eac52c0f5..44e59fb57 100644 --- a/packages/dbml-core/src/export/DbmlExporter.js +++ b/packages/dbml-core/src/export/DbmlExporter.js @@ -1,6 +1,6 @@ import { isEmpty, reduce } from 'lodash'; import { addQuoteIfNeeded } from '@dbml/parse'; -import { shouldPrintSchema } from './utils'; +import { shouldPrintSchema, formatDbmlRecordValue } from './utils'; import { DEFAULT_SCHEMA_NAME } from '../model_structure/config'; class DbmlExporter { @@ -347,6 +347,37 @@ class DbmlExporter { }, ''); } + static exportRecords (model) { + const records = model.records; + if (!records || isEmpty(records)) { + return ''; + } + + const recordStrs = Object.values(records).map((record) => { + const { schemaName, tableName, columns, values } = record; + + // Build the table reference with schema if present + const tableRef = schemaName + ? `"${schemaName}"."${tableName}"` + : `"${tableName}"`; + + // Build the column list + const columnList = columns.map((col) => `"${col}"`).join(', '); + + // Build the data rows + const rowStrs = values.map((row) => { + const valueStrs = row.map((val) => formatDbmlRecordValue(val)); + return ` ${valueStrs.join(', ')}`; + }); + + const body = rowStrs.join('\n'); + + return `records ${tableRef}(${columnList}) {\n${body}\n}\n`; + }); + + return recordStrs.length ? recordStrs.join('\n') : ''; + } + static export (model) { const elementStrs = []; const database = model.database['1']; @@ -363,6 +394,7 @@ class DbmlExporter { }); if (!isEmpty(model.notes)) elementStrs.push(DbmlExporter.exportStickyNotes(model)); + if (!isEmpty(model.records)) elementStrs.push(DbmlExporter.exportRecords(model)); // all elements already end with 1 '\n', so join('\n') to separate them with 1 blank line return elementStrs.join('\n'); diff --git a/packages/dbml-core/src/export/MysqlExporter.js b/packages/dbml-core/src/export/MysqlExporter.js index cea972bf7..bb62936b8 100644 --- a/packages/dbml-core/src/export/MysqlExporter.js +++ b/packages/dbml-core/src/export/MysqlExporter.js @@ -5,8 +5,58 @@ import { buildJunctionFields2, buildNewTableName, } from './utils'; +import { + isNumericType, + isStringType, + isBooleanType, + isDateTimeType, + isBinaryType, +} from '@dbml/parse'; class MySQLExporter { + static exportRecords (model) { + const records = Object.values(model.records || {}); + if (_.isEmpty(records)) { + return []; + } + + const insertStatements = records.map((record) => { + const { schemaName, tableName, columns, values } = record; + + // Build the table reference with schema if present + const tableRef = schemaName ? `\`${schemaName}\`.\`${tableName}\`` : `\`${tableName}\``; + + // Build the column list + const columnList = columns.length > 0 + ? `(\`${columns.join('`, `')}\`)` + : ''; + + // Value formatter for MySQL + const formatValue = (val) => { + if (val.value === null) return 'NULL'; + if (val.type === 'expression') return val.value; + + if (isNumericType(val.type)) return val.value; + if (isBooleanType(val.type)) return val.value.toString().toUpperCase() === 'TRUE' ? '1' : '0'; + if (isStringType(val.type) || isBinaryType(val.type) || isDateTimeType(val.type)) return `'${val.value.replace(/'/g, "''").replace(/\\/g, '\\\\')}'`; + // Unknown type - use CAST + return `CAST('${val.value.replace(/'/g, "''").replace(/\\/g, '\\\\')}' AS ${val.type})`; + }; + + // Build the VALUES clause + const valueRows = values.map((row) => { + const valueStrs = row.map(formatValue); + return `(${valueStrs.join(', ')})`; + }); + + const valuesClause = valueRows.join(',\n '); + + return `INSERT INTO ${tableRef} ${columnList}\nVALUES\n ${valuesClause};`; + }); + + return insertStatements; + } + static getFieldLines (tableId, model) { const table = model.tables[tableId]; @@ -345,6 +395,21 @@ class MySQLExporter { refs: [], }); + // Export INSERT statements + // Note: MySQL does not support DEFERRED constraints, so foreign key checks are disabled + const insertStatements = MySQLExporter.exportRecords(model); + const recordsSection = !_.isEmpty(insertStatements) + ? [ + '-- Disable foreign key checks for INSERT (MySQL does not support DEFERRED)', + 'SET FOREIGN_KEY_CHECKS = 0;', + '', + ...insertStatements, + '', + '-- Re-enable foreign key checks', + 'SET FOREIGN_KEY_CHECKS = 1;', + ] + : []; + const res = _.concat( statements.schemas, statements.enums, @@ -352,6 +417,7 @@ class MySQLExporter { statements.indexes, statements.comments, statements.refs, + recordsSection, ).join('\n'); return res; } diff --git a/packages/dbml-core/src/export/OracleExporter.js b/packages/dbml-core/src/export/OracleExporter.js index 68fccab24..e8c8b652a 100644 --- a/packages/dbml-core/src/export/OracleExporter.js +++ b/packages/dbml-core/src/export/OracleExporter.js @@ -6,8 +6,62 @@ import { escapeObjectName, shouldPrintSchema, } from './utils'; +import { + isNumericType, + isStringType, + isBooleanType, + isDateTimeType, + isBinaryType, +} from '@dbml/parse'; class OracleExporter { + static exportRecords (model) { + const records = Object.values(model.records || {}); + if (_.isEmpty(records)) { + return []; + } + + const insertStatements = records.map((record) => { + const { schemaName, tableName, columns, values } = record; + + // Build the table reference with schema if present + const tableRef = schemaName ? `"${schemaName}"."${tableName}"` : `"${tableName}"`; + + // Build the column list + const columnList = columns.length > 0 + ? `("${columns.join('", "')}")` + : ''; + + const valueExporter = (val) => { + if (val.value === null) return 'NULL'; + if (val.type === 'expression') return val.value; + + if (isNumericType(val.type)) return val.value; + if (isBooleanType(val.type)) return val.value.toString().toUpperCase() === 'TRUE' ? '1' : '0'; + if (isStringType(val.type) || isDateTimeType(val.type)) return `'${val.value.replace(/'/g, "''")}'`; + if (isBinaryType(val.type)) return `HEXTORAW('${val.value}')`; + // Unknown type - use CAST + return `CAST('${val.value.replace(/'/g, "''")}' AS ${val.type})`; + }; + + // Build the INSERT ALL statement for multiple rows + if (values.length > 1) { + const intoStatements = values.map((row) => { + const valueStrs = row.map(valueExporter); + return ` INTO ${tableRef} ${columnList} VALUES (${valueStrs.join(', ')})`; + }); + return `INSERT ALL\n${intoStatements.join('\n')}\nSELECT * FROM dual;`; + } + + // Single row INSERT + const valueStrs = values[0].map(valueExporter); + + return `INSERT INTO ${tableRef} ${columnList}\nVALUES (${valueStrs.join(', ')});`; + }); + + return insertStatements; + } + static buildSchemaToTableNameSetMap (model) { const schemaToTableNameSetMap = new Map(); @@ -500,6 +554,19 @@ class OracleExporter { refs: [], }); + // Export INSERT statements with deferred constraint checking + const insertStatements = this.exportRecords(model); + const recordsSection = !_.isEmpty(insertStatements) + ? [ + '-- Use deferred constraints for INSERT', + 'SET CONSTRAINTS ALL DEFERRED;', + '', + ...insertStatements, + '', + 'COMMIT;', + ] + : []; + const res = _.concat( statements.schemas, statements.tables, @@ -507,6 +574,7 @@ class OracleExporter { statements.comments, statements.referenceGrants, statements.refs, + recordsSection, ).join('\n'); return res; } diff --git a/packages/dbml-core/src/export/PostgresExporter.js b/packages/dbml-core/src/export/PostgresExporter.js index cd1e42437..b0000489d 100644 --- a/packages/dbml-core/src/export/PostgresExporter.js +++ b/packages/dbml-core/src/export/PostgresExporter.js @@ -8,6 +8,13 @@ import { hasWhiteSpace, } from './utils'; import { shouldPrintSchemaName } from '../model_structure/utils'; +import { + isNumericType, + isStringType, + isBooleanType, + isDateTimeType, + isBinaryType, +} from '@dbml/parse'; // PostgreSQL built-in data types // Generated from PostgreSQLParser.g4 and PostgreSQLLexer.g4 @@ -138,6 +145,57 @@ const POSTGRES_RESERVED_KEYWORDS = [ ]; class PostgresExporter { + static exportRecords (model) { + const records = Object.values(model.records || {}); + if (_.isEmpty(records)) { + return []; + } + + const insertStatements = records.map((record) => { + const { schemaName, tableName, columns, values } = record; + + // Skip if no values + if (!values || values.length === 0) { + return null; + } + + // Build the table reference with schema if present + const tableRef = schemaName ? `"${schemaName}"."${tableName}"` : `"${tableName}"`; + + // Build the column list + const columnList = columns.length > 0 + ? `(${columns.map((col) => `"${col}"`).join(', ')})` + : ''; + + // Value formatter for PostgreSQL + const formatValue = (val) => { + if (!val || typeof val !== 'object') return String(val); + if (val.value === null) return 'NULL'; + if (val.type === 'expression') return val.value; + + if (isNumericType(val.type)) return val.value; + if (isBooleanType(val.type)) return val.value ? 'TRUE' : 'FALSE'; + if (isStringType(val.type) || isDateTimeType(val.type) || isBinaryType(val.type)) return `'${String(val.value).replace(/'/g, "''")}'`; + // Unknown type - use CAST + return `CAST('${String(val.value).replace(/'/g, "''")}' AS ${val.type})`; + }; + + // Build the VALUES clause + const valueRows = values.map((row) => { + // Check if row is actually an object (single value) or an array + const rowValues = Array.isArray(row) ? row : [row]; + const valueStrs = rowValues.map(formatValue); + return `(${valueStrs.join(', ')})`; + }); + + const valuesClause = valueRows.join(',\n '); + + return `INSERT INTO ${tableRef} ${columnList}\nVALUES\n ${valuesClause};`; + }).filter(Boolean); + + return insertStatements; + } + static exportEnums (enumIds, model) { return enumIds.map((enumId) => { const _enum = model.enums[enumId]; @@ -545,6 +603,20 @@ class PostgresExporter { return prevStatements; }, schemaEnumStatements); + // Export INSERT statements with deferred constraint checking + const insertStatements = PostgresExporter.exportRecords(model); + const recordsSection = !_.isEmpty(insertStatements) + ? [ + '-- Use deferred constraints for INSERT', + 'BEGIN;', + 'SET CONSTRAINTS ALL DEFERRED;', + '', + ...insertStatements, + '', + 'COMMIT;', + ] + : []; + const res = _.concat( statements.schemas, statements.enums, @@ -552,6 +624,7 @@ class PostgresExporter { statements.indexes, statements.comments, statements.refs, + recordsSection, ).join('\n'); return res; } diff --git a/packages/dbml-core/src/export/SqlServerExporter.js b/packages/dbml-core/src/export/SqlServerExporter.js index b274acc02..24861ddaa 100644 --- a/packages/dbml-core/src/export/SqlServerExporter.js +++ b/packages/dbml-core/src/export/SqlServerExporter.js @@ -5,8 +5,59 @@ import { buildJunctionFields2, buildNewTableName, } from './utils'; +import { + isNumericType, + isStringType, + isBooleanType, + isDateTimeType, + isBinaryType, +} from '@dbml/parse'; class SqlServerExporter { + static exportRecords (model) { + const records = Object.values(model.records || {}); + if (_.isEmpty(records)) { + return []; + } + + const insertStatements = records.map((record) => { + const { schemaName, tableName, columns, values } = record; + + // Build the table reference with schema if present + const tableRef = schemaName ? `[${schemaName}].[${tableName}]` : `[${tableName}]`; + + // Build the column list + const columnList = columns.length > 0 + ? `([${columns.join('], [')}])` + : ''; + + // Value formatter for SQL Server + const formatValue = (val) => { + if (val.value === null) return 'NULL'; + if (val.type === 'expression') return val.value; + + if (isNumericType(val.type)) return val.value; + if (isBooleanType(val.type)) return val.value.toString().toUpperCase() === 'TRUE' ? '1' : '0'; + if (isStringType(val.type) || isDateTimeType(val.type)) return `'${val.value.replace(/'/g, "''")}'`; + if (isBinaryType(val.type)) return `0x${val.value}`; // SQL Server binary as hex + // Unknown type - use CAST + return `CAST('${val.value.replace(/'/g, "''")}' AS ${val.type})`; + }; + + // Build the VALUES clause + const valueRows = values.map((row) => { + const valueStrs = row.map(formatValue); + return `(${valueStrs.join(', ')})`; + }); + + const valuesClause = valueRows.join(',\n '); + + return `INSERT INTO ${tableRef} ${columnList}\nVALUES\n ${valuesClause};\nGO`; + }); + + return insertStatements; + } + static getFieldLines (tableId, model) { const table = model.tables[tableId]; @@ -364,6 +415,23 @@ class SqlServerExporter { refs: [], }); + // Export INSERT statements + // Note: SQL Server does not support DEFERRED constraints, so constraint checks are disabled + const insertStatements = SqlServerExporter.exportRecords(model); + const recordsSection = !_.isEmpty(insertStatements) + ? [ + '-- Disable constraint checks for INSERT (SQL Server does not support DEFERRED)', + 'EXEC sp_MSforeachtable "ALTER TABLE ? NOCHECK CONSTRAINT all";', + 'GO', + '', + ...insertStatements, + '', + '-- Re-enable constraint checks', + 'EXEC sp_MSforeachtable "ALTER TABLE ? WITH CHECK CHECK CONSTRAINT all";', + 'GO', + ] + : []; + const res = _.concat( statements.schemas, statements.enums, @@ -371,6 +439,7 @@ class SqlServerExporter { statements.indexes, statements.comments, statements.refs, + recordsSection, ).join('\n'); return res; } diff --git a/packages/dbml-core/src/export/index.js b/packages/dbml-core/src/export/index.js index 3687b8ccf..cae676044 100644 --- a/packages/dbml-core/src/export/index.js +++ b/packages/dbml-core/src/export/index.js @@ -1,5 +1,6 @@ import ModelExporter from './ModelExporter'; import Parser from '../parse/Parser'; +import { formatDbmlRecordValue } from './utils'; function _export (str, format) { const database = (new Parser()).parse(str, 'dbmlv2'); @@ -9,3 +10,5 @@ function _export (str, format) { export default { export: _export, }; + +export { formatDbmlRecordValue }; diff --git a/packages/dbml-core/src/export/utils.js b/packages/dbml-core/src/export/utils.js index eb385c314..39782316e 100644 --- a/packages/dbml-core/src/export/utils.js +++ b/packages/dbml-core/src/export/utils.js @@ -1,4 +1,13 @@ import { DEFAULT_SCHEMA_NAME } from '../model_structure/config'; +import { + isNumericType, + isBooleanType, + isDateTimeType, + tryExtractBoolean, + tryExtractNumeric, + tryExtractString, + tryExtractDateTime, +} from '@dbml/parse'; export function hasWhiteSpace (s) { return /\s/g.test(s); @@ -89,3 +98,58 @@ export function escapeObjectName (name, database) { return `${escapeSignature}${name}${escapeSignature}`; } + +export function formatDbmlRecordValue (recordValue) { + const { value, type } = recordValue; + + // Handle null/undefined values + if (value === null || value === undefined) { + return 'null'; + } + + // Handle expressions (backtick strings) + if (type === 'expression') { + return `\`${value}\``; + } + + // Try to extract typed values using tryExtract functions + // If extraction fails, fall back to function expression + + if (isBooleanType(type)) { + const extracted = tryExtractBoolean(value); + if (extracted !== null) { + return extracted ? 'true' : 'false'; + } + // If extraction failed, wrap in function expression + return `\`${value}\``; + } + + if (isNumericType(type)) { + const extracted = tryExtractNumeric(value); + if (extracted !== null) { + return String(extracted); + } + // If extraction failed, wrap in function expression + return `\`${value}\``; + } + + if (isDateTimeType(type)) { + const extracted = tryExtractDateTime(value); + if (extracted !== null) { + const quote = extracted.includes('\n') ? '\'\'\'' : '\''; + return `${quote}${extracted.replaceAll('\\', '\\\\').replaceAll("'", "\\'")}${quote}`; + } + // If extraction failed, wrap in function expression + return `\`${value}\``; + } + + // Default: string types and others + const extracted = tryExtractString(value); + if (extracted !== null) { + const quote = extracted.includes('\n') ? '\'\'\'' : '\''; + return `${quote}${extracted.replaceAll('\\', '\\\\').replaceAll("'", "\\'")}${quote}`; + } + + // If all extractions failed, wrap in function expression + return `\`${value}\``; +} diff --git a/packages/dbml-core/src/index.js b/packages/dbml-core/src/index.js index 4a7cc4342..b6eece70b 100644 --- a/packages/dbml-core/src/index.js +++ b/packages/dbml-core/src/index.js @@ -2,7 +2,7 @@ import ModelExporter from './export/ModelExporter'; import Parser from './parse/Parser'; import { CompilerError } from './parse/error'; import importer from './import'; -import exporter from './export'; +import exporter, { formatDbmlRecordValue } from './export'; import { renameTable } from './transform'; import { VERSION } from './utils/version'; @@ -14,4 +14,24 @@ export { CompilerError, Parser, VERSION, + formatDbmlRecordValue, }; + +// Re-export types and utilities from @dbml/parse +export { + SqlDialect, + isIntegerType, + isFloatType, + isNumericType, + isBooleanType, + isStringType, + isBinaryType, + isDateTimeType, + isSerialType, + tryExtractBoolean, + tryExtractNumeric, + tryExtractInteger, + tryExtractString, + tryExtractDateTime, + tryExtractEnum, +} from '@dbml/parse'; diff --git a/packages/dbml-core/src/parse/ANTLR/ASTGeneration/snowflake/SnowflakeASTGen.js b/packages/dbml-core/src/parse/ANTLR/ASTGeneration/snowflake/SnowflakeASTGen.js index dc93df10f..178eebf66 100644 --- a/packages/dbml-core/src/parse/ANTLR/ASTGeneration/snowflake/SnowflakeASTGen.js +++ b/packages/dbml-core/src/parse/ANTLR/ASTGeneration/snowflake/SnowflakeASTGen.js @@ -1,6 +1,6 @@ -import { isEmpty, flatten, get, values, add } from 'lodash'; +import { isEmpty, flatten, get, values, add, last, flattenDepth } from 'lodash'; import SnowflakeParserVisitor from '../../parsers/snowflake/SnowflakeParserVisitor'; -import { Endpoint, Enum, Field, Index, Table, Ref } from '../AST'; +import { Endpoint, Enum, Field, Index, Table, Ref, TableRecord } from '../AST'; import { TABLE_CONSTRAINT_KIND, COLUMN_CONSTRAINT_KIND, DATA_TYPE, CONSTRAINT_TYPE } from '../constants'; import { getOriginalText } from '../helpers'; @@ -19,6 +19,7 @@ export default class SnowflakeASTGen extends SnowflakeParserVisitor { tableGroups: [], aliases: [], project: {}, + records: [], }; } @@ -39,6 +40,8 @@ export default class SnowflakeASTGen extends SnowflakeParserVisitor { visitSql_command (ctx) { if (ctx.ddl_command()) { ctx.ddl_command().accept(this); + } else if (ctx.dml_command()) { + ctx.dml_command().accept(this); } } @@ -51,6 +54,20 @@ export default class SnowflakeASTGen extends SnowflakeParserVisitor { } } + // dml_command + // : query_statement + // | insert_statement + // | insert_multi_table_statement + // | update_statement + // | delete_statement + // | merge_statement + // ; + visitDml_command (ctx) { + if (ctx.insert_statement()) { + ctx.insert_statement().accept(this); + } + } + // check SnowflakeParser.g4 line 1442 visitCreate_command (ctx) { if (ctx.create_table()) { @@ -589,4 +606,44 @@ export default class SnowflakeASTGen extends SnowflakeParserVisitor { } return null; } + + // insert_statement + // : INSERT OVERWRITE? INTO object_name column_list_in_parentheses? ( + // values_builder + // | query_statement + // ) + // ; + visitInsert_statement (ctx) { + const [databaseName, schemaName, tableName] = ctx.object_name().accept(this); + const columns = ctx.column_list_in_parentheses() ? ctx.column_list_in_parentheses().accept(this) : []; + + // Only handle values_builder, not query_statement + const values = ctx.values_builder() ? ctx.values_builder().accept(this) : []; + + const record = new TableRecord({ + schemaName, + tableName, + columns, + values, + }); + + this.data.records.push(record); + } + + // values_builder + // : VALUES '(' expr_list ')' (COMMA '(' expr_list ')')? + // ; + visitValues_builder (ctx) { + return ctx.expr_list().map((exprList) => { + const rowValues = exprList.accept(this); + return flattenDepth(rowValues, 1); + }); + } + + // expr_list + // : expr (COMMA expr)* + // ; + visitExpr_list (ctx) { + return ctx.expr().map((expr) => expr.accept(this)); + } } diff --git a/packages/dbml-core/types/export/ModelExporter.d.ts b/packages/dbml-core/types/export/ModelExporter.d.ts index 7ba5f0811..ee30c6ea4 100644 --- a/packages/dbml-core/types/export/ModelExporter.d.ts +++ b/packages/dbml-core/types/export/ModelExporter.d.ts @@ -1,6 +1,7 @@ import Database, { NormalizedDatabase } from '../model_structure/database'; +import { SqlDialect } from '@dbml/parse'; -export declare type ExportFormatOption = 'dbml' | 'mysql' | 'postgres' | 'json' | 'mssql' | 'oracle'; +export declare type ExportFormatOption = SqlDialect | 'dbml' | 'json'; declare class ModelExporter { static export(model: Database | NormalizedDatabase, format: ExportFormatOption, isNormalized?: boolean): string; } diff --git a/packages/dbml-core/types/export/index.d.ts b/packages/dbml-core/types/export/index.d.ts index d866a1af9..733b7ac5e 100644 --- a/packages/dbml-core/types/export/index.d.ts +++ b/packages/dbml-core/types/export/index.d.ts @@ -1,4 +1,12 @@ import { ExportFormatOption } from './ModelExporter'; +import { RecordValueType } from '../model_structure/database'; + +export interface RecordValue { + value: any; + type: RecordValueType; +} + +export declare function formatDbmlRecordValue(recordValue: RecordValue): string; declare function _export(str: string, format: ExportFormatOption): string; declare const _default: { diff --git a/packages/dbml-core/types/import/index.d.ts b/packages/dbml-core/types/import/index.d.ts index cc4eb0683..0415d6737 100644 --- a/packages/dbml-core/types/import/index.d.ts +++ b/packages/dbml-core/types/import/index.d.ts @@ -1,4 +1,6 @@ -declare function _import(str: string, format: 'dbml' | 'mysql' | 'postgres' | 'json' | 'mssql' | 'postgresLegacy' | 'mssqlLegacy' | 'oracle'): string; +import { SqlDialect } from '@dbml/parse'; + +declare function _import(str: string, format: SqlDialect | 'dbml' | 'json' | 'postgresLegacy' | 'mssqlLegacy'): string; /** * @param {any} schemaJson diff --git a/packages/dbml-core/types/index.d.ts b/packages/dbml-core/types/index.d.ts index 2c9ba9853..897abe90c 100644 --- a/packages/dbml-core/types/index.d.ts +++ b/packages/dbml-core/types/index.d.ts @@ -5,3 +5,22 @@ import exporter from './export'; import { renameTable } from './transform'; export { renameTable, importer, exporter, ModelExporter, Parser }; export { CompilerDiagnostic, CompilerError as CompilerDiagnostics, EditorPosition, ErrorCode, WarningLevel, } from './parse/error'; +export { formatDbmlRecordValue, RecordValue } from './export'; +export { RecordValueType } from './model_structure/database'; +export { + SqlDialect, + isIntegerType, + isFloatType, + isNumericType, + isBooleanType, + isStringType, + isBinaryType, + isDateTimeType, + isSerialType, + tryExtractBoolean, + tryExtractNumeric, + tryExtractInteger, + tryExtractString, + tryExtractDateTime, + tryExtractEnum, +} from '@dbml/parse'; diff --git a/packages/dbml-core/types/model_structure/database.d.ts b/packages/dbml-core/types/model_structure/database.d.ts index b12ad4498..08eb34300 100644 --- a/packages/dbml-core/types/model_structure/database.d.ts +++ b/packages/dbml-core/types/model_structure/database.d.ts @@ -19,13 +19,15 @@ export interface Project { name: string; } +export type RecordValueType = 'string' | 'bool' | 'integer' | 'real' | 'date' | 'time' | 'datetime' | string; + interface RawTableRecord { schemaName: string | undefined; tableName: string; columns: string[]; values: { value: any; - type: string; + type: RecordValueType; }[][]; } diff --git a/packages/dbml-core/types/parse/Parser.d.ts b/packages/dbml-core/types/parse/Parser.d.ts index e98d505f1..752946126 100644 --- a/packages/dbml-core/types/parse/Parser.d.ts +++ b/packages/dbml-core/types/parse/Parser.d.ts @@ -1,14 +1,13 @@ -import { Compiler } from '@dbml/parse'; +import { Compiler, SqlDialect } from '@dbml/parse'; import Database, { RawDatabase } from '../model_structure/database'; -export declare type ParseFormat = 'json' - | 'mysql' | 'mysqlLegacy' - | 'postgres' | 'postgresLegacy' +export declare type ParseFormat = SqlDialect + | 'json' + | 'mysqlLegacy' + | 'postgresLegacy' | 'dbml' | 'dbmlv2' - | 'mssql' | 'mssqlLegacy' - | 'schemarb' - | 'snowflake' - | 'oracle'; + | 'mssqlLegacy' + | 'schemarb'; declare class Parser { public DBMLCompiler: Compiler; diff --git a/packages/dbml-parse/__tests__/examples/binder/binder.test.ts b/packages/dbml-parse/__tests__/examples/binder/binder.test.ts index 9fb7fde87..e98628344 100644 --- a/packages/dbml-parse/__tests__/examples/binder/binder.test.ts +++ b/packages/dbml-parse/__tests__/examples/binder/binder.test.ts @@ -1153,4 +1153,141 @@ describe('[example] binder', () => { expect(schemaSymbol.symbolTable.get('Table:users')).toBeInstanceOf(TableSymbol); }); }); + + describe('Records', () => { + test('should bind records to table and columns', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + `; + const result = analyze(source); + expect(result.getErrors()).toHaveLength(0); + + const ast = result.getValue(); + const schemaSymbol = ast.symbol as SchemaSymbol; + const tableSymbol = schemaSymbol.symbolTable.get('Table:users') as TableSymbol; + + // Table and columns should have references from records + expect(tableSymbol.references.length).toBe(1); + expect(tableSymbol.references[0].referee).toBe(tableSymbol); + + const idColumn = tableSymbol.symbolTable.get('Column:id') as ColumnSymbol; + const nameColumn = tableSymbol.symbolTable.get('Column:name') as ColumnSymbol; + expect(idColumn.references.length).toBe(1); + expect(nameColumn.references.length).toBe(1); + }); + + test('should bind records with schema-qualified table', () => { + const source = ` + Table auth.users { + id int + email varchar + } + records auth.users(id, email) { + 1, "alice@example.com" + } + `; + const result = analyze(source); + expect(result.getErrors()).toHaveLength(0); + + const ast = result.getValue(); + const publicSchema = ast.symbol as SchemaSymbol; + const authSchema = publicSchema.symbolTable.get('Schema:auth') as SchemaSymbol; + const tableSymbol = authSchema.symbolTable.get('Table:users') as TableSymbol; + + expect(tableSymbol.references.length).toBe(1); + }); + + test('should detect unknown table in records', () => { + const source = ` + records nonexistent(id) { + 1 + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBeGreaterThan(0); + expect(errors[0].diagnostic).toContain('nonexistent'); + }); + + test('should detect unknown column in records', () => { + const source = ` + Table users { + id int + } + records users(id, nonexistent) { + 1, "value" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBeGreaterThan(0); + expect(errors[0].diagnostic).toContain('nonexistent'); + }); + + test('should bind multiple records for same table', () => { + const source = ` + Table users { + id int + name varchar + } + records users(id, name) { + 1, "Alice" + } + records users(id, name) { + 2, "Bob" + } + `; + const result = analyze(source); + expect(result.getErrors()).toHaveLength(0); + + const ast = result.getValue(); + const schemaSymbol = ast.symbol as SchemaSymbol; + const tableSymbol = schemaSymbol.symbolTable.get('Table:users') as TableSymbol; + + // Table should have 2 references from both records elements + expect(tableSymbol.references.length).toBe(2); + }); + + test('should bind records with enum column type', () => { + const source = ` + Enum status { active\n inactive } + Table users { + id int + status status + } + records users(id, status) { + 1, status.active + } + `; + const result = analyze(source); + expect(result.getErrors()).toHaveLength(0); + + const ast = result.getValue(); + const schemaSymbol = ast.symbol as SchemaSymbol; + const enumSymbol = schemaSymbol.symbolTable.get('Enum:status') as EnumSymbol; + const activeField = enumSymbol.symbolTable.get('Enum field:active') as EnumFieldSymbol; + + // Enum field should have reference from records value + expect(activeField.references.length).toBeGreaterThan(0); + }); + + test('should allow forward reference to table in records', () => { + const source = ` + records users(id, name) { + 1, "Alice" + } + Table users { + id int + name varchar + } + `; + const result = analyze(source); + expect(result.getErrors()).toHaveLength(0); + }); + }); }); diff --git a/packages/dbml-parse/__tests__/examples/binder/records.test.ts b/packages/dbml-parse/__tests__/examples/binder/records.test.ts new file mode 100644 index 000000000..3e109a538 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/binder/records.test.ts @@ -0,0 +1,305 @@ +import { describe, expect, test } from 'vitest'; +import { TableSymbol, EnumSymbol, ColumnSymbol, EnumFieldSymbol, SchemaSymbol } from '@/core/analyzer/symbol/symbols'; +import { analyze } from '@tests/utils'; + +describe('[example] records binder', () => { + test('should bind records to table and columns', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + `; + const result = analyze(source); + expect(result.getErrors().length).toBe(0); + + const ast = result.getValue(); + const schemaSymbol = ast.symbol as SchemaSymbol; + const tableSymbol = schemaSymbol.symbolTable.get('Table:users') as TableSymbol; + + // Table should have exactly 1 reference from records + expect(tableSymbol.references.length).toBe(1); + expect(tableSymbol.references[0].referee).toBe(tableSymbol); + + const idColumn = tableSymbol.symbolTable.get('Column:id') as ColumnSymbol; + const nameColumn = tableSymbol.symbolTable.get('Column:name') as ColumnSymbol; + + // Each column should have exactly 1 reference from records column list + expect(idColumn.references.length).toBe(1); + expect(idColumn.references[0].referee).toBe(idColumn); + + expect(nameColumn.references.length).toBe(1); + expect(nameColumn.references[0].referee).toBe(nameColumn); + }); + + test('should bind records with schema-qualified table', () => { + const source = ` + Table auth.users { + id int + email varchar + } + records auth.users(id, email) { + 1, "alice@example.com" + } + `; + const result = analyze(source); + expect(result.getErrors().length).toBe(0); + + const ast = result.getValue(); + const publicSchema = ast.symbol as SchemaSymbol; + const authSchema = publicSchema.symbolTable.get('Schema:auth') as SchemaSymbol; + const tableSymbol = authSchema.symbolTable.get('Table:users') as TableSymbol; + + // Schema should have reference from records + expect(authSchema.references.length).toBe(1); + expect(authSchema.references[0].referee).toBe(authSchema); + + // Table should have exactly 1 reference from records + expect(tableSymbol.references.length).toBe(1); + expect(tableSymbol.references[0].referee).toBe(tableSymbol); + + // Columns should have references + const idColumn = tableSymbol.symbolTable.get('Column:id') as ColumnSymbol; + const emailColumn = tableSymbol.symbolTable.get('Column:email') as ColumnSymbol; + + expect(idColumn.references.length).toBe(1); + + expect(emailColumn.references.length).toBe(1); + }); + + test('should detect unknown table in records', () => { + const source = ` + records nonexistent(id) { + 1 + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(1); + expect(errors[0].diagnostic).toBe("Table 'nonexistent' does not exist in Schema 'public'"); + }); + + test('should detect unknown column in records', () => { + const source = ` + Table users { + id int + } + records users(id, nonexistent) { + 1, "value" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(1); + expect(errors[0].diagnostic).toBe("Column 'nonexistent' does not exist in Table 'users'"); + }); + + test('should bind multiple records for same table', () => { + const source = ` + Table users { + id int + name varchar + } + records users(id, name) { + 1, "Alice" + } + records users(id, name) { + 2, "Bob" + } + `; + const result = analyze(source); + expect(result.getErrors().length).toBe(0); + + const ast = result.getValue(); + const schemaSymbol = ast.symbol as SchemaSymbol; + const tableSymbol = schemaSymbol.symbolTable.get('Table:users') as TableSymbol; + + // Table should have exactly 2 references from both records elements + expect(tableSymbol.references.length).toBe(2); + + // Each column should have exactly 2 references + const idColumn = tableSymbol.symbolTable.get('Column:id') as ColumnSymbol; + const nameColumn = tableSymbol.symbolTable.get('Column:name') as ColumnSymbol; + + expect(idColumn.references.length).toBe(2); + + expect(nameColumn.references.length).toBe(2); + }); + + test('should bind records with enum column type', () => { + const source = ` + Enum status { active\n inactive } + Table users { + id int + status status + } + records users(id, status) { + 1, status.active + } + `; + const result = analyze(source); + expect(result.getErrors().length).toBe(0); + + const ast = result.getValue(); + const schemaSymbol = ast.symbol as SchemaSymbol; + const enumSymbol = schemaSymbol.symbolTable.get('Enum:status') as EnumSymbol; + const activeField = enumSymbol.symbolTable.get('Enum field:active') as EnumFieldSymbol; + + // Enum should have 2 references: 1 from column type, 1 from records data + expect(enumSymbol.references.length).toBe(2); + + // Enum field should have exactly 1 reference from records value + expect(activeField.references.length).toBe(1); + expect(activeField.references[0].referee).toBe(activeField); + }); + + test('should allow forward reference to table in records', () => { + const source = ` + records users(id, name) { + 1, "Alice" + } + Table users { + id int + name varchar + } + `; + const result = analyze(source); + expect(result.getErrors().length).toBe(0); + + const ast = result.getValue(); + const schemaSymbol = ast.symbol as SchemaSymbol; + const tableSymbol = schemaSymbol.symbolTable.get('Table:users') as TableSymbol; + + // Verify forward reference is properly bound + expect(tableSymbol.references.length).toBe(1); + + const idColumn = tableSymbol.symbolTable.get('Column:id') as ColumnSymbol; + const nameColumn = tableSymbol.symbolTable.get('Column:name') as ColumnSymbol; + + expect(idColumn.references.length).toBe(1); + expect(nameColumn.references.length).toBe(1); + }); + + test('should bind schema-qualified enum values in records', () => { + const source = ` + Enum auth.role { admin\n user\n guest } + Table auth.users { + id int + role auth.role + } + records auth.users(id, role) { + 1, auth.role.admin + 2, auth.role.user + } + `; + const result = analyze(source); + expect(result.getErrors().length).toBe(0); + + const ast = result.getValue(); + const publicSchema = ast.symbol as SchemaSymbol; + const authSchema = publicSchema.symbolTable.get('Schema:auth') as SchemaSymbol; + const enumSymbol = authSchema.symbolTable.get('Enum:role') as EnumSymbol; + + // Enum should have 3 references: 1 from column type, 2 from records data + expect(enumSymbol.references.length).toBe(3); + + const adminField = enumSymbol.symbolTable.get('Enum field:admin') as EnumFieldSymbol; + const userField = enumSymbol.symbolTable.get('Enum field:user') as EnumFieldSymbol; + + expect(adminField.references.length).toBe(1); + expect(adminField.references[0].referee).toBe(adminField); + + expect(userField.references.length).toBe(1); + expect(userField.references[0].referee).toBe(userField); + }); + + test('should detect unknown enum in records data', () => { + const source = ` + Table users { + id int + status varchar + } + records users(id, status) { + 1, unknown_enum.value + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(1); + expect(errors[0].diagnostic).toBe("Enum 'unknown_enum' does not exist in Schema 'public'"); + }); + + test('should detect unknown enum field in records data', () => { + const source = ` + Enum status { active\n inactive } + Table users { + id int + status status + } + records users(id, status) { + 1, status.unknown_field + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(1); + expect(errors[0].diagnostic).toBe("Enum field 'unknown_field' does not exist in Enum 'status'"); + }); + + test('should bind multiple enum field references in same records', () => { + const source = ` + Enum status { pending\n active\n completed } + Table tasks { + id int + status status + } + records tasks(id, status) { + 1, status.pending + 2, status.active + 3, status.completed + 4, status.pending + } + `; + const result = analyze(source); + expect(result.getErrors().length).toBe(0); + + const ast = result.getValue(); + const schemaSymbol = ast.symbol as SchemaSymbol; + const enumSymbol = schemaSymbol.symbolTable.get('Enum:status') as EnumSymbol; + + const pendingField = enumSymbol.symbolTable.get('Enum field:pending') as EnumFieldSymbol; + const activeField = enumSymbol.symbolTable.get('Enum field:active') as EnumFieldSymbol; + const completedField = enumSymbol.symbolTable.get('Enum field:completed') as EnumFieldSymbol; + + // pending is referenced twice + expect(pendingField.references.length).toBe(2); + + // active is referenced once + expect(activeField.references.length).toBe(1); + + // completed is referenced once + expect(completedField.references.length).toBe(1); + }); + + test('should error when there are duplicate columns in top-level records', () => { + const source = ` + Table tasks { + id int + status status + } + records tasks(id, id, "id") { + 1, 10 + 2, 20 + 3, 30 + 4, 40 + } + `; + const result = analyze(source); + const errors = result.getErrors(); + expect(errors.length).toBe(4); + expect(errors[0].message).toBe('Column \'id\' is referenced more than once in a Records for Table \'tasks\''); + expect(errors[1].message).toBe('Column \'id\' is referenced more than once in a Records for Table \'tasks\''); + expect(errors[2].message).toBe('Column \'id\' is referenced more than once in a Records for Table \'tasks\''); + expect(errors[3].message).toBe('Column \'id\' is referenced more than once in a Records for Table \'tasks\''); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/compiler/appendRecords.test.ts b/packages/dbml-parse/__tests__/examples/compiler/appendRecords.test.ts new file mode 100644 index 000000000..7c8cfa34f --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/compiler/appendRecords.test.ts @@ -0,0 +1,503 @@ +import Compiler from '@/compiler/index'; + +describe('[example] appendRecords', () => { + describe('basic functionality', () => { + test('should append new records block to empty source', () => { + const input = ` +Table users { + id int [pk] + name varchar +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['id', 'name'], + [ + [{ value: 1, type: 'integer' }, { value: 'Alice', type: 'string' }], + [{ value: 2, type: 'integer' }, { value: 'Bob', type: 'string' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + } + + records users(id, name) { + 1, 'Alice' + 2, 'Bob' + } + " + `); + }); + + test('should handle schema-qualified table names', () => { + const input = ` +Table auth.users { + id int [pk] + email varchar +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'auth.users', + ['id', 'email'], + [ + [{ value: 1, type: 'integer' }, { value: 'alice@example.com', type: 'string' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + " + Table auth.users { + id int [pk] + email varchar + } + + records auth.users(id, email) { + 1, 'alice@example.com' + } + " + `); + }); + + test('should handle object-style table name input', () => { + const input = ` +Table users { + id int [pk] +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + { table: 'users' }, + ['id'], + [ + [{ value: 1, type: 'integer' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + } + + records users(id) { + 1 + } + " + `); + }); + + test('should handle object-style with schema', () => { + const input = ` +Table auth.users { + id int [pk] +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + { schema: 'auth', table: 'users' }, + ['id'], + [ + [{ value: 1, type: 'integer' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + " + Table auth.users { + id int [pk] + } + + records auth.users(id) { + 1 + } + " + `); + }); + }); + + describe('merging into existing records', () => { + test('should merge into last records block with matching columns', () => { + const input = ` +Table users { + id int [pk] + name varchar + email varchar +} + +records users(id, name, email) { + 1, 'Alice', 'alice@example.com' + 2, 'Bob', 'bob@example.com' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['id', 'name'], + [ + [{ value: 3, type: 'integer' }, { value: 'Charlie', type: 'string' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + email varchar + } + + records users(id, name, email) { + 1, 'Alice', 'alice@example.com' + 2, 'Bob', 'bob@example.com' + + 3, 'Charlie', null + } + " + `); + }); + + test('should fill missing columns with null when merging', () => { + const input = ` +Table users { + id int [pk] + name varchar + email varchar + age int +} + +records users(id, name, email, age) { + 1, 'Alice', 'alice@example.com', 30 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['id', 'name'], + [ + [{ value: 2, type: 'integer' }, { value: 'Bob', type: 'string' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + email varchar + age int + } + + records users(id, name, email, age) { + 1, 'Alice', 'alice@example.com', 30 + + 2, 'Bob', null, null + } + " + `); + }); + + test('should create new block if last records missing target columns', () => { + const input = ` +Table users { + id int [pk] + name varchar + email varchar +} + +records users(id, name) { + 1, 'Alice' + 2, 'Bob' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['id', 'email'], + [ + [{ value: 3, type: 'integer' }, { value: 'charlie@example.com', type: 'string' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + email varchar + } + + records users(id, name) { + 1, 'Alice' + 2, 'Bob' + } + + records users(id, email) { + 3, 'charlie@example.com' + } + " + `); + }); + + test('should not merge into records block without body', () => { + const input = ` +Table users { + id int [pk] + name varchar +} + +records users(id, name) +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['id', 'name'], + [ + [{ value: 1, type: 'integer' }, { value: 'Alice', type: 'string' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + } + + records users(id, name) + + records users(id, name) { + 1, 'Alice' + } + " + `); + }); + + test('should only check last records block for merging', () => { + const input = ` +Table users { + id int [pk] + name varchar + email varchar +} + +records users(id, name, email) { + 1, 'Alice', 'alice@example.com' +} + +records users(id, name) { + 2, 'Bob' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['id', 'name'], + [ + [{ value: 3, type: 'integer' }, { value: 'Charlie', type: 'string' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + email varchar + } + + records users(id, name, email) { + 1, 'Alice', 'alice@example.com' + } + + records users(id, name) { + 2, 'Bob' + + 3, 'Charlie' + } + " + `); + }); + }); + + describe('data type formatting', () => { + test('should format integer values', () => { + const input = 'Table users { id int }'; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['id'], + [ + [{ value: 1, type: 'integer' }], + [{ value: -42, type: 'integer' }], + [{ value: 0, type: 'integer' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + "Table users { id int } + records users(id) { + 1 + -42 + 0 + } + " + `); + }); + + test('should format boolean values', () => { + const input = 'Table users { active bool }'; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['active'], + [ + [{ value: true, type: 'bool' }], + [{ value: false, type: 'bool' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + "Table users { active bool } + records users(active) { + true + false + } + " + `); + }); + + test('should format string values with single quotes', () => { + const input = 'Table users { name varchar }'; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['name'], + [ + [{ value: 'Alice', type: 'string' }], + [{ value: 'Bob Smith', type: 'string' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + "Table users { name varchar } + records users(name) { + 'Alice' + 'Bob Smith' + } + " + `); + }); + + test('should format null values', () => { + const input = 'Table users { email varchar }'; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['email'], + [ + [{ value: null, type: 'string' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + "Table users { email varchar } + records users(email) { + null + } + " + `); + }); + + test('should format datetime values', () => { + const input = 'Table events { created_at timestamp }'; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'events', + ['created_at'], + [ + [{ value: '2024-01-15 10:30:00', type: 'timestamp' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + "Table events { created_at timestamp } + records events(created_at) { + '2024-01-15 10:30:00' + } + " + `); + }); + + test('should format expression values with backticks', () => { + const input = 'Table users { created_at timestamp }'; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords( + 'users', + ['created_at'], + [ + [{ value: 'now()', type: 'expression' }], + ], + ); + + expect(result).toMatchInlineSnapshot(` + "Table users { created_at timestamp } + records users(created_at) { + \`now()\` + } + " + `); + }); + }); + + describe('error handling', () => { + test('should throw error when columns array is empty', () => { + const compiler = new Compiler(); + compiler.setSource('Table users { id int }'); + + expect(() => { + compiler.appendRecords('users', [], []); + }).toThrow('Columns must not be empty'); + }); + + test('should return unchanged source when values array is empty', () => { + const input = 'Table users { id int }'; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.appendRecords('users', ['id'], []); + + expect(result).toBe(input); + }); + + test('should throw error when row has mismatched column count', () => { + const compiler = new Compiler(); + compiler.setSource('Table users { id int, name varchar }'); + + expect(() => { + compiler.appendRecords('users', ['id', 'name'], [ + [{ value: 1, type: 'integer' }], // Only 1 value but 2 columns + ]); + }).toThrow('Data record entry does not have the same columns'); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/compiler/deleteRecordRow.test.ts b/packages/dbml-parse/__tests__/examples/compiler/deleteRecordRow.test.ts new file mode 100644 index 000000000..5dd8b595c --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/compiler/deleteRecordRow.test.ts @@ -0,0 +1,263 @@ +import Compiler from '@/compiler/index'; + +describe('[example] deleteRecordRow', () => { + describe('basic deletion', () => { + test('should delete first row by index', () => { + const input = ` +Table users { + id int [pk] + name varchar +} + +records users(id, name) { + 1, 'Alice' + 2, 'Bob' + 3, 'Charlie' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordRow('users', 0); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + } + + records users(id, name) { + 2, 'Bob' + 3, 'Charlie' + } + " + `); + }); + + test('should delete middle row by index', () => { + const input = ` +Table users { + id int [pk] + name varchar +} + +records users(id, name) { + 1, 'Alice' + 2, 'Bob' + 3, 'Charlie' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordRow('users', 1); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + } + + records users(id, name) { + 1, 'Alice' + 3, 'Charlie' + } + " + `); + }); + + test('should delete last row by index', () => { + const input = ` +Table users { + id int [pk] + name varchar +} + +records users(id, name) { + 1, 'Alice' + 2, 'Bob' + 3, 'Charlie' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordRow('users', 2); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + } + + records users(id, name) { + 1, 'Alice' + 2, 'Bob' + } + " + `); + }); + }); + + describe('multiple Records blocks', () => { + test('should count rows across multiple blocks', () => { + const input = ` +Table users { + id int [pk] + name varchar +} + +records users(id, name) { + 1, 'Alice' + 2, 'Bob' +} + +records users(id, name) { + 3, 'Charlie' + 4, 'David' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordRow('users', 2); // First row of second block + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + } + + records users(id, name) { + 1, 'Alice' + 2, 'Bob' + } + + records users(id, name) { + 4, 'David' + } + " + `); + }); + + test('should delete from correct block based on cumulative index', () => { + const input = ` +Table users { + id int +} + +records users(id) { + 1 +} + +records users(id) { + 2 + 3 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordRow('users', 1); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + } + + records users(id) { + 1 + } + + records users(id) { + 3 + } + " + `); + }); + }); + + describe('edge cases', () => { + test('should return unchanged source when index out of range', () => { + const input = ` +Table users { + id int +} + +records users(id) { + 1 + 2 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordRow('users', 10); + + expect(result).toBe(input); + }); + + test('should return unchanged source when no Records exist', () => { + const input = ` +Table users { + id int [pk] +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordRow('users', 0); + + expect(result).toBe(input); + }); + + test('should handle schema-qualified table names', () => { + const input = ` +Table auth.users { + id int +} + +records auth.users(id) { + 1 + 2 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordRow('auth.users', 0); + + expect(result).toMatchInlineSnapshot(` + " + Table auth.users { + id int + } + + records auth.users(id) { + 2 + } + " + `); + }); + + test('should delete only row leaving empty block', () => { + const input = ` +Table users { + id int +} + +records users(id) { + 1 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordRow('users', 0); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + } + " + `); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/compiler/deleteRecordValue.test.ts b/packages/dbml-parse/__tests__/examples/compiler/deleteRecordValue.test.ts new file mode 100644 index 000000000..d6a236784 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/compiler/deleteRecordValue.test.ts @@ -0,0 +1,260 @@ +import Compiler from '@/compiler/index'; + +describe('[example] deleteRecordValue', () => { + describe('basic deletion', () => { + test('should set value to null at specified row and column', () => { + const input = ` +Table users { + id int [pk] + name varchar + email varchar +} + +records users(id, name, email) { + 1, 'Alice', 'alice@example.com' + 2, 'Bob', 'bob@example.com' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordValue('users', 0, 'email'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + email varchar + } + + records users(id, name, email) { + 1, 'Alice', null + 2, 'Bob', 'bob@example.com' + } + " + `); + }); + + test('should delete value in middle column', () => { + const input = ` +Table users { + id int + name varchar + email varchar +} + +records users(id, name, email) { + 1, 'Alice', 'alice@example.com' + 2, 'Bob', 'bob@example.com' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordValue('users', 1, 'name'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + name varchar + email varchar + } + + records users(id, name, email) { + 1, 'Alice', 'alice@example.com' + 2, null, 'bob@example.com' + } + " + `); + }); + + test('should delete value in first column', () => { + const input = ` +Table users { + id int + name varchar +} + +records users(id, name) { + 1, 'Alice' + 2, 'Bob' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordValue('users', 1, 'id'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + name varchar + } + + records users(id, name) { + 1, 'Alice' + null, 'Bob' + } + " + `); + }); + }); + + describe('multiple Records blocks', () => { + test('should count rows across blocks for correct deletion', () => { + const input = ` +Table users { + id int + name varchar +} + +records users(id, name) { + 1, 'Alice' + 2, 'Bob' +} + +records users(id, name) { + 3, 'Charlie' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordValue('users', 2, 'name'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + name varchar + } + + records users(id, name) { + 1, 'Alice' + 2, 'Bob' + } + + records users(id, name) { + 3, null + } + " + `); + }); + + test('should only affect specified block when deleting', () => { + const input = ` +Table users { + id int + name varchar +} + +records users(id, name) { + 1, 'Alice' +} + +records users(id, name) { + 2, 'Bob' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordValue('users', 0, 'name'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + name varchar + } + + records users(id, name) { + 1, null + } + + records users(id, name) { + 2, 'Bob' + } + " + `); + }); + }); + + describe('edge cases', () => { + test('should return unchanged source when row index out of range', () => { + const input = ` +Table users { + id int + name varchar +} + +records users(id, name) { + 1, 'Alice' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordValue('users', 10, 'name'); + + expect(result).toBe(input); + }); + + test('should return unchanged source when column not found', () => { + const input = ` +Table users { + id int + name varchar +} + +records users(id, name) { + 1, 'Alice' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordValue('users', 0, 'nonexistent'); + + expect(result).toBe(input); + }); + + test('should return unchanged source when no Records exist', () => { + const input = ` +Table users { + id int +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordValue('users', 0, 'id'); + + expect(result).toBe(input); + }); + + test('should handle schema-qualified table names', () => { + const input = ` +Table auth.users { + id int + email varchar +} + +records auth.users(id, email) { + 1, 'alice@example.com' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.deleteRecordValue('auth.users', 0, 'email'); + + expect(result).toMatchInlineSnapshot(` + " + Table auth.users { + id int + email varchar + } + + records auth.users(id, email) { + 1, null + } + " + `); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/compiler/identifierUtils.test.ts b/packages/dbml-parse/__tests__/examples/compiler/identifierUtils.test.ts new file mode 100644 index 000000000..685c8db11 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/compiler/identifierUtils.test.ts @@ -0,0 +1,87 @@ +import { isValidIdentifier, addDoubleQuoteIfNeeded } from '@/compiler/index'; + +describe('isValidIdentifier', () => { + test('should return true for simple alphanumeric identifier', () => { + expect(isValidIdentifier('users')).toBe(true); + expect(isValidIdentifier('User')).toBe(true); + expect(isValidIdentifier('TABLE123')).toBe(true); + }); + + test('should return true for identifier with underscores', () => { + expect(isValidIdentifier('user_name')).toBe(true); + expect(isValidIdentifier('_private')).toBe(true); + expect(isValidIdentifier('__internal__')).toBe(true); + expect(isValidIdentifier('my_table_123')).toBe(true); + }); + + test('should return false for identifier starting with digit', () => { + expect(isValidIdentifier('123users')).toBe(false); + expect(isValidIdentifier('1table')).toBe(false); + expect(isValidIdentifier('9_column')).toBe(false); + }); + + test('should return false for identifier with spaces', () => { + expect(isValidIdentifier('user name')).toBe(false); + expect(isValidIdentifier('my table')).toBe(false); + expect(isValidIdentifier(' users')).toBe(false); + expect(isValidIdentifier('users ')).toBe(false); + }); + + test('should return false for identifier with special characters', () => { + expect(isValidIdentifier('user-name')).toBe(false); + expect(isValidIdentifier('user.name')).toBe(false); + expect(isValidIdentifier('user@domain')).toBe(false); + expect(isValidIdentifier('user$var')).toBe(false); + expect(isValidIdentifier('user#tag')).toBe(false); + }); + + test('should return false for empty string', () => { + expect(isValidIdentifier('')).toBe(false); + }); + + test('should return false for identifier with unicode characters that do not fall into the whitespace category', () => { + expect(isValidIdentifier('user_名前')).toBe(true); + expect(isValidIdentifier('таблица')).toBe(true); + expect(isValidIdentifier('用户')).toBe(true); + }); +}); + +describe('addDoubleQuoteIfNeeded', () => { + test('should not add quotes to valid identifiers', () => { + expect(addDoubleQuoteIfNeeded('users')).toBe('users'); + expect(addDoubleQuoteIfNeeded('user_name')).toBe('user_name'); + expect(addDoubleQuoteIfNeeded('_private')).toBe('_private'); + expect(addDoubleQuoteIfNeeded('TABLE123')).toBe('TABLE123'); + }); + + test('should add quotes to identifier with spaces', () => { + expect(addDoubleQuoteIfNeeded('user name')).toBe('"user name"'); + expect(addDoubleQuoteIfNeeded('my table')).toBe('"my table"'); + expect(addDoubleQuoteIfNeeded(' users')).toBe('" users"'); + }); + + test('should add quotes to identifier starting with digit', () => { + expect(addDoubleQuoteIfNeeded('123users')).toBe('"123users"'); + expect(addDoubleQuoteIfNeeded('1table')).toBe('"1table"'); + }); + + test('should add quotes to identifier with special characters', () => { + expect(addDoubleQuoteIfNeeded('user-name')).toBe('"user-name"'); + expect(addDoubleQuoteIfNeeded('user.name')).toBe('"user.name"'); + expect(addDoubleQuoteIfNeeded('user@domain')).toBe('"user@domain"'); + }); + + test('should add quotes to empty string', () => { + expect(addDoubleQuoteIfNeeded('')).toBe('""'); + }); + + test('should not add quotes to identifier with unicode characters that do not fall into the whitespace category', () => { + expect(addDoubleQuoteIfNeeded('user_名前')).toBe('user_名前'); + expect(addDoubleQuoteIfNeeded('таблица')).toBe('таблица'); + }); + + test('should handle identifiers that already need quotes for other reasons', () => { + expect(addDoubleQuoteIfNeeded('table-123')).toBe('"table-123"'); + expect(addDoubleQuoteIfNeeded('my.schema.table')).toBe('"my.schema.table"'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/compiler/removeAllRecords.test.ts b/packages/dbml-parse/__tests__/examples/compiler/removeAllRecords.test.ts new file mode 100644 index 000000000..25d276c03 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/compiler/removeAllRecords.test.ts @@ -0,0 +1,302 @@ +import Compiler from '@/compiler/index'; + +describe('[example] removeAllRecords', () => { + describe('basic removal', () => { + test('should remove single Records block', () => { + const input = ` +Table users { + id int [pk] + name varchar +} + +records users(id, name) { + 1, 'Alice' + 2, 'Bob' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords('users'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + } + " + `); + }); + + test('should remove all Records blocks for a table', () => { + const input = ` +Table users { + id int [pk] + name varchar +} + +records users(id, name) { + 1, 'Alice' +} + +records users(id, name) { + 2, 'Bob' +} + +records users(id, name) { + 3, 'Charlie' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords('users'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + } + " + `); + }); + + test('should remove Records without body', () => { + const input = ` +Table users { + id int +} + +records users(id) + +records users(id) { + 1 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords('users'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + } + " + `); + }); + }); + + describe('selective removal', () => { + test('should only remove Records for specified table', () => { + const input = ` +Table users { + id int +} + +Table posts { + id int +} + +records users(id) { + 1 +} + +records posts(id) { + 100 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords('users'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + } + + Table posts { + id int + } + + records posts(id) { + 100 + } + " + `); + }); + + test('should handle schema-qualified tables separately', () => { + const input = ` +Table users { + id int +} + +Table auth.users { + id int +} + +records users(id) { + 1 +} + +records auth.users(id) { + 2 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords('users'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + } + + Table auth.users { + id int + } + + records auth.users(id) { + 2 + } + " + `); + }); + }); + + describe('edge cases', () => { + test('should return unchanged source when no Records exist', () => { + const input = ` +Table users { + id int [pk] + name varchar +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords('users'); + + expect(result).toBe(input); + }); + + test('should handle schema-qualified table names', () => { + const input = ` +Table auth.users { + id int +} + +records auth.users(id) { + 1 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords('auth.users'); + + expect(result).toMatchInlineSnapshot(` + " + Table auth.users { + id int + } + " + `); + }); + + test('should clean up extra blank lines', () => { + const input = ` +Table users { + id int +} + +records users(id) { + 1 +} + + +records users(id) { + 2 +} + + +Table posts { + id int +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords('users'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + } + + + Table posts { + id int + } + " + `); + }); + + test('should handle object-style table name input', () => { + const input = ` +Table auth.users { + id int +} + +records auth.users(id) { + 1 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords({ schema: 'auth', table: 'users' }); + + expect(result).toMatchInlineSnapshot(` + " + Table auth.users { + id int + } + " + `); + }); + + test('should preserve other elements when removing Records', () => { + const input = ` +Table users { + id int + indexes { + id [pk] + } +} + +records users(id) { + 1 +} + +Ref: posts.user_id > users.id +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.removeAllRecords('users'); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + indexes { + id [pk] + } + } + + Ref: posts.user_id > users.id + " + `); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/compiler/splitQualifiedIdentifier.test.ts b/packages/dbml-parse/__tests__/examples/compiler/splitQualifiedIdentifier.test.ts new file mode 100644 index 000000000..0e09e990d --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/compiler/splitQualifiedIdentifier.test.ts @@ -0,0 +1,58 @@ +import { splitQualifiedIdentifier } from '@/compiler/queries/utils'; + +describe('splitQualifiedIdentifier', () => { + it('should split simple unquoted identifiers', () => { + expect(splitQualifiedIdentifier('schema')).toEqual(['schema']); + expect(splitQualifiedIdentifier('schema.table')).toEqual(['schema', 'table']); + expect(splitQualifiedIdentifier('schema.table.column')).toEqual(['schema', 'table', 'column']); + }); + + it('should split quoted identifiers and remove quotes', () => { + expect(splitQualifiedIdentifier('"schema"')).toEqual(['schema']); + expect(splitQualifiedIdentifier('"schema name"')).toEqual(['schema name']); + expect(splitQualifiedIdentifier('"schema"."table"')).toEqual(['schema', 'table']); + }); + + it('should handle quoted identifiers with dots inside', () => { + expect(splitQualifiedIdentifier('"schema.with.dots"')).toEqual(['schema.with.dots']); + expect(splitQualifiedIdentifier('"schema.with.dots".table')).toEqual(['schema.with.dots', 'table']); + expect(splitQualifiedIdentifier('"schema.with.dots"."table.with.dots"')).toEqual(['schema.with.dots', 'table.with.dots']); + expect(splitQualifiedIdentifier('"schema.with.dots"."table.with.dots".column')).toEqual(['schema.with.dots', 'table.with.dots', 'column']); + }); + + it('should handle mixed quoted and unquoted identifiers', () => { + expect(splitQualifiedIdentifier('schema."table name"')).toEqual(['schema', 'table name']); + expect(splitQualifiedIdentifier('"schema name".table')).toEqual(['schema name', 'table']); + expect(splitQualifiedIdentifier('schema."table name"."column name"')).toEqual(['schema', 'table name', 'column name']); + expect(splitQualifiedIdentifier('"schema name".table.column')).toEqual(['schema name', 'table', 'column']); + }); + + it('should handle identifiers with whitespace around dots', () => { + expect(splitQualifiedIdentifier('schema . table')).toEqual(['schema', 'table']); + expect(splitQualifiedIdentifier('"schema name" . table')).toEqual(['schema name', 'table']); + expect(splitQualifiedIdentifier('schema . "table name" . column')).toEqual(['schema', 'table name', 'column']); + }); + + it('should handle leading and trailing whitespace', () => { + expect(splitQualifiedIdentifier(' schema.table ')).toEqual(['schema', 'table']); + expect(splitQualifiedIdentifier(' "schema name".table ')).toEqual(['schema name', 'table']); + }); + + it('should preserve spaces in unquoted identifiers', () => { + expect(splitQualifiedIdentifier('app users')).toEqual(['app users']); + expect(splitQualifiedIdentifier('my schema.my table')).toEqual(['my schema', 'my table']); + }); + + it('should handle empty string', () => { + expect(splitQualifiedIdentifier('')).toEqual([]); + }); + + it('should handle single quoted component', () => { + expect(splitQualifiedIdentifier('"single component"')).toEqual(['single component']); + }); + + it('should handle escaped quotes within quoted identifiers', () => { + expect(splitQualifiedIdentifier('"schema\\"name"')).toEqual(['schema"name']); + expect(splitQualifiedIdentifier('"schema\\"name".table')).toEqual(['schema"name', 'table']); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/compiler/stringUtils.test.ts b/packages/dbml-parse/__tests__/examples/compiler/stringUtils.test.ts new file mode 100644 index 000000000..5192f61ef --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/compiler/stringUtils.test.ts @@ -0,0 +1,80 @@ +import { unescapeString, escapeString } from '@/compiler/queries/utils'; + +describe('unescapeString', () => { + it('should handle escaped quotes', () => { + expect(unescapeString('table\\"name')).toBe('table"name'); + expect(unescapeString("table\\'name")).toBe("table'name"); + }); + + it('should handle common escape sequences', () => { + expect(unescapeString('line1\\nline2')).toBe('line1\nline2'); + expect(unescapeString('tab\\there')).toBe('tab\there'); + expect(unescapeString('carriage\\rreturn')).toBe('carriage\rreturn'); + expect(unescapeString('back\\\\slash')).toBe('back\\slash'); + }); + + it('should handle unicode escape sequences', () => { + expect(unescapeString('\\u0041')).toBe('A'); + expect(unescapeString('\\u0041BC')).toBe('ABC'); + expect(unescapeString('Hello\\u0020World')).toBe('Hello World'); + expect(unescapeString('\\u03B1\\u03B2\\u03B3')).toBe('αβγ'); + }); + + it('should handle invalid unicode sequences as regular escapes', () => { + expect(unescapeString('\\u')).toBe('u'); + expect(unescapeString('\\u1')).toBe('u1'); + expect(unescapeString('\\u12')).toBe('u12'); + expect(unescapeString('\\u123')).toBe('u123'); + expect(unescapeString('\\uGGGG')).toBe('uGGGG'); + }); + + it('should handle arbitrary escape sequences', () => { + expect(unescapeString('\\x')).toBe('x'); + expect(unescapeString('\\a')).toBe('a'); + expect(unescapeString('\\z')).toBe('z'); + }); + + it('should handle mixed content', () => { + expect(unescapeString('table\\"name\\nwith\\ttab')).toBe('table"name\nwith\ttab'); + expect(unescapeString('\\u0041\\nB\\tC')).toBe('A\nB\tC'); + }); + + it('should handle empty string', () => { + expect(unescapeString('')).toBe(''); + }); + + it('should handle string without escapes', () => { + expect(unescapeString('plain text')).toBe('plain text'); + }); +}); + +describe('escapeString', () => { + it('should escape quotes', () => { + expect(escapeString('table"name')).toBe('table\\"name'); + expect(escapeString("table'name")).toBe("table\\'name"); + }); + + it('should escape special characters', () => { + expect(escapeString('line1\nline2')).toBe('line1\\nline2'); + expect(escapeString('tab\there')).toBe('tab\\there'); + expect(escapeString('carriage\rreturn')).toBe('carriage\\rreturn'); + expect(escapeString('back\\slash')).toBe('back\\\\slash'); + }); + + it('should handle mixed content', () => { + expect(escapeString('table"name\nwith\ttab')).toBe('table\\"name\\nwith\\ttab'); + }); + + it('should handle empty string', () => { + expect(escapeString('')).toBe(''); + }); + + it('should handle string without special chars', () => { + expect(escapeString('plain text')).toBe('plain text'); + }); + + it('should roundtrip with unescapeString', () => { + const original = 'table"name\nwith\ttab'; + expect(unescapeString(escapeString(original))).toBe(original); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/compiler/updateRecordField.test.ts b/packages/dbml-parse/__tests__/examples/compiler/updateRecordField.test.ts new file mode 100644 index 000000000..94c99f93b --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/compiler/updateRecordField.test.ts @@ -0,0 +1,237 @@ +import Compiler from '@/compiler/index'; + +describe('[example] updateRecordField', () => { + describe('updating existing field', () => { + test('should update field value when field exists', () => { + const input = ` +Table users { + id int [pk] + name varchar + status varchar +} + +records users(id, name, status) { + 1, 'Alice', 'active' + 2, 'Bob', 'inactive' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.updateRecordField( + 'users', + 0, + 'status', + { value: 'pending', type: 'string' }, + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + status varchar + } + + records users(id, name, status) { + 1, 'Alice', 'pending' + 2, 'Bob', 'inactive' + } + " + `); + }); + + test('should update field in multiple Records blocks', () => { + const input = ` +Table users { + id int [pk] + name varchar +} + +records users(id, name) { + 1, 'Alice' +} + +records users(id, name) { + 2, 'Bob' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.updateRecordField( + 'users', + 1, + 'name', + { value: 'Updated', type: 'string' }, + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int [pk] + name varchar + } + + records users(id, name) { + 1, 'Alice' + } + + records users(id, name) { + 2, 'Updated' + } + " + `); + }); + + test('should handle different data types', () => { + const input = ` +Table products { + id int + price decimal +} + +records products(id, price) { + 1, 99.99 + 2, 149.50 +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.updateRecordField( + 'products', + 0, + 'price', + { value: 0, type: 'integer' }, + ); + + expect(result).toMatchInlineSnapshot(` + " + Table products { + id int + price decimal + } + + records products(id, price) { + 1, 0 + 2, 149.50 + } + " + `); + }); + }); + + describe('field not found', () => { + test('should return unchanged source when field does not exist', () => { + const input = ` +Table users { + id int [pk] + name varchar + status varchar +} + +records users(id, name) { + 1, 'Alice' + 2, 'Bob' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.updateRecordField( + 'users', + 0, + 'status', + { value: 'active', type: 'string' }, + ); + + expect(result).toBe(input); + }); + }); + + describe('edge cases', () => { + test('should return unchanged source when no Records exist', () => { + const input = ` +Table users { + id int [pk] + name varchar +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.updateRecordField( + 'users', + 0, + 'name', + { value: 'Test', type: 'string' }, + ); + + expect(result).toBe(input); + }); + + test('should handle schema-qualified table names', () => { + const input = ` +Table auth.users { + id int + name varchar +} + +records auth.users(id, name) { + 1, 'Alice' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.updateRecordField( + 'auth.users', + 0, + 'name', + { value: 'Updated', type: 'string' }, + ); + + expect(result).toMatchInlineSnapshot(` + " + Table auth.users { + id int + name varchar + } + + records auth.users(id, name) { + 1, 'Updated' + } + " + `); + }); + + test('should handle null values', () => { + const input = ` +Table users { + id int + email varchar +} + +records users(id, email) { + 1, 'alice@example.com' +} +`; + const compiler = new Compiler(); + compiler.setSource(input); + const result = compiler.updateRecordField( + 'users', + 0, + 'email', + { value: null, type: 'string' }, + ); + + expect(result).toMatchInlineSnapshot(` + " + Table users { + id int + email varchar + } + + records users(id, email) { + 1, null + } + " + `); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/interpreter.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/interpreter.test.ts index 1d2f2979f..d32c636c4 100644 --- a/packages/dbml-parse/__tests__/examples/interpreter/interpreter.test.ts +++ b/packages/dbml-parse/__tests__/examples/interpreter/interpreter.test.ts @@ -1061,4 +1061,336 @@ describe('[example] interpreter', () => { }); }); }); + + describe('records interpretation', () => { + test('should interpret basic records', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + `; + const db = interpret(source).getValue()!; + + expect(db.records).toHaveLength(1); + expect(db.records[0].tableName).toBe('users'); + expect(db.records[0].columns).toEqual(['id', 'name']); + expect(db.records[0].values).toHaveLength(2); + }); + + test('should interpret integer values correctly', () => { + const source = ` + Table data { id int } + records data(id) { + 1 + 42 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + expect(errors).toHaveLength(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0].type).toBe('integer'); + expect(db.records[0].values[0][0].value).toBe(1); + expect(db.records[0].values[1][0].value).toBe(42); + }); + + test('should interpret float values correctly', () => { + const source = ` + Table data { value decimal(10,2) } + records data(value) { + 3.14 + 0.01 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + expect(errors).toHaveLength(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0].type).toBe('real'); + expect(db.records[0].values[0][0].value).toBe(3.14); + expect(db.records[0].values[1][0].value).toBe(0.01); + }); + + test('should interpret scientific notation correctly', () => { + const source = ` + Table data { value decimal } + records data(value) { + 1e10 + 3.14e-5 + 2E+8 + } + `; + const db = interpret(source).getValue()!; + + expect(db.records[0].values[0][0].type).toBe('real'); + expect(db.records[0].values[0][0].value).toBe(1e10); + expect(db.records[0].values[1][0].value).toBe(3.14e-5); + expect(db.records[0].values[2][0].value).toBe(2e8); + }); + + test('should interpret boolean values correctly', () => { + const source = ` + Table data { flag boolean } + records data(flag) { + true + false + } + `; + const db = interpret(source).getValue()!; + + expect(db.records[0].values[0][0].type).toBe('bool'); + expect(db.records[0].values[0][0].value).toBe(true); + expect(db.records[0].values[1][0].value).toBe(false); + }); + + test('should interpret string values correctly', () => { + const source = ` + Table data { name varchar } + records data(name) { + "Alice" + 'Bob' + } + `; + const db = interpret(source).getValue()!; + + expect(db.records[0].values[0][0].type).toBe('string'); + expect(db.records[0].values[0][0].value).toBe('Alice'); + expect(db.records[0].values[1][0].value).toBe('Bob'); + }); + + test('should interpret null values correctly', () => { + const source = ` + Table data { name varchar } + records data(name) { + null + "" + } + `; + const db = interpret(source).getValue()!; + + expect(db.records[0].values[0][0].type).toBe('string'); + expect(db.records[0].values[0][0].value).toBe(null); + expect(db.records[0].values[1][0].type).toBe('string'); + }); + + test('should interpret function expressions correctly', () => { + const source = ` + Table data { created_at timestamp } + records data(created_at) { + \`now()\` + \`uuid_generate_v4()\` + } + `; + const db = interpret(source).getValue()!; + + expect(db.records[0].values[0][0].type).toBe('expression'); + expect(db.records[0].values[0][0].value).toBe('now()'); + expect(db.records[0].values[1][0].value).toBe('uuid_generate_v4()'); + }); + + test('should interpret enum values correctly', () => { + const source = ` + Enum status { active\n inactive } + Table users { + id int + status status + } + records users(id, status) { + 1, status.active + 2, status.inactive + } + `; + const db = interpret(source).getValue()!; + + expect(db.records[0].values[0][1].type).toBe('string'); + expect(db.records[0].values[0][1].value).toBe('active'); + expect(db.records[0].values[1][1].value).toBe('inactive'); + }); + + test('should group multiple records blocks for same table', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + } + records users(id, name) { + 2, "Bob" + } + `; + const db = interpret(source).getValue()!; + + // Should be grouped into one records entry + expect(db.records).toHaveLength(1); + expect(db.records[0].values).toHaveLength(2); + expect(db.records[0].values[0][0].value).toBe(1); + expect(db.records[0].values[1][0].value).toBe(2); + }); + + test('should interpret records with schema-qualified table', () => { + const source = ` + Table auth.users { + id int + email varchar + } + records auth.users(id, email) { + 1, "alice@example.com" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + expect(errors).toHaveLength(0); + + const db = result.getValue()!; + expect(db.records).toHaveLength(1); + // tableName extracted from table declaration + expect(db.records[0].values).toHaveLength(1); + }); + + test('should interpret mixed data types in same row', () => { + const source = ` + Table data { + id int + value decimal + active boolean + name varchar + } + records data(id, value, active, name) { + 1, 3.14, true, "test" + 2, -2.5, false, "hello" + } + `; + const db = interpret(source).getValue()!; + + const row1 = db.records[0].values[0]; + expect(row1[0]).toEqual({ type: 'integer', value: 1 }); + expect(row1[1]).toEqual({ type: 'real', value: 3.14 }); + expect(row1[2]).toEqual({ type: 'bool', value: true }); + expect(row1[3]).toEqual({ type: 'string', value: 'test' }); + }); + + test('should handle empty records block', () => { + const source = ` + Table users { id int } + records users(id) { + } + `; + const db = interpret(source).getValue()!; + + expect(db.records).toHaveLength(0); + }); + + test('should detect column count mismatch', () => { + const source = ` + Table users { + id int + name varchar + } + records users(id, name) { + 1 + } + `; + const result = interpret(source); + expect(result.getErrors().length).toBeGreaterThan(0); + }); + + test('should validate type compatibility', () => { + const source = ` + Table data { + value int + } + records data(value) { + "not a number" + } + `; + const result = interpret(source); + // Should have a type compatibility warning + expect(result.getWarnings().length).toBeGreaterThan(0); + }); + + test.skip('should validate precision and scale', () => { + const source = ` + Table data { + value decimal(5, 2) + } + records data(value) { + 12345.123 + } + `; + const result = interpret(source); + // Should have precision/scale warning + expect(result.getWarnings().length).toBeGreaterThan(0); + }); + + test('should validate not null constraint', () => { + const source = ` + Table users { + id int [pk] + name varchar [not null] + } + records users(id, name) { + 1, null + } + `; + const result = interpret(source); + expect(result.getWarnings().length).toBeGreaterThan(0); + }); + + test('should validate primary key uniqueness', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + 1, "Bob" + } + `; + const result = interpret(source); + expect(result.getWarnings().length).toBeGreaterThan(0); + }); + + test('should validate unique constraint', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + } + records users(id, email) { + 1, "test@example.com" + 2, "test@example.com" + } + `; + const result = interpret(source); + expect(result.getWarnings().length).toBeGreaterThan(0); + }); + + test('should validate constraints across multiple records blocks', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + } + records users(id, name) { + 1, "Bob" + } + `; + const result = interpret(source); + // Should detect duplicate PK across blocks + expect(result.getWarnings().length).toBeGreaterThan(0); + }); + }); }); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/multi_records/fk_multi_blocks.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/fk_multi_blocks.test.ts new file mode 100644 index 000000000..f17ada717 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/fk_multi_blocks.test.ts @@ -0,0 +1,323 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] FK validation across multiple records blocks', () => { + test('should validate FK across records blocks with different columns', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + + Table orders { + id int [pk] + user_id int [ref: > users.id] + total decimal + } + + records users(id, name) { + 1, 'Alice' + } + + records users(id) { + 2 + } + + records orders(id, user_id) { + 100, 1 // Valid: user 1 exists + } + + records orders(id, user_id, total) { + 101, 2, 250.00 // Valid: user 2 exists + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect FK violation when referenced value not in any records block', () => { + const source = ` + Table users { + id int [pk] + name varchar + email varchar + } + + Table orders { + id int [pk] + user_id int [ref: > users.id] + } + + records users(id, name) { + 1, 'Alice' + } + + records users(id, email) { + 2, 'bob@example.com' + } + + records orders(id, user_id) { + 100, 3 // Invalid: user 3 doesn't exist in any block + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('FK violation'); + }); + + test('should validate composite FK across multiple records blocks', () => { + const source = ` + Table users { + tenant_id int + user_id int + name varchar + indexes { + (tenant_id, user_id) [pk] + } + } + + Table posts { + id int [pk] + tenant_id int + author_id int + } + + Ref: posts.(tenant_id, author_id) > users.(tenant_id, user_id) + + records users(tenant_id, user_id) { + 1, 100 + } + + records users(tenant_id, user_id, name) { + 1, 101, 'Bob' + 2, 200, 'Charlie' + } + + records posts(id, tenant_id, author_id) { + 1, 1, 100 // Valid: (1, 100) exists + 2, 1, 101 // Valid: (1, 101) exists + 3, 2, 200 // Valid: (2, 200) exists + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect composite FK violation across blocks', () => { + const source = ` + Table users { + tenant_id int + user_id int + email varchar + indexes { + (tenant_id, user_id) [pk] + } + } + + Table posts { + id int [pk] + tenant_id int + author_id int + } + + Ref: posts.(tenant_id, author_id) > users.(tenant_id, user_id) + + records users(tenant_id, user_id) { + 1, 100 + } + + records users(tenant_id, user_id, email) { + 2, 200, 'user@example.com' + } + + records posts(id, tenant_id, author_id) { + 1, 1, 101 // Invalid: (1, 101) doesn't exist + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('FK violation'); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].diagnostic).toContain('FK violation'); + }); + + test('should handle FK when referenced column appears in some but not all blocks', () => { + const source = ` + Table categories { + id int [pk] + name varchar + description text + } + + Table products { + id int [pk] + category_id int [ref: > categories.id] + name varchar + } + + // Block 1: has id but not category_id + records categories(id, name) { + 1, 'Electronics' + } + + // Block 2: has different columns + records categories(id, description) { + 2, 'Category 2 description' + } + + // Block 3: has id again + records categories(id, name) { + 3, 'Home' + } + + records products(id, category_id, name) { + 100, 1, 'Laptop' + 101, 2, 'Mouse' + 102, 3, 'Chair' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should validate FK with NULL values across blocks', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + + Table orders { + id int [pk] + user_id int [ref: > users.id] + notes varchar + } + + records users(id, name) { + 1, 'Alice' + } + + records orders(id, user_id) { + 100, 1 // Valid + 101, null // Valid: NULL FK allowed + } + + records orders(id, notes) { + 102, 'No user' // Valid: user_id implicitly NULL + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should validate bidirectional FK (1-1) across multiple blocks', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + + Table profiles { + id int [pk] + user_id int [unique] + } + + Ref: users.id <> profiles.user_id + + records users(id) { + 1 + } + + records users(id, name) { + 2, 'Bob' + } + + records profiles(id, user_id) { + 10, 1 + 11, 2 + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect bidirectional FK violation', () => { + const source = ` + Table users { + id int [pk] + } + + Table profiles { + id int [pk] + user_id int [unique] + } + + Ref: users.id <> profiles.user_id + + records users(id) { + 1 + } + + records profiles(id, user_id) { + 10, 1 + 11, 3 // Invalid: user 3 doesn't exist + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBeGreaterThan(0); + expect(warnings.some((e) => e.diagnostic.includes('FK violation'))).toBe(true); + }); + + test('should validate FK across nested and top-level records', () => { + const source = ` + Table categories { + id int [pk] + name varchar + + records (id) { + 1 + } + } + + records categories(id, name) { + 2, 'Electronics' + } + + Table products { + id int [pk] + category_id int [ref: > categories.id] + + records (id, category_id) { + 100, 1 // References nested record + } + } + + records products(id, category_id) { + 101, 2 // References top-level record + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/multi_records/general.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/general.test.ts new file mode 100644 index 000000000..777f417d7 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/general.test.ts @@ -0,0 +1,144 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] multiple records blocks', () => { + test('should handle multiple records blocks for the same table with different columns', () => { + const source = ` + Table users { + id int [pk] + name varchar + age int + email varchar + } + + records users(id, name) { + 1, 'Alice' + 2, 'Bob' + } + + records users(id, age) { + 3, 25 + 4, 30 + } + `; + + const result = interpret(source); + const errors = result.getErrors(); + expect(errors.length).toBe(0); + + const db = result.getValue()!; + // Multiple records blocks for the same table are merged into one + expect(db.records.length).toBe(1); + expect(db.records[0].tableName).toBe('users'); + + // The merged records contain all unique columns that were actually used + expect(db.records[0].columns).toEqual(['id', 'name', 'age']); + + // Check the data rows (columns not included in a specific records block may be undefined or null) + expect(db.records[0].values.length).toBe(4); + + // First two rows from records users(id, name) + // columns = ['id', 'name', 'age'] + expect(db.records[0].values[0][0]).toMatchObject({ type: 'integer', value: 1 }); // id + expect(db.records[0].values[0][1]).toMatchObject({ type: 'string', value: 'Alice' }); // name + // age column may not exist on rows that only specified (id, name) + if (db.records[0].values[0].length > 2) { + expect(db.records[0].values[0][2]).toMatchObject({ type: 'unknown', value: null }); // age + } + + expect(db.records[0].values[1][0]).toMatchObject({ type: 'integer', value: 2 }); // id + expect(db.records[0].values[1][1]).toMatchObject({ type: 'string', value: 'Bob' }); // name + if (db.records[0].values[1].length > 2) { + expect(db.records[0].values[1][2]).toMatchObject({ type: 'unknown', value: null }); // age + } + + // Next two rows from records users(id, age) + expect(db.records[0].values[2][0]).toMatchObject({ type: 'integer', value: 3 }); // id + if (db.records[0].values[2].length > 1) { + expect(db.records[0].values[2][1]).toMatchObject({ type: 'unknown', value: null }); // name + } + expect(db.records[0].values[2][2]).toMatchObject({ type: 'integer', value: 25 }); // age + + expect(db.records[0].values[3][0]).toMatchObject({ type: 'integer', value: 4 }); // id + if (db.records[0].values[3].length > 1) { + expect(db.records[0].values[3][1]).toMatchObject({ type: 'unknown', value: null }); // name + } + expect(db.records[0].values[3][2]).toMatchObject({ type: 'integer', value: 30 }); // age + }); + + test('should handle multiple records blocks, one with explicit columns and one without', () => { + const source = ` + Table posts { + id int [pk] + title varchar + content text + } + + records posts(id, title) { + 1, 'First post' + } + + records posts(id, title, content) { + 2, 'Second post', 'Content of second post' + } + `; + + const result = interpret(source); + const errors = result.getErrors(); + expect(errors.length).toBe(0); + + const db = result.getValue()!; + // Multiple records blocks for the same table are merged into one + expect(db.records.length).toBe(1); + expect(db.records[0].tableName).toBe('posts'); + + // The merged records contain all unique columns + expect(db.records[0].columns).toEqual(['id', 'title', 'content']); + + // Check the data rows + expect(db.records[0].values.length).toBe(2); + + // First row from records posts(id, title) + // columns = ['id', 'title', 'content'] + expect(db.records[0].values[0][0]).toMatchObject({ type: 'integer', value: 1 }); // id + expect(db.records[0].values[0][1]).toMatchObject({ type: 'string', value: 'First post' }); // title + // content column may not exist on this row, or may be null + if (db.records[0].values[0].length > 2) { + expect(db.records[0].values[0][2]).toMatchObject({ type: 'unknown', value: null }); // content + } + + // Second row from records posts(id, title, content) + expect(db.records[0].values[1][0]).toMatchObject({ type: 'integer', value: 2 }); // id + expect(db.records[0].values[1][1]).toMatchObject({ type: 'string', value: 'Second post' }); // title + expect(db.records[0].values[1][2]).toMatchObject({ type: 'string', value: 'Content of second post' }); // content + }); + + test('should report error for inconsistent column count in implicit records', () => { + const source = ` + Table products { + id int [pk] + name varchar + price decimal + } + + records products(id, name) { + 1, 'Laptop' + } + + records products(id, name) { + 2, 'Mouse' // Has 2 values for 2 columns - this is valid + } + + records products(id, name, price) { + 3, 'Keyboard' // Missing price - only 2 values for 3 columns + } + `; + + const result = interpret(source); + const errors = result.getErrors(); + expect(errors.length).toBe(1); + expect(errors[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(errors[0].diagnostic).toBe('Expected 3 values but got 2'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/multi_records/nested_mixed.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/nested_mixed.test.ts new file mode 100644 index 000000000..1966d6ad7 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/nested_mixed.test.ts @@ -0,0 +1,254 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] nested and top-level records mixed', () => { + test('should handle records inside table with explicit columns', () => { + const source = ` + Table users { + id int [pk] + name varchar + email varchar + + records (id, name) { + 1, 'Alice' + 2, 'Bob' + } + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].columns).toEqual(['id', 'name']); + expect(db.records[0].values).toHaveLength(2); + }); + + test('should handle records inside table without explicit columns', () => { + const source = ` + Table users { + id int [pk] + name varchar + email varchar + + records { + 1, 'Alice', 'alice@example.com' + 2, 'Bob', 'bob@example.com' + } + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].columns).toEqual(['id', 'name', 'email']); + expect(db.records[0].values).toHaveLength(2); + }); + + test('should mix nested and top-level records for same table', () => { + const source = ` + Table users { + id int [pk] + name varchar + email varchar + + records (id, name) { + 1, 'Alice' + } + } + + records users(id, email) { + 2, 'bob@example.com' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + // All records for the same table should be merged into one TableRecord + expect(db.records.length).toBe(1); + + const record = db.records[0]; + // Columns should include all unique columns from all record blocks + expect(record.columns).toContain('id'); + expect(record.columns).toContain('name'); + expect(record.columns).toContain('email'); + + // Should have 2 data rows (array-based) + expect(record.values).toHaveLength(2); + + // First row has id and name + // columns order varies, but should contain id, name, email + const idIndex = record.columns.indexOf('id'); + const nameIndex = record.columns.indexOf('name'); + const emailIndex = record.columns.indexOf('email'); + + expect(record.values[0][idIndex]).toBeDefined(); + expect(record.values[0][nameIndex]).toBeDefined(); + + // Second row has id and email + expect(record.values[1][idIndex]).toBeDefined(); + expect(record.values[1][emailIndex]).toBeDefined(); + }); + + test('should merge multiple nested records blocks with same columns', () => { + const source = ` + Table products { + id int [pk] + name varchar + price decimal + + records (id, name) { + 1, 'Laptop' + } + + records (id, name) { + 2, 'Mouse' + } + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].values).toHaveLength(2); + }); + + test('should merge nested records blocks with different columns', () => { + const source = ` + Table products { + id int [pk] + name varchar + price decimal + + records (id, name) { + 1, 'Laptop' + } + + records (id, price) { + 2, 999.99 + } + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + // All records for the same table are merged into one + expect(db.records.length).toBe(1); + + const record = db.records[0]; + // All unique columns should be present + expect(record.columns).toContain('id'); + expect(record.columns).toContain('name'); + expect(record.columns).toContain('price'); + + // 2 rows, each with different columns populated + expect(record.values).toHaveLength(2); + }); + + test('should handle complex mix of nested, top-level, with and without columns', () => { + const source = ` + Table orders { + id int [pk] + user_id int + total decimal + status varchar + + records (id, user_id) { + 1, 100 + } + + records { + 2, 101, 250.50, 'pending' + } + } + + records orders(id, total) { + 3, 500.00 + } + + records orders(id, status) { + 4, 'completed' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + // All records for orders table merged into one + expect(db.records.length).toBe(1); + + const record = db.records[0]; + // All columns should be present + expect(record.columns).toContain('id'); + expect(record.columns).toContain('user_id'); + expect(record.columns).toContain('total'); + expect(record.columns).toContain('status'); + + // 4 data rows total + expect(record.values).toHaveLength(4); + }); + + test('should validate PK across nested and top-level records', () => { + const source = ` + Table users { + id int [pk] + name varchar + + records (id, name) { + 1, 'Alice' + } + } + + records users(id, name) { + 1, 'Bob' // Duplicate PK + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('Duplicate PK'); + }); + + test('should validate unique across nested and top-level records', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + name varchar + + records (id, email) { + 1, 'alice@example.com' + } + } + + records users(id, email, name) { + 2, 'alice@example.com', 'Alice2' // Duplicate email + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('Duplicate UNIQUE'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/multi_records/pk_multi_blocks.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/pk_multi_blocks.test.ts new file mode 100644 index 000000000..bfe05fd94 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/pk_multi_blocks.test.ts @@ -0,0 +1,313 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] PK validation across multiple records blocks', () => { + test('should validate PK uniqueness across blocks with different columns', () => { + const source = ` + Table users { + id int [pk] + name varchar + email varchar + } + + records users(id, name) { + 1, 'Alice' + 2, 'Bob' + } + + records users(id, email) { + 3, 'charlie@example.com' + 4, 'david@example.com' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect PK duplicate across blocks with different columns', () => { + const source = ` + Table users { + id int [pk] + name varchar + email varchar + } + + records users(id, name) { + 1, 'Alice' + 2, 'Bob' + } + + records users(id, email) { + 2, 'bob2@example.com' // Duplicate PK: 2 already exists + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('Duplicate PK'); + }); + + test('should validate composite PK across multiple blocks', () => { + const source = ` + Table order_items { + order_id int + product_id int + quantity int + price decimal + indexes { + (order_id, product_id) [pk] + } + } + + records order_items(order_id, product_id, quantity) { + 1, 100, 2 + 1, 101, 1 + } + + records order_items(order_id, product_id, price) { + 2, 100, 50.00 + 2, 101, 75.00 + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect composite PK duplicate across blocks', () => { + const source = ` + Table order_items { + order_id int + product_id int + quantity int + indexes { + (order_id, product_id) [pk] + } + } + + records order_items(order_id, product_id, quantity) { + 1, 100, 2 + } + + records order_items(order_id, product_id) { + 1, 100 // Duplicate: (1, 100) already exists + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('Duplicate Composite PK'); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].diagnostic).toContain('Duplicate Composite PK'); + }); + + test('should handle PK validation when PK column missing from some blocks', () => { + const source = ` + Table users { + id int [pk] + name varchar + bio text + } + + records users(id, name) { + 1, 'Alice' + } + + records users(name, bio) { + 'Bob', 'Bio text' // Missing PK column + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + // With merged records, missing PK column results in undefined/NULL value + expect(warnings[0].diagnostic).toContain('NULL in PK'); + }); + + test('should validate PK with NULL across blocks', () => { + const source = ` + Table products { + id int [pk] + name varchar + sku varchar + } + + records products(id, name) { + null, 'Product A' // NULL PK not allowed + } + + records products(id, sku) { + 1, 'SKU-001' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toContain('NULL in PK'); + }); + + test('should allow NULL for auto-increment PK across blocks', () => { + const source = ` + Table users { + id int [pk, increment] + name varchar + email varchar + } + + records users(id, name) { + null, 'Alice' + null, 'Bob' + } + + records users(id, email) { + null, 'charlie@example.com' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect duplicate non-NULL PK with increment', () => { + const source = ` + Table users { + id int [pk, increment] + name varchar + email varchar + } + + records users(id, name) { + 1, 'Alice' + } + + records users(id, email) { + 1, 'alice@example.com' // Duplicate even with increment + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toContain('Duplicate PK'); + }); + + test('should validate PK across nested and top-level records', () => { + const source = ` + Table products { + id int [pk] + name varchar + price decimal + + records (id, name) { + 1, 'Laptop' + } + } + + records products(id, price) { + 2, 999.99 + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect PK duplicate between nested and top-level', () => { + const source = ` + Table products { + id int [pk] + name varchar + + records (id) { + 1 + } + } + + records products(id, name) { + 1, 'Laptop' // Duplicate + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toContain('Duplicate PK'); + }); + + test('should validate complex scenario with multiple blocks and mixed columns', () => { + const source = ` + Table users { + id int [pk] + username varchar + email varchar + created_at timestamp + } + + records users(id, username) { + 1, 'alice' + 2, 'bob' + } + + records users(id, email) { + 3, 'charlie@example.com' + 4, 'david@example.com' + } + + records users(id, created_at) { + 5, '2024-01-01' + } + + records users(id, username, email) { + 6, 'eve', 'eve@example.com' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect multiple PK violations across many blocks', () => { + const source = ` + Table events { + id int [pk] + name varchar + date varchar + location varchar + } + + records events(id, name) { + 1, 'Event A' + 2, 'Event B' + } + + records events(id, date) { + 2, '2024-01-01' // Duplicate 1 + 3, '2024-01-02' + } + + records events(id, location) { + 1, 'Location A' // Duplicate 2 + 4, 'Location B' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(2); + expect(warnings.every((e) => e.diagnostic.includes('Duplicate PK'))).toBe(true); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/multi_records/unique_multi_blocks.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/unique_multi_blocks.test.ts new file mode 100644 index 000000000..b1dee4786 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/multi_records/unique_multi_blocks.test.ts @@ -0,0 +1,350 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] Unique validation across multiple records blocks', () => { + test('should validate unique constraint across blocks with different columns', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + username varchar [unique] + } + + records users(id, email) { + 1, 'alice@example.com' + 2, 'bob@example.com' + } + + records users(id, username) { + 3, 'charlie' + 4, 'david' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect unique violation across blocks', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + name varchar + } + + records users(id, email) { + 1, 'alice@example.com' + } + + records users(id, email, name) { + 2, 'alice@example.com', 'Alice2' // Duplicate email + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('Duplicate UNIQUE'); + }); + + test('should validate composite unique across multiple blocks', () => { + const source = ` + Table user_roles { + id int [pk] + user_id int + role_id int + granted_by int + indexes { + (user_id, role_id) [unique] + } + } + + records user_roles(id, user_id, role_id) { + 1, 100, 1 + 2, 100, 2 + } + + records user_roles(id, user_id, role_id, granted_by) { + 3, 101, 1, 999 + 4, 102, 1, 999 + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect composite unique violation across blocks', () => { + const source = ` + Table user_roles { + id int [pk] + user_id int + role_id int + indexes { + (user_id, role_id) [unique] + } + } + + records user_roles(id, user_id, role_id) { + 1, 100, 1 + } + + records user_roles(id, user_id, role_id) { + 2, 100, 1 // Duplicate (100, 1) + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toContain('Duplicate Composite UNIQUE'); + expect(warnings[1].diagnostic).toContain('Duplicate Composite UNIQUE'); + }); + + test('should allow NULL for unique constraint across blocks', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + phone varchar [unique] + } + + records users(id, email) { + 1, null + 2, null // Multiple NULLs allowed + } + + records users(id, phone) { + 3, null + 4, null // Multiple NULLs allowed + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should handle unique when column missing from some blocks', () => { + const source = ` + Table products { + id int [pk] + sku varchar [unique] + name varchar + description text + } + + records products(id, name) { + 1, 'Product A' // sku missing, implicitly NULL + } + + records products(id, sku) { + 2, 'SKU-001' + 3, 'SKU-002' + } + + records products(id, description) { + 4, 'Description text' // sku missing, implicitly NULL + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should validate multiple unique constraints on same table across blocks', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + username varchar [unique] + phone varchar [unique] + } + + records users(id, email, username) { + 1, 'alice@example.com', 'alice' + } + + records users(id, phone) { + 2, '555-0001' + } + + records users(id, email) { + 3, 'bob@example.com' + } + + records users(id, username, phone) { + 4, 'charlie', '555-0002' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect violations of different unique constraints', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + username varchar [unique] + } + + records users(id, email) { + 1, 'alice@example.com' + } + + records users(id, username) { + 2, 'bob' + } + + records users(id, email, username) { + 3, 'alice@example.com', 'charlie' // Duplicate email + 4, 'david@example.com', 'bob' // Duplicate username + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(2); + expect(warnings.some((e) => e.diagnostic.includes('email'))).toBe(true); + expect(warnings.some((e) => e.diagnostic.includes('username'))).toBe(true); + }); + + test('should validate unique across nested and top-level records', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + username varchar + + records (id, email) { + 1, 'alice@example.com' + } + } + + records users(id, username) { + 2, 'bob' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect unique violation between nested and top-level', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + + records (id, email) { + 1, 'alice@example.com' + } + } + + records users(id, email) { + 2, 'alice@example.com' // Duplicate + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toContain('Duplicate UNIQUE'); + }); + + test('should handle complex scenario with multiple unique constraints', () => { + const source = ` + Table employees { + id int [pk] + email varchar [unique] + employee_code varchar [unique] + ssn varchar [unique] + name varchar + } + + records employees(id, email, employee_code) { + 1, 'emp1@company.com', 'EMP001' + } + + records employees(id, ssn) { + 2, '123-45-6789' + } + + records employees(id, email, ssn) { + 3, 'emp3@company.com', '987-65-4321' + } + + records employees(id, employee_code, name) { + 4, 'EMP004', 'John Doe' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); + + test('should detect multiple unique violations in complex scenario', () => { + const source = ` + Table products { + id int [pk] + sku varchar [unique] + barcode varchar [unique] + name varchar + } + + records products(id, sku, barcode) { + 1, 'SKU-001', 'BAR-001' + } + + records products(id, sku) { + 2, 'SKU-002' + } + + records products(id, sku, name) { + 3, 'SKU-001', 'Product 3' // Duplicate SKU + } + + records products(id, barcode) { + 4, 'BAR-001' // Duplicate barcode + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toContain('Duplicate UNIQUE'); + expect(warnings[1].diagnostic).toContain('Duplicate UNIQUE'); + }); + + test('should validate unique with both PK and unique constraints', () => { + const source = ` + Table users { + id int [pk, unique] // Both PK and unique + email varchar [unique] + } + + records users(id) { + 1 + } + + records users(id, email) { + 2, 'alice@example.com' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + expect(warnings.length).toBe(0); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/composite_fk.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/composite_fk.test.ts new file mode 100644 index 000000000..e7e412beb --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/composite_fk.test.ts @@ -0,0 +1,213 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; + +describe('[example - record] composite foreign key constraints', () => { + test('should accept valid composite FK references', () => { + const source = ` + Table merchants { + id int + country_code varchar + + indexes { + (id, country_code) [pk] + } + } + Table orders { + id int [pk] + merchant_id int + country varchar + amount decimal + } + Ref: orders.(merchant_id, country) > merchants.(id, country_code) + + records merchants(id, country_code) { + 1, "US" + 1, "UK" + 2, "US" + } + records orders(id, merchant_id, country, amount) { + 1, 1, "US", 100.00 + 2, 1, "UK", 200.50 + 3, 2, "US", 50.00 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(2); + + // Merchants table + // columns = ['id', 'country_code'] + expect(db.records[0].tableName).toBe('merchants'); + expect(db.records[0].values.length).toBe(3); + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: 'US' }); + + // Orders table + // columns = ['id', 'merchant_id', 'country', 'amount'] + expect(db.records[1].tableName).toBe('orders'); + expect(db.records[1].values.length).toBe(3); + expect(db.records[1].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[1].values[0][1]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[1].values[0][2]).toEqual({ type: 'string', value: 'US' }); + expect(db.records[1].values[0][3]).toEqual({ type: 'real', value: 100.00 }); + }); + + test('should reject composite FK when partial key match fails', () => { + const source = ` + Table merchants { + id int + country_code varchar + + indexes { + (id, country_code) [pk] + } + } + Table orders { + id int [pk] + merchant_id int + country varchar + } + Ref: orders.(merchant_id, country) > merchants.(id, country_code) + + records merchants(id, country_code) { + 1, "US" + 2, "UK" + } + records orders(id, merchant_id, country) { + 1, 1, "US" + 2, 1, "UK" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe('FK violation: (orders.merchant_id, orders.country) = (1, "UK") does not exist in (merchants.id, merchants.country_code)'); + expect(warnings[1].diagnostic).toBe('FK violation: (orders.merchant_id, orders.country) = (1, "UK") does not exist in (merchants.id, merchants.country_code)'); + }); + + test('should allow NULL in composite FK columns', () => { + const source = ` + Table merchants { + id int + country_code varchar + + indexes { + (id, country_code) [pk] + } + } + Table orders { + id int [pk] + merchant_id int + country varchar + status varchar + } + Ref: orders.(merchant_id, country) > merchants.(id, country_code) + + records merchants(id, country_code) { + 1, "US" + } + records orders(id, merchant_id, country, status) { + 1, 1, "US", "confirmed" + 2, null, "UK", "pending" + 3, 1, null, "processing" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[1].values.length).toBe(3); + + // Row 2: null FK column + // columns = ['id', 'merchant_id', 'country', 'status'] + expect(db.records[1].values[1][1].value).toBe(null); // merchant_id + expect(db.records[1].values[1][2]).toEqual({ type: 'string', value: 'UK' }); // country + expect(db.records[1].values[1][3]).toEqual({ type: 'string', value: 'pending' }); // status + + // Row 3: null FK column + expect(db.records[1].values[2][0]).toEqual({ type: 'integer', value: 3 }); // id + expect(db.records[1].values[2][2].value).toBe(null); // country + expect(db.records[1].values[2][3]).toEqual({ type: 'string', value: 'processing' }); // status + }); + + test('should validate many-to-many composite FK both directions', () => { + const source = ` + Table products { + id int + region varchar + + indexes { + (id, region) [pk] + } + } + Table categories { + id int + region varchar + + indexes { + (id, region) [pk] + } + } + Ref: products.(id, region) <> categories.(id, region) + + records products(id, region) { + 1, "US" + 2, "US" + } + records categories(id, region) { + 1, "US" + 3, "EU" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(4); + expect(warnings[0].diagnostic).toBe('FK violation: (products.id, products.region) = (2, "US") does not exist in (categories.id, categories.region)'); + expect(warnings[1].diagnostic).toBe('FK violation: (products.id, products.region) = (2, "US") does not exist in (categories.id, categories.region)'); + expect(warnings[2].diagnostic).toBe('FK violation: (categories.id, categories.region) = (3, "EU") does not exist in (products.id, products.region)'); + expect(warnings[3].diagnostic).toBe('FK violation: (categories.id, categories.region) = (3, "EU") does not exist in (products.id, products.region)'); + }); + + test('should validate composite FK with schema-qualified tables', () => { + const source = ` + Table auth.users { + id int + tenant_id int + + indexes { + (id, tenant_id) [pk] + } + } + Table public.posts { + id int [pk] + user_id int + tenant_id int + content text + } + Ref: public.posts.(user_id, tenant_id) > auth.users.(id, tenant_id) + + records auth.users(id, tenant_id) { + 1, 100 + 2, 100 + } + records public.posts(id, user_id, tenant_id, content) { + 1, 1, 100, "Hello" + 2, 999, 100, "Invalid user" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe('FK violation: (public.posts.user_id, public.posts.tenant_id) = (999, 100) does not exist in (auth.users.id, auth.users.tenant_id)'); + expect(warnings[1].diagnostic).toBe('FK violation: (public.posts.user_id, public.posts.tenant_id) = (999, 100) does not exist in (auth.users.id, auth.users.tenant_id)'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/composite_pk.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/composite_pk.test.ts new file mode 100644 index 000000000..7e2931097 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/composite_pk.test.ts @@ -0,0 +1,166 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; + +describe('[example - record] composite primary key constraints', () => { + test('should accept valid unique composite primary key values', () => { + const source = ` + Table order_items { + order_id int + product_id int + quantity int + + indexes { + (order_id, product_id) [pk] + } + } + records order_items(order_id, product_id, quantity) { + 1, 100, 2 + 1, 101, 1 + 2, 100, 3 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].tableName).toBe('order_items'); + expect(db.records[0].columns).toEqual(['order_id', 'product_id', 'quantity']); + expect(db.records[0].values.length).toBe(3); + + // Row 1: order_id=1, product_id=100, quantity=2 + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: 100 }); + expect(db.records[0].values[0][2]).toEqual({ type: 'integer', value: 2 }); + + // Row 2: order_id=1, product_id=101, quantity=1 + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'integer', value: 101 }); + expect(db.records[0].values[1][2]).toEqual({ type: 'integer', value: 1 }); + + // Row 3: order_id=2, product_id=100, quantity=3 + expect(db.records[0].values[2][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'integer', value: 100 }); + expect(db.records[0].values[2][2]).toEqual({ type: 'integer', value: 3 }); + }); + + test('should reject duplicate composite primary key values', () => { + const source = ` + Table order_items { + order_id int + product_id int + quantity int + + indexes { + (order_id, product_id) [pk] + } + } + records order_items(order_id, product_id, quantity) { + 1, 100, 2 + 1, 100, 5 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe('Duplicate Composite PK: (order_items.order_id, order_items.product_id) = (1, 100)'); + expect(warnings[1].diagnostic).toBe('Duplicate Composite PK: (order_items.order_id, order_items.product_id) = (1, 100)'); + }); + + test('should reject NULL in any column of composite primary key', () => { + const source = ` + Table order_items { + order_id int + product_id int + quantity int + + indexes { + (order_id, product_id) [pk] + } + } + records order_items(order_id, product_id, quantity) { + 1, null, 2 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe('NULL in Composite PK: (order_items.order_id, order_items.product_id) cannot be NULL'); + expect(warnings[1].diagnostic).toBe('NULL in Composite PK: (order_items.order_id, order_items.product_id) cannot be NULL'); + }); + + test('should detect duplicate composite pk across multiple records blocks', () => { + const source = ` + Table order_items { + order_id int + product_id int + quantity int + + indexes { + (order_id, product_id) [pk] + } + } + records order_items(order_id, product_id, quantity) { + 1, 100, 2 + } + records order_items(order_id, product_id, quantity) { + 1, 100, 5 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe('Duplicate Composite PK: (order_items.order_id, order_items.product_id) = (1, 100)'); + expect(warnings[1].diagnostic).toBe('Duplicate Composite PK: (order_items.order_id, order_items.product_id) = (1, 100)'); + }); + + test('should allow same value in one pk column when other differs', () => { + const source = ` + Table user_roles { + user_id int + role_id int + assigned_at timestamp + + indexes { + (user_id, role_id) [pk] + } + } + records user_roles(user_id, role_id, assigned_at) { + 1, 1, "2024-01-01" + 1, 2, "2024-01-02" + 2, 1, "2024-01-03" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].values.length).toBe(3); + + // Row 1: user_id=1, role_id=1, assigned_at="2024-01-01" + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][2].type).toBe('datetime'); + expect(db.records[0].values[0][2].value).toBe('2024-01-01'); + + // Row 2: user_id=1, role_id=2, assigned_at="2024-01-02" + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[1][2].type).toBe('datetime'); + expect(db.records[0].values[1][2].value).toBe('2024-01-02'); + + // Row 3: user_id=2, role_id=1, assigned_at="2024-01-03" + expect(db.records[0].values[2][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[2][2].type).toBe('datetime'); + expect(db.records[0].values[2][2].value).toBe('2024-01-03'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/composite_unique.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/composite_unique.test.ts new file mode 100644 index 000000000..aba7663eb --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/composite_unique.test.ts @@ -0,0 +1,183 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; + +describe('[example - record] composite unique constraints', () => { + test('should accept valid unique composite values', () => { + const source = ` + Table user_profiles { + user_id int + profile_type varchar + data text + + indexes { + (user_id, profile_type) [unique] + } + } + records user_profiles(user_id, profile_type, data) { + 1, "work", "Software Engineer" + 1, "personal", "Loves hiking" + 2, "work", "Designer" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].tableName).toBe('user_profiles'); + expect(db.records[0].columns).toEqual(['user_id', 'profile_type', 'data']); + expect(db.records[0].values.length).toBe(3); + + // Row 1: user_id=1, profile_type="work", data="Software Engineer" + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: 'work' }); + expect(db.records[0].values[0][2]).toEqual({ type: 'string', value: 'Software Engineer' }); + + // Row 2: user_id=1, profile_type="personal", data="Loves hiking" + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'string', value: 'personal' }); + expect(db.records[0].values[1][2]).toEqual({ type: 'string', value: 'Loves hiking' }); + + // Row 3: user_id=2, profile_type="work", data="Designer" + expect(db.records[0].values[2][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'string', value: 'work' }); + expect(db.records[0].values[2][2]).toEqual({ type: 'string', value: 'Designer' }); + }); + + test('should reject duplicate composite unique values', () => { + const source = ` + Table user_profiles { + user_id int + profile_type varchar + data text + + indexes { + (user_id, profile_type) [unique] + } + } + records user_profiles(user_id, profile_type, data) { + 1, "work", "Software Engineer" + 1, "work", "Updated job title" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe('Duplicate Composite UNIQUE: (user_profiles.user_id, user_profiles.profile_type) = (1, "work")'); + expect(warnings[1].diagnostic).toBe('Duplicate Composite UNIQUE: (user_profiles.user_id, user_profiles.profile_type) = (1, "work")'); + }); + + test('should allow NULL values in composite unique (NULLs dont conflict)', () => { + const source = ` + Table user_settings { + user_id int + category varchar + value varchar + + indexes { + (user_id, category) [unique] + } + } + records user_settings(user_id, category, value) { + 1, null, "default" + 1, null, "another default" + 1, "theme", "dark" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values.length).toBe(3); + + // Row 1: user_id=1, category=null, value="default" + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1].value).toBe(null); + expect(db.records[0].values[0][2]).toEqual({ type: 'string', value: 'default' }); + + // Row 2: user_id=1, category=null, value="another default" + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[1][1].value).toBe(null); + expect(db.records[0].values[1][2]).toEqual({ type: 'string', value: 'another default' }); + + // Row 3: user_id=1, category="theme", value="dark" + expect(db.records[0].values[2][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'string', value: 'theme' }); + expect(db.records[0].values[2][2]).toEqual({ type: 'string', value: 'dark' }); + }); + + test('should detect duplicate composite unique across multiple records blocks', () => { + const source = ` + Table user_profiles { + user_id int + profile_type varchar + data text + + indexes { + (user_id, profile_type) [unique] + } + } + records user_profiles(user_id, profile_type, data) { + 1, "work", "Engineer" + } + records user_profiles(user_id, profile_type, data) { + 1, "work", "Developer" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe('Duplicate Composite UNIQUE: (user_profiles.user_id, user_profiles.profile_type) = (1, "work")'); + expect(warnings[1].diagnostic).toBe('Duplicate Composite UNIQUE: (user_profiles.user_id, user_profiles.profile_type) = (1, "work")'); + }); + + test('should allow same value in one unique column when other differs', () => { + const source = ` + Table event_registrations { + event_id int + attendee_id int + registration_date timestamp + + indexes { + (event_id, attendee_id) [unique] + } + } + records event_registrations(event_id, attendee_id, registration_date) { + 1, 100, "2024-01-01" + 1, 101, "2024-01-02" + 2, 100, "2024-01-03" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values.length).toBe(3); + + // Row 1: event_id=1, attendee_id=100, registration_date="2024-01-01" + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: 100 }); + expect(db.records[0].values[0][2].type).toBe('datetime'); + expect(db.records[0].values[0][2].value).toBe('2024-01-01'); + + // Row 2: event_id=1, attendee_id=101, registration_date="2024-01-02" + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'integer', value: 101 }); + expect(db.records[0].values[1][2].type).toBe('datetime'); + expect(db.records[0].values[1][2].value).toBe('2024-01-02'); + + // Row 3: event_id=2, attendee_id=100, registration_date="2024-01-03" + expect(db.records[0].values[2][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'integer', value: 100 }); + expect(db.records[0].values[2][2].type).toBe('datetime'); + expect(db.records[0].values[2][2].value).toBe('2024-01-03'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/constraints_table_partial.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/constraints_table_partial.test.ts new file mode 100644 index 000000000..d1d952ba3 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/constraints_table_partial.test.ts @@ -0,0 +1,599 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] Constraints in table partials', () => { + describe('Primary Key', () => { + test('should validate PK from injected table partial', () => { + const source = ` + TablePartial id_partial { + id int [pk] + } + + Table users { + name varchar + ~id_partial + } + + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect duplicate PK from injected table partial', () => { + const source = ` + TablePartial id_partial { + id int [pk] + } + + Table users { + name varchar + ~id_partial + } + + records users(id, name) { + 1, "Alice" + 1, "Bob" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe('Duplicate PK: users.id = 1'); + }); + + test('should validate composite PK from injected table partial', () => { + const source = ` + TablePartial region_id { + country_code varchar [pk] + region_code varchar [pk] + } + + Table regions { + name varchar + ~region_id + } + + records regions(country_code, region_code, name) { + "US", "CA", "California" + "US", "NY", "New York" + "CA", "BC", "British Columbia" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect duplicate composite PK from injected table partial', () => { + const source = ` + TablePartial region_id { + country_code varchar [pk] + region_code varchar [pk] + } + + Table regions { + name varchar + ~region_id + } + + records regions(country_code, region_code, name) { + "US", "CA", "California" + "US", "CA", "California Duplicate" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe('Duplicate Composite PK: (regions.country_code, regions.region_code) = ("US", "CA")'); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].diagnostic).toBe('Duplicate Composite PK: (regions.country_code, regions.region_code) = ("US", "CA")'); + }); + + test('should detect NULL in PK from injected table partial', () => { + const source = ` + TablePartial id_partial { + id int [pk] + } + + Table users { + name varchar + ~id_partial + } + + records users(id, name) { + 1, "Alice" + null, "Bob" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe('NULL in PK: users.id cannot be NULL'); + }); + }); + + describe('UNIQUE constraint', () => { + test('should validate UNIQUE constraint from injected table partial', () => { + const source = ` + TablePartial unique_email { + email varchar [unique] + } + + Table users { + id int [pk] + name varchar + ~unique_email + } + + records users(id, name, email) { + 1, "Alice", "alice@example.com" + 2, "Bob", "bob@example.com" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect UNIQUE violation from injected table partial', () => { + const source = ` + TablePartial unique_email { + email varchar [unique] + } + + Table users { + id int [pk] + name varchar + ~unique_email + } + + records users(id, name, email) { + 1, "Alice", "alice@example.com" + 2, "Bob", "alice@example.com" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate UNIQUE: users.email = "alice@example.com"'); + }); + + test('should allow NULL in UNIQUE columns from partial', () => { + const source = ` + TablePartial unique_email { + email varchar [unique] + } + + Table users { + id int [pk] + name varchar + ~unique_email + } + + records users(id, name, email) { + 1, "Alice", "alice@example.com" + 2, "Bob", null + 3, "Charlie", null + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should validate multiple UNIQUE constraints from different partials', () => { + const source = ` + TablePartial unique_email { + email varchar [unique] + } + + TablePartial unique_username { + username varchar [unique] + } + + Table users { + id int [pk] + name varchar + ~unique_email + ~unique_username + } + + records users(id, name, email, username) { + 1, "Alice", "alice@example.com", "alice123" + 2, "Bob", "bob@example.com", "bob456" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect UNIQUE violations from multiple partials', () => { + const source = ` + TablePartial unique_email { + email varchar [unique] + } + + TablePartial unique_username { + username varchar [unique] + } + + Table users { + id int [pk] + name varchar + ~unique_email + ~unique_username + } + + records users(id, name, email, username) { + 1, "Alice", "alice@example.com", "alice123" + 2, "Bob", "alice@example.com", "bob456" + 3, "Charlie", "charlie@example.com", "alice123" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + // One error for email, one for username + const errorMessages = warnings.map((e) => e.diagnostic); + expect(errorMessages.some((msg) => msg.includes('email'))).toBe(true); + expect(errorMessages.some((msg) => msg.includes('username'))).toBe(true); + }); + + test('should validate UNIQUE with table indexes from partial', () => { + const source = ` + TablePartial indexed_fields { + field1 varchar + field2 varchar + indexes { + (field1, field2) [unique] + } + } + + Table data { + id int [pk] + ~indexed_fields + } + + records data(id, field1, field2) { + 1, "a", "x" + 2, "a", "y" + 3, "b", "x" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect UNIQUE index violation from partial', () => { + const source = ` + TablePartial indexed_fields { + field1 varchar + field2 varchar + indexes { + (field1, field2) [unique] + } + } + + Table data { + id int [pk] + ~indexed_fields + } + + records data(id, field1, field2) { + 1, "a", "x" + 2, "a", "x" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe('Duplicate Composite UNIQUE: (data.field1, data.field2) = ("a", "x")'); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].diagnostic).toBe('Duplicate Composite UNIQUE: (data.field1, data.field2) = ("a", "x")'); + }); + }); + + describe('NOT NULL constraint', () => { + test('should validate NOT NULL constraint from injected table partial', () => { + const source = ` + TablePartial required_fields { + email varchar [not null] + } + + Table users { + id int [pk] + name varchar + ~required_fields + } + + records users(id, name, email) { + 1, "Alice", "alice@example.com" + 2, "Bob", "bob@example.com" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect NOT NULL violation from injected table partial', () => { + const source = ` + TablePartial required_fields { + email varchar [not null] + } + + Table users { + id int [pk] + name varchar + ~required_fields + } + + records users(id, name, email) { + 1, "Alice", "alice@example.com" + 2, "Bob", null + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("NULL not allowed for non-nullable column 'email' without default and increment"); + }); + + test('should validate multiple NOT NULL constraints from partial', () => { + const source = ` + TablePartial required_fields { + email varchar [not null] + phone varchar [not null] + } + + Table users { + id int [pk] + name varchar + ~required_fields + } + + records users(id, name, email, phone) { + 1, "Alice", "alice@example.com", "555-1234" + 2, "Bob", "bob@example.com", "555-5678" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect multiple NOT NULL violations from partial', () => { + const source = ` + TablePartial required_fields { + email varchar [not null] + phone varchar [not null] + } + + Table users { + id int [pk] + name varchar + ~required_fields + } + + records users(id, name, email, phone) { + 1, "Alice", "alice@example.com", "555-1234" + 2, "Bob", null, "555-5678" + 3, "Charlie", "charlie@example.com", null + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + // Both warnings should be about NULL not allowed + const warningMessages = warnings.map((e) => e.diagnostic); + expect(warningMessages.every((msg) => msg.includes('NULL not allowed'))).toBe(true); + }); + + test('should allow nullable columns from partial when not marked as NOT NULL', () => { + const source = ` + TablePartial optional_fields { + middle_name varchar + nickname varchar + } + + Table users { + id int [pk] + first_name varchar [not null] + last_name varchar [not null] + ~optional_fields + } + + records users(id, first_name, last_name, middle_name, nickname) { + 1, "Alice", "Smith", "Jane", "Ali" + 2, "Bob", "Jones", null, null + 3, "Charlie", "Brown", "Robert", null + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + }); + + describe('Mixed constraints from table and partials', () => { + test('should validate mixed constraints from table and multiple partials', () => { + const source = ` + TablePartial id_partial { + id int [pk] + } + + TablePartial unique_email { + email varchar [unique] + } + + TablePartial required_phone { + phone varchar [not null] + } + + Table users { + name varchar [not null] + ~id_partial + ~unique_email + ~required_phone + } + + records users(id, name, email, phone) { + 1, "Alice", "alice@example.com", "555-1234" + 2, "Bob", "bob@example.com", "555-5678" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect mixed constraint violations from table and partials', () => { + const source = ` + TablePartial id_partial { + id int [pk] + } + + TablePartial unique_email { + email varchar [unique] + } + + TablePartial required_phone { + phone varchar [not null] + } + + Table users { + name varchar [not null] + ~id_partial + ~unique_email + ~required_phone + } + + records users(id, name, email, phone) { + 1, "Alice", "alice@example.com", "555-1234" + 1, "Bob", "alice@example.com", null + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + // Should detect: duplicate PK (id - warning), duplicate UNIQUE (email - warning), NOT NULL (phone - warning) + expect(warnings.length).toBe(3); + expect(warnings.every((e) => e.code === CompileErrorCode.INVALID_RECORDS_FIELD)).toBe(true); + const errorMessages = warnings.map((e) => e.diagnostic); + expect(errorMessages.some((msg) => msg.includes('Duplicate PK'))).toBe(true); + expect(errorMessages.some((msg) => msg.includes('Duplicate UNIQUE'))).toBe(true); + expect(errorMessages.some((msg) => msg.includes('NULL not allowed'))).toBe(true); + }); + }); + + describe('Constraints when partial injected into multiple tables', () => { + test('should validate constraints independently for each table', () => { + const source = ` + TablePartial id_and_email { + id int [pk] + email varchar [unique, not null] + } + + Table users { + name varchar + ~id_and_email + } + + Table admins { + role varchar + ~id_and_email + } + + records users(id, name, email) { + 1, "Alice", "alice@example.com" + 2, "Bob", "bob@example.com" + } + + records admins(id, role, email) { + 1, "Admin", "admin@example.com" + 2, "Super", "super@example.com" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + // Same IDs and emails across different tables are allowed + expect(warnings.length).toBe(0); + }); + + test('should detect constraint violations independently in each table', () => { + const source = ` + TablePartial id_and_email { + id int [pk] + email varchar [unique, not null] + } + + Table users { + name varchar + ~id_and_email + } + + Table admins { + role varchar + ~id_and_email + } + + records users(id, name, email) { + 1, "Alice", "alice@example.com" + } + + records admins(id, role, email) { + 1, "Admin", "admin@example.com" + 1, "Duplicate ID", "duplicate@example.com" + 2, "Super", "admin@example.com" + 3, "Invalid", null + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + // Should have warnings in admins table: duplicate PK, duplicate UNIQUE, NOT NULL + expect(warnings.length).toBe(3); + expect(warnings.every((e) => e.code === CompileErrorCode.INVALID_RECORDS_FIELD)).toBe(true); + const errorMessages = warnings.map((e) => e.diagnostic); + expect(errorMessages.some((msg) => msg.includes('Duplicate PK'))).toBe(true); + expect(errorMessages.some((msg) => msg.includes('Duplicate UNIQUE'))).toBe(true); + expect(errorMessages.some((msg) => msg.includes('NULL not allowed'))).toBe(true); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/data.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/data.test.ts new file mode 100644 index 000000000..14d2e05c1 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/data.test.ts @@ -0,0 +1,297 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; + +describe('[example - record] data type interpretation', () => { + test('should interpret integer values correctly', () => { + const source = ` + Table data { + id int + count integer + small smallint + big bigint + } + records data(id, count, small, big) { + 1, 42, -100, 9999999999 + 0, 0, 0, 0 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: 42 }); + expect(db.records[0].values[0][2]).toEqual({ type: 'integer', value: -100 }); + expect(db.records[0].values[0][3]).toEqual({ type: 'integer', value: 9999999999 }); + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 0 }); + }); + + test('should interpret float and decimal values correctly', () => { + const source = ` + Table data { + price decimal(10,2) + rate float + amount numeric + } + records data(price, rate, amount) { + 99.99, 3.14159, 0.001 + 50.5, 0.5, 100 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + // Note: float/numeric/decimal types are normalized to 'real' + expect(db.records[0].values[0][0]).toEqual({ type: 'real', value: 99.99 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'real', value: 3.14159 }); + expect(db.records[0].values[0][2]).toEqual({ type: 'real', value: 0.001 }); + expect(db.records[0].values[1][0]).toEqual({ type: 'real', value: 50.5 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'real', value: 0.5 }); + expect(db.records[0].values[1][2]).toEqual({ type: 'real', value: 100 }); + }); + + test('should interpret boolean values correctly', () => { + const source = ` + Table data { + active boolean + verified bool + } + records data(active, verified) { + true, false + false, true + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + // Note: boolean types are normalized to 'bool' + expect(db.records[0].values[0][0]).toEqual({ type: 'bool', value: true }); + expect(db.records[0].values[0][1]).toEqual({ type: 'bool', value: false }); + expect(db.records[0].values[1][0]).toEqual({ type: 'bool', value: false }); + expect(db.records[0].values[1][1]).toEqual({ type: 'bool', value: true }); + }); + + test('should interpret string values correctly', () => { + const source = ` + Table data { + name varchar(255) + description text + code char(10) + } + records data(name, description, code) { + "Alice", 'A short description', "ABC123" + "Bob", "Another description", "" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0]).toEqual({ type: 'string', value: 'Alice' }); + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: 'A short description' }); + expect(db.records[0].values[0][2]).toEqual({ type: 'string', value: 'ABC123' }); + expect(db.records[0].values[1][0]).toEqual({ type: 'string', value: 'Bob' }); + }); + + test('should interpret datetime values correctly', () => { + const source = ` + Table events { + created_at timestamp + event_date date + event_time time + } + records events(created_at, event_date, event_time) { + "2024-01-15T10:30:00Z", "2024-01-15", "10:30:00" + "2024-12-31T23:59:59", "2024-12-31", "23:59:59" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + // Note: timestamp->datetime, date->date, time->time + expect(db.records[0].values[0][0].type).toBe('datetime'); + expect(db.records[0].values[0][0].value).toBe('2024-01-15T10:30:00Z'); + expect(db.records[0].values[0][1].type).toBe('date'); + expect(db.records[0].values[0][1].value).toBe('2024-01-15'); + expect(db.records[0].values[0][2].type).toBe('time'); + expect(db.records[0].values[0][2].value).toBe('10:30:00'); + }); + + test('should handle nested records with partial columns', () => { + const source = ` + Table products { + id int [pk] + name varchar + price decimal + description text + + records (id, name) { + 1, 'Laptop' + } + + records (id, price, description) { + 2, 999.99, 'High-end gaming laptop' + } + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].tableName).toBe('products'); + expect(db.records[0].values).toHaveLength(2); + + // Columns should be merged from both records blocks + // First block: (id, name), Second block: (id, price, description) + // Merged columns: ['id', 'name', 'price', 'description'] + expect(db.records[0].columns).toEqual(['id', 'name', 'price', 'description']); + + // First row has id and name, but no price or description + const idIdx = db.records[0].columns.indexOf('id'); + const nameIdx = db.records[0].columns.indexOf('name'); + const priceIdx = db.records[0].columns.indexOf('price'); + const descIdx = db.records[0].columns.indexOf('description'); + + expect(db.records[0].values[0][idIdx]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][nameIdx]).toEqual({ type: 'string', value: 'Laptop' }); + expect(db.records[0].values[0][priceIdx]).toEqual({ type: 'unknown', value: null }); + expect(db.records[0].values[0][descIdx]).toEqual({ type: 'unknown', value: null }); + + // Second row has id, price, and description, but no name + expect(db.records[0].values[1][idIdx]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[1][nameIdx]).toEqual({ type: 'unknown', value: null }); + expect(db.records[0].values[1][priceIdx]).toEqual({ type: 'real', value: 999.99 }); + expect(db.records[0].values[1][descIdx]).toEqual({ type: 'string', value: 'High-end gaming laptop' }); + }); + + test('should handle nested and top-level records with different data types', () => { + const source = ` + Table metrics { + id int [pk] + name varchar + metric_value decimal + timestamp timestamp + active boolean + + records (id, name, metric_value) { + 1, 'CPU Usage', 85.5 + } + } + + records metrics(id, timestamp, active) { + 2, '2024-01-15T10:00:00Z', true + } + + records metrics(id, name, metric_value, timestamp, active) { + 3, 'Memory Usage', 60.2, '2024-01-15T11:00:00Z', false + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].tableName).toBe('metrics'); + expect(db.records[0].values).toHaveLength(3); + + // All unique columns should be in the merged columns list + expect(db.records[0].columns).toContain('id'); + expect(db.records[0].columns).toContain('name'); + expect(db.records[0].columns).toContain('metric_value'); + expect(db.records[0].columns).toContain('timestamp'); + expect(db.records[0].columns).toContain('active'); + + // First row: id, name, metric_value (nested) + const idIdx = db.records[0].columns.indexOf('id'); + const nameIdx = db.records[0].columns.indexOf('name'); + const metricValueIdx = db.records[0].columns.indexOf('metric_value'); + const timestampIdx = db.records[0].columns.indexOf('timestamp'); + const activeIdx = db.records[0].columns.indexOf('active'); + + expect(db.records[0].values[0][idIdx]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][nameIdx]).toEqual({ type: 'string', value: 'CPU Usage' }); + expect(db.records[0].values[0][metricValueIdx]).toEqual({ type: 'real', value: 85.5 }); + expect(db.records[0].values[0][timestampIdx]).toEqual({ type: 'unknown', value: null }); + expect(db.records[0].values[0][activeIdx]).toEqual({ type: 'unknown', value: null }); + + // Second row: id, timestamp, active (top-level) + expect(db.records[0].values[1][idIdx]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[1][nameIdx]).toEqual({ type: 'unknown', value: null }); + expect(db.records[0].values[1][metricValueIdx]).toEqual({ type: 'unknown', value: null }); + expect(db.records[0].values[1][timestampIdx].type).toBe('datetime'); + expect(db.records[0].values[1][activeIdx]).toEqual({ type: 'bool', value: true }); + + // Third row: all columns (top-level with explicit columns) + expect(db.records[0].values[2][idIdx]).toEqual({ type: 'integer', value: 3 }); + expect(db.records[0].values[2][nameIdx]).toEqual({ type: 'string', value: 'Memory Usage' }); + expect(db.records[0].values[2][metricValueIdx]).toEqual({ type: 'real', value: 60.2 }); + expect(db.records[0].values[2][timestampIdx].type).toBe('datetime'); + expect(db.records[0].values[2][activeIdx]).toEqual({ type: 'bool', value: false }); + }); + + test('should handle multiple nested records blocks for same table', () => { + const source = ` + Table events { + id int [pk] + type varchar + user_id int + data text + created_at timestamp + + records (id, type, user_id) { + 1, 'login', 100 + 2, 'logout', 100 + } + + records (id, type, data) { + 3, 'purchase', 'item_id: 42' + } + + records (id, created_at) { + 4, '2024-01-15T10:00:00Z' + } + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values).toHaveLength(4); + + // Verify different column combinations are merged correctly + const idIdx2 = db.records[0].columns.indexOf('id'); + const typeIdx = db.records[0].columns.indexOf('type'); + const userIdIdx = db.records[0].columns.indexOf('user_id'); + const dataIdx = db.records[0].columns.indexOf('data'); + const createdAtIdx = db.records[0].columns.indexOf('created_at'); + + expect(db.records[0].values[0][idIdx2]).toBeDefined(); + expect(db.records[0].values[0][typeIdx]).toBeDefined(); + expect(db.records[0].values[0][userIdIdx]).toBeDefined(); + expect(db.records[0].values[0][dataIdx]).toEqual({ type: 'unknown', value: null }); + + expect(db.records[0].values[2][idIdx2]).toBeDefined(); + expect(db.records[0].values[2][userIdIdx]).toEqual({ type: 'unknown', value: null }); + + expect(db.records[0].values[3][idIdx2]).toBeDefined(); + expect(db.records[0].values[3][typeIdx]).toEqual({ type: 'unknown', value: null }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/enum_validation.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/enum_validation.test.ts new file mode 100644 index 000000000..914ac162f --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/enum_validation.test.ts @@ -0,0 +1,283 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] Enum validation', () => { + test('should accept valid enum values with enum access syntax', () => { + const source = ` + Enum status { + active + inactive + pending + } + + Table users { + id int [pk] + name varchar + status status + } + + records users(id, name, status) { + 1, "Alice", status.active + 2, "Bob", status.inactive + 3, "Charlie", status.pending + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should accept valid enum values with string literals', () => { + const source = ` + Enum status { + active + inactive + } + + Table users { + id int [pk] + name varchar + status status + } + + records users(id, name, status) { + 1, "Alice", "active" + 2, "Bob", "inactive" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should detect invalid enum value with enum access syntax', () => { + const source = ` + Enum status { + active + inactive + } + + Table users { + id int [pk] + name varchar + status status + } + + records users(id, name, status) { + 1, "Alice", status.active + 2, "Bob", status.invalid + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + // Enum access with invalid value produces a BINDING_ERROR (can't resolve status.invalid) + expect(errors.length).toBe(1); + expect(errors[0].code).toBe(CompileErrorCode.BINDING_ERROR); + expect(errors[0].diagnostic).toContain('invalid'); + }); + + test('should detect invalid enum value with string literal', () => { + const source = ` + Enum status { + active + inactive + } + + Table users { + id int [pk] + name varchar + status status + } + + records users(id, name, status) { + 1, "Alice", "active" + 2, "Bob", "invalid_value" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Invalid enum value \"invalid_value\" for column 'status' of type 'status' (valid values: active, inactive)"); + }); + + test('should validate multiple enum columns', () => { + const source = ` + Enum status { + active + inactive + } + + Enum role { + admin + user + } + + Table users { + id int [pk] + name varchar + status status + role role + } + + records users(id, name, status, role) { + 1, "Alice", "active", "admin" + 2, "Bob", "invalid_status", "user" + 3, "Charlie", "active", "invalid_role" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(2); + expect(warnings.every((e) => e.code === CompileErrorCode.INVALID_RECORDS_FIELD)).toBe(true); + const warningMessages = warnings.map((e) => e.diagnostic); + expect(warningMessages.some((msg) => msg.includes('invalid_status'))).toBe(true); + expect(warningMessages.some((msg) => msg.includes('invalid_role'))).toBe(true); + }); + + test('should allow NULL for enum columns', () => { + const source = ` + Enum status { + active + inactive + } + + Table users { + id int [pk] + name varchar + status status + } + + records users(id, name, status) { + 1, "Alice", "active" + 2, "Bob", null + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should validate enum with schema-qualified name', () => { + const source = ` + Enum app.status { + active + inactive + } + + Table app.users { + id int [pk] + status app.status + } + + records app.users(id, status) { + 1, app.status.active + 2, app.status.invalid + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + // app.status.invalid produces a BINDING_ERROR (can't resolve invalid field) + expect(errors.length).toBe(1); + expect(errors[0].code).toBe(CompileErrorCode.BINDING_ERROR); + expect(errors[0].diagnostic).toContain('invalid'); + }); + + test('should reject string literal for schema-qualified enum', () => { + const source = ` + Enum app.status { + active + inactive + } + + Table app.users { + id int [pk] + status app.status + } + + records app.users(id, status) { + 1, "active" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('fully qualified'); + expect(warnings[0].diagnostic).toContain('app.status.active'); + }); + + test('should reject unqualified enum access for schema-qualified enum', () => { + const source = ` + Enum app.status { + active + inactive + } + + Table app.users { + id int [pk] + status app.status + } + + records app.users(id, status) { + 1, status.active + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + // The binder catches this error - it can't resolve 'status' in the app schema context + expect(errors.length).toBe(1); + expect(errors[0].code).toBe(CompileErrorCode.BINDING_ERROR); + expect(errors[0].diagnostic).toContain('status'); + }); + + test('should validate enum from table partial', () => { + const source = ` + Enum priority { + low + medium + high + } + + TablePartial audit_fields { + priority priority + } + + Table tasks { + id int [pk] + name varchar + ~audit_fields + } + + records tasks(id, name, priority) { + 1, "Task 1", "high" + 2, "Task 2", "invalid_priority" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('invalid_priority'); + expect(warnings[0].diagnostic).toContain('priority'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/fk_empty_target.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/fk_empty_target.test.ts new file mode 100644 index 000000000..992791d37 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/fk_empty_target.test.ts @@ -0,0 +1,35 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('FK with empty target table', () => { + test('should detect FK violation when target table is empty', () => { + const source = ` + Table follows { + following_user_id integer + followed_user_id integer + created_at timestamp + } + + Table users { + id integer [primary key] + username varchar + } + + Ref: users.id < follows.following_user_id + Ref: users.id < follows.followed_user_id + + Records follows(following_user_id, followed_user_id, created_at) { + 1, 2, '2026-01-01' + } + `; + + const result = interpret(source); + const warnings = result.getWarnings(); + + // Should have FK violations since users table is empty but follows references it + expect(warnings.length).toBe(2); // Two FK violations: following_user_id and followed_user_id + expect(warnings.every((e) => e.code === CompileErrorCode.INVALID_RECORDS_FIELD)).toBe(true); + expect(warnings.every((e) => e.diagnostic.includes('does not exist in'))).toBe(true); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/fk_table_partial.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/fk_table_partial.test.ts new file mode 100644 index 000000000..cf45d748c --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/fk_table_partial.test.ts @@ -0,0 +1,339 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] FK in table partials', () => { + test('should validate FK from injected table partial', () => { + const source = ` + TablePartial fk_partial { + user_id int [ref: > users.id] + } + + Table users { + id int [pk] + name varchar + } + + Table posts { + id int [pk] + title varchar + ~fk_partial + } + + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + + records posts(id, title, user_id) { + 1, "Post 1", 1 + 2, "Post 2", 2 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect FK violation from injected table partial', () => { + const source = ` + TablePartial fk_partial { + user_id int [ref: > users.id] + } + + Table users { + id int [pk] + name varchar + } + + Table posts { + id int [pk] + title varchar + ~fk_partial + } + + records users(id, name) { + 1, "Alice" + } + + records posts(id, title, user_id) { + 1, "Post 1", 1 + 2, "Post 2", 999 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe('FK violation: posts.user_id = 999 does not exist in users.id'); + }); + + test('should validate FK when partial injected into multiple tables', () => { + const source = ` + TablePartial timestamps { + created_by int [ref: > users.id] + } + + Table users { + id int [pk] + name varchar + } + + Table posts { + id int [pk] + title varchar + ~timestamps + } + + Table comments { + id int [pk] + content varchar + ~timestamps + } + + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + + records posts(id, title, created_by) { + 1, "Post 1", 1 + 2, "Post 2", 2 + } + + records comments(id, content, created_by) { + 1, "Comment 1", 1 + 2, "Comment 2", 2 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect FK violation in one table when partial injected into multiple tables', () => { + const source = ` + TablePartial timestamps { + created_by int [ref: > users.id] + } + + Table users { + id int [pk] + name varchar + } + + Table posts { + id int [pk] + title varchar + ~timestamps + } + + Table comments { + id int [pk] + content varchar + ~timestamps + } + + records users(id, name) { + 1, "Alice" + } + + records posts(id, title, created_by) { + 1, "Post 1", 1 + } + + records comments(id, content, created_by) { + 1, "Comment 1", 1 + 2, "Comment 2", 999 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe('FK violation: comments.created_by = 999 does not exist in users.id'); + }); + + test('should allow NULL FK values from injected table partial', () => { + const source = ` + TablePartial optional_user { + user_id int [ref: > users.id] + } + + Table users { + id int [pk] + name varchar + } + + Table posts { + id int [pk] + title varchar + ~optional_user + } + + records users(id, name) { + 1, "Alice" + } + + records posts(id, title, user_id) { + 1, "Post 1", 1 + 2, "Anonymous Post", null + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should validate FK with multiple partials injected', () => { + const source = ` + TablePartial user_ref { + user_id int [ref: > users.id] + } + + TablePartial category_ref { + category_id int [ref: > categories.id] + } + + Table users { + id int [pk] + name varchar + } + + Table categories { + id int [pk] + name varchar + } + + Table posts { + id int [pk] + title varchar + ~user_ref + ~category_ref + } + + records users(id, name) { + 1, "Alice" + } + + records categories(id, name) { + 1, "Tech" + } + + records posts(id, title, user_id, category_id) { + 1, "Post 1", 1, 1 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect FK violation with multiple partials injected', () => { + const source = ` + TablePartial user_ref { + user_id int [ref: > users.id] + } + + TablePartial category_ref { + category_id int [ref: > categories.id] + } + + Table users { + id int [pk] + name varchar + } + + Table categories { + id int [pk] + name varchar + } + + Table posts { + id int [pk] + title varchar + ~user_ref + ~category_ref + } + + records users(id, name) { + 1, "Alice" + } + + records categories(id, name) { + 1, "Tech" + } + + records posts(id, title, user_id, category_id) { + 1, "Valid Post", 1, 1 + 2, "Invalid Category", 1, 999 + 3, "Invalid User", 999, 1 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + // Verify both errors are FK violations + const errorMessages = warnings.map((e) => e.diagnostic); + expect(errorMessages.every((msg) => msg.startsWith('FK violation'))).toBe(true); + }); + + test('should validate self-referencing FK from injected table partial', () => { + const source = ` + TablePartial hierarchical { + parent_id int [ref: > nodes.id] + } + + Table nodes { + id int [pk] + name varchar + ~hierarchical + } + + records nodes(id, name, parent_id) { + 1, "Root", null + 2, "Child 1", 1 + 3, "Child 2", 1 + 4, "Grandchild", 2 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect self-referencing FK violation from injected table partial', () => { + const source = ` + TablePartial hierarchical { + parent_id int [ref: > nodes.id] + } + + Table nodes { + id int [pk] + name varchar + ~hierarchical + } + + records nodes(id, name, parent_id) { + 1, "Root", null + 2, "Invalid Child", 999 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe('FK violation: nodes.parent_id = 999 does not exist in nodes.id'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/increment.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/increment.test.ts new file mode 100644 index 000000000..e37706595 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/increment.test.ts @@ -0,0 +1,113 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; + +describe('[example - record] auto-increment and serial type constraints', () => { + test('should allow NULL in pk column with increment flag', () => { + const source = ` + Table users { + id int [pk, increment] + name varchar + } + records users(id, name) { + null, "Alice" + null, "Bob" + 1, "Charlie" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].values.length).toBe(3); + + // Row 1: id=null (auto-generated), name="Alice" + expect(db.records[0].values[0][0].value).toBe(null); + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: 'Alice' }); + + // Row 2: id=null (auto-generated), name="Bob" + expect(db.records[0].values[1][0].value).toBe(null); + expect(db.records[0].values[1][1]).toEqual({ type: 'string', value: 'Bob' }); + + // Row 3: id=1, name="Charlie" + expect(db.records[0].values[2][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'string', value: 'Charlie' }); + }); + + test('should allow NULL in pk column with serial type', () => { + const source = ` + Table users { + id serial [pk] + name varchar + } + records users(id, name) { + null, "Alice" + null, "Bob" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values.length).toBe(2); + }); + + test('should allow NULL in pk column with bigserial type', () => { + const source = ` + Table users { + id bigserial [pk] + name varchar + } + records users(id, name) { + null, "Alice" + null, "Bob" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should detect duplicate pk for non-null values with increment', () => { + const source = ` + Table users { + id int [pk, increment] + name varchar + } + records users(id, name) { + 1, "Alice" + 1, "Bob" + null, "Charlie" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate PK: users.id = 1'); + }); + + test('should detect duplicate pk with not null + dbdefault', () => { + const source = ` + Table users { + id int [pk, not null, default: 1] + name varchar + } + records users(id, name) { + null, "Alice" + null, "Bob" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + // Both NULLs resolve to default value 1, which is a duplicate + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate PK: users.id = null'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/numeric_validation.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/numeric_validation.test.ts new file mode 100644 index 000000000..5af85b980 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/numeric_validation.test.ts @@ -0,0 +1,421 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] Numeric type validation', () => { + describe('Integer validation', () => { + test('should accept valid integer values', () => { + const source = ` + Table products { + id int + quantity bigint + serial_num smallint + } + + records products(id, quantity, serial_num) { + 1, 1000, 5 + 2, -500, -10 + 3, 0, 0 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should reject decimal value for integer column', () => { + const source = ` + Table products { + id int + quantity int + } + + records products(id, quantity) { + 1, 10.5 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Invalid integer value 10.5 for column 'quantity': expected integer, got decimal"); + }); + + test('should reject multiple decimal values for integer columns', () => { + const source = ` + Table products { + id int + quantity int + stock int + } + + records products(id, quantity, stock) { + 1, 10.5, 20 + 2, 15, 30.7 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Invalid integer value 10.5 for column 'quantity': expected integer, got decimal"); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].diagnostic).toBe("Invalid integer value 30.7 for column 'stock': expected integer, got decimal"); + }); + + test('should accept negative integers', () => { + const source = ` + Table transactions { + id int + amount int + } + + records transactions(id, amount) { + 1, -100 + 2, -500 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + }); + + describe('Decimal/numeric precision and scale validation', () => { + test('should accept valid decimal values within precision and scale', () => { + const source = ` + Table products { + id int + price decimal(10, 2) + rate numeric(5, 3) + } + + records products(id, price, rate) { + 1, 99.99, 1.234 + 2, 12345678.90, 12.345 + 3, -999.99, -0.001 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should reject decimal value exceeding precision', () => { + const source = ` + Table products { + id int + price decimal(5, 2) + } + + records products(id, price) { + 1, 12345.67 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Numeric value 12345.67 for column 'price' exceeds precision: expected at most 5 total digits, got 7"); + }); + + test('should reject decimal value exceeding scale', () => { + const source = ` + Table products { + id int + price decimal(10, 2) + } + + records products(id, price) { + 1, 99.999 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Numeric value 99.999 for column 'price' exceeds scale: expected at most 2 decimal digits, got 3"); + }); + + test('should accept decimal value with fewer decimal places than scale', () => { + const source = ` + Table products { + id int + price decimal(10, 2) + } + + records products(id, price) { + 1, 99.9 + 2, 100 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should handle negative decimal values correctly', () => { + const source = ` + Table transactions { + id int + amount decimal(8, 2) + } + + records transactions(id, amount) { + 1, -12345.67 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should reject negative decimal value exceeding precision', () => { + const source = ` + Table transactions { + id int + amount decimal(5, 2) + } + + records transactions(id, amount) { + 1, -12345.67 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Numeric value -12345.67 for column 'amount' exceeds precision: expected at most 5 total digits, got 7"); + }); + + test('should validate multiple decimal columns', () => { + const source = ` + Table products { + id int + price decimal(5, 2) + tax_rate decimal(5, 2) + } + + records products(id, price, tax_rate) { + 1, 12345.67, 0.99 + 2, 99.99, 10.123 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Numeric value 12345.67 for column 'price' exceeds precision: expected at most 5 total digits, got 7"); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].diagnostic).toBe("Numeric value 10.123 for column 'tax_rate' exceeds scale: expected at most 2 decimal digits, got 3"); + }); + + test('should allow decimal/numeric types without precision parameters', () => { + const source = ` + Table products { + id int + price decimal + rate numeric + } + + records products(id, price, rate) { + 1, 999999999.999999, 123456.789012 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + }); + + describe('Float/double validation', () => { + test('should accept valid float values', () => { + const source = ` + Table measurements { + id int + temperature float + pressure double + } + + records measurements(id, temperature, pressure) { + 1, 98.6, 101325.5 + 2, -40.0, 0.001 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should accept integers for float columns', () => { + const source = ` + Table measurements { + id int + value float + } + + records measurements(id, value) { + 1, 100 + 2, -50 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + }); + + describe('Scientific notation validation', () => { + test('should accept scientific notation that evaluates to integer', () => { + const source = ` + Table data { + id int + count int + } + + records data(id, count) { + 1, 1e2 + 2, 2E3 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should reject scientific notation that evaluates to decimal for integer column', () => { + const source = ` + Table data { + id int + count int + } + + records data(id, count) { + 1, 2e-1 + 2, 3.5e-1 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(2); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Invalid integer value 0.2 for column 'count': expected integer, got decimal"); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].diagnostic).toBe("Invalid integer value 0.35 for column 'count': expected integer, got decimal"); + }); + + test('should accept scientific notation for decimal/numeric types', () => { + const source = ` + Table data { + id int + value decimal(10, 2) + } + + records data(id, value) { + 1, 1.5e2 + 2, 3.14e1 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('should validate precision/scale for scientific notation', () => { + const source = ` + Table data { + id int + value decimal(5, 2) + } + + records data(id, value) { + 1, 1e6 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Numeric value 1000000 for column 'value' exceeds precision: expected at most 5 total digits, got 7"); + }); + + test('should accept scientific notation for float types', () => { + const source = ` + Table measurements { + id int + temperature float + distance double + } + + records measurements(id, temperature, distance) { + 1, 3.14e2, 1.5e10 + 2, -2.5e-3, 6.67e-11 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + }); + + describe('Mixed numeric type validation', () => { + test('should validate multiple numeric types in one table', () => { + const source = ` + Table products { + id int + quantity int + price decimal(10, 2) + weight float + } + + records products(id, quantity, price, weight) { + 1, 10, 99.99, 1.5 + 2, 20.5, 199.99, 2.75 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("Invalid integer value 20.5 for column 'quantity': expected integer, got decimal"); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/simple_fk.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/simple_fk.test.ts new file mode 100644 index 000000000..bfac866a1 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/simple_fk.test.ts @@ -0,0 +1,423 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; + +describe('[example - record] simple foreign key constraints', () => { + test('should accept valid many-to-one FK references', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + Table posts { + id int [pk] + user_id int + title varchar + } + Ref: posts.user_id > users.id + + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + records posts(id, user_id, title) { + 1, 1, "Alice's Post" + 2, 1, "Another Post" + 3, 2, "Bob's Post" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(2); + + // Users table + expect(db.records[0].tableName).toBe('users'); + expect(db.records[0].values.length).toBe(2); + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: 'Alice' }); + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'string', value: 'Bob' }); + + // Posts table + expect(db.records[1].tableName).toBe('posts'); + expect(db.records[1].values.length).toBe(3); + expect(db.records[1].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[1].values[0][1]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[1].values[0][2]).toEqual({ type: 'string', value: "Alice's Post" }); + }); + + test('should reject FK values that dont exist in referenced table', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + Table posts { + id int [pk] + user_id int + title varchar + } + Ref: posts.user_id > users.id + + records users(id, name) { + 1, "Alice" + } + records posts(id, user_id, title) { + 1, 1, "Valid Post" + 2, 999, "Invalid FK" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('FK violation: posts.user_id = 999 does not exist in users.id'); + }); + + test('should allow NULL FK values (optional relationship)', () => { + const source = ` + Table categories { + id int [pk] + name varchar + } + Table products { + id int [pk] + category_id int + name varchar + } + Ref: products.category_id > categories.id + + records categories(id, name) { + 1, "Electronics" + } + records products(id, category_id, name) { + 1, 1, "Laptop" + 2, null, "Uncategorized Item" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[1].values.length).toBe(2); + + // Row 1: id=1, category_id=1, name="Laptop" + expect(db.records[1].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[1].values[0][1]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[1].values[0][2]).toEqual({ type: 'string', value: 'Laptop' }); + + // Row 2: id=2, category_id=null, name="Uncategorized Item" + expect(db.records[1].values[1][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[1].values[1][1].value).toBe(null); + expect(db.records[1].values[1][2]).toEqual({ type: 'string', value: 'Uncategorized Item' }); + }); + + test('should validate one-to-one FK both directions', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + Table user_profiles { + id int [pk] + user_id int + bio text + } + Ref: user_profiles.user_id - users.id + + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + records user_profiles(id, user_id, bio) { + 1, 1, "Alice's bio" + 2, 3, "Invalid user" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + // One-to-one validates both directions: + // 1. user_profiles.user_id=3 doesn't exist in users.id + // 2. users.id=2 (Bob) doesn't have a matching user_profiles.user_id + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe('FK violation: user_profiles.user_id = 3 does not exist in users.id'); + expect(warnings[1].diagnostic).toBe('FK violation: users.id = 2 does not exist in user_profiles.user_id'); + }); + + test('should validate one-to-many FK from parent side', () => { + const source = ` + Table departments { + id int [pk] + name varchar + } + Table employees { + id int [pk] + dept_id int + name varchar + } + Ref: departments.id < employees.dept_id + + records departments(id, name) { + 1, "Engineering" + } + records employees(id, dept_id, name) { + 1, 1, "Alice" + 2, 999, "Bob with invalid dept" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('FK violation: employees.dept_id = 999 does not exist in departments.id'); + }); + + test('should accept valid string FK values', () => { + const source = ` + Table countries { + code varchar(2) [pk] + name varchar + } + Table cities { + id int [pk] + country_code varchar(2) + name varchar + } + Ref: cities.country_code > countries.code + + records countries(code, name) { + "US", "United States" + "UK", "United Kingdom" + } + records cities(id, country_code, name) { + 1, "US", "New York" + 2, "UK", "London" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[1].values[0][1]).toEqual({ type: 'string', value: 'US' }); + expect(db.records[1].values[1][1]).toEqual({ type: 'string', value: 'UK' }); + }); + + test('should reject invalid string FK values', () => { + const source = ` + Table countries { + code varchar(2) [pk] + name varchar + } + Table cities { + id int [pk] + country_code varchar(2) + name varchar + } + Ref: cities.country_code > countries.code + + records countries(code, name) { + "US", "United States" + } + records cities(id, country_code, name) { + 1, "US", "New York" + 2, "FR", "Paris" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('FK violation: cities.country_code = "FR" does not exist in countries.code'); + }); + + test('should validate FK with zero values', () => { + const source = ` + Table items { + id int [pk] + name varchar + } + Table orders { + id int [pk] + item_id int + } + Ref: orders.item_id > items.id + + records items(id, name) { + 0, "Default Item" + 1, "Item One" + } + records orders(id, item_id) { + 1, 0 + 2, 1 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should validate FK with negative values', () => { + const source = ` + Table accounts { + id int [pk] + name varchar + } + Table transactions { + id int [pk] + account_id int + amount decimal + } + Ref: transactions.account_id > accounts.id + + records accounts(id, name) { + -1, "System Account" + 1, "User Account" + } + records transactions(id, account_id, amount) { + 1, -1, 100.00 + 2, 1, 50.00 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should validate FK across multiple records blocks', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + Table posts { + id int [pk] + user_id int + title varchar + } + Ref: posts.user_id > users.id + + records users(id, name) { + 1, "Alice" + } + records users(id, name) { + 2, "Bob" + } + records posts(id, user_id, title) { + 1, 1, "Alice's Post" + } + records posts(id, user_id, title) { + 2, 2, "Bob's Post" + 3, 3, "Invalid Post" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('FK violation: posts.user_id = 3 does not exist in users.id'); + }); + + test('should accept inline ref syntax for FK', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + Table posts { + id int [pk] + user_id int [ref: > users.id] + title varchar + } + + records users(id, name) { + 1, "Alice" + } + records posts(id, user_id, title) { + 1, 1, "Valid Post" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should reject invalid inline ref FK value', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + Table posts { + id int [pk] + user_id int [ref: > users.id] + title varchar + } + + records users(id, name) { + 1, "Alice" + } + records posts(id, user_id, title) { + 1, 1, "Valid Post" + 2, 999, "Invalid Post" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('FK violation: posts.user_id = 999 does not exist in users.id'); + }); + + test('should accept self-referencing FK', () => { + const source = ` + Table employees { + id int [pk] + manager_id int + name varchar + } + Ref: employees.manager_id > employees.id + + records employees(id, manager_id, name) { + 1, null, "CEO" + 2, 1, "Manager" + 3, 2, "Employee" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should reject invalid self-referencing FK', () => { + const source = ` + Table employees { + id int [pk] + manager_id int + name varchar + } + Ref: employees.manager_id > employees.id + + records employees(id, manager_id, name) { + 1, null, "CEO" + 2, 999, "Invalid Manager Reference" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('FK violation: employees.manager_id = 999 does not exist in employees.id'); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/simple_pk.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/simple_pk.test.ts new file mode 100644 index 000000000..8a55851a8 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/simple_pk.test.ts @@ -0,0 +1,231 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; + +describe('[example - record] simple primary key constraints', () => { + test('should accept valid unique primary key values', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + 2, "Bob" + 3, "Charlie" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].tableName).toBe('users'); + expect(db.records[0].columns).toEqual(['id', 'name']); + expect(db.records[0].values.length).toBe(3); + + // Row 1: id=1, name="Alice" + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: 'Alice' }); + + // Row 2: id=2, name="Bob" + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'string', value: 'Bob' }); + + // Row 3: id=3, name="Charlie" + expect(db.records[0].values[2][0]).toEqual({ type: 'integer', value: 3 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'string', value: 'Charlie' }); + }); + + test('should reject duplicate primary key values', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + 1, "Bob" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate PK: users.id = 1'); + }); + + test('should reject NULL values in primary key column', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + null, "Alice" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('NULL in PK: users.id cannot be NULL'); + }); + + test('should detect duplicate pk across multiple records blocks', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + } + records users(id, name) { + 1, "Bob" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate PK: users.id = 1'); + }); + + test('should report error when pk column is missing from record', () => { + const source = ` + Table users { + id int [pk] + name varchar + email varchar + } + records users(name, email) { + "Alice", "alice@example.com" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('PK: Column users.id is missing from record and has no default value'); + }); + + test('should accept string primary keys', () => { + const source = ` + Table countries { + code varchar(2) [pk] + name varchar + } + records countries(code, name) { + "US", "United States" + "UK", "United Kingdom" + "CA", "Canada" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0]).toEqual({ type: 'string', value: 'US' }); + expect(db.records[0].values[1][0]).toEqual({ type: 'string', value: 'UK' }); + expect(db.records[0].values[2][0]).toEqual({ type: 'string', value: 'CA' }); + }); + + test('should reject duplicate string primary keys', () => { + const source = ` + Table countries { + code varchar(2) [pk] + name varchar + } + records countries(code, name) { + "US", "United States" + "US", "USA" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate PK: countries.code = "US"'); + }); + + test('should accept primary key alias syntax', () => { + const source = ` + Table users { + id int [primary key] + name varchar + } + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should handle zero as valid pk value', () => { + const source = ` + Table items { + id int [pk] + name varchar + } + records items(id, name) { + 0, "Zero Item" + 1, "One Item" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 0 }); + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 1 }); + }); + + test('should handle negative numbers as pk values', () => { + const source = ` + Table transactions { + id int [pk] + amount decimal + } + records transactions(id, amount) { + -1, 100.00 + 1, 50.00 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: -1 }); + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 1 }); + }); + + test('should accept valid pk with auto-increment', () => { + const source = ` + Table users { + id int [pk, increment] + name varchar + } + records users(id, name) { + null, "Alice" + null, "Bob" + 3, "Charlie" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/simple_unique.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/simple_unique.test.ts new file mode 100644 index 000000000..764c5ce5e --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/simple_unique.test.ts @@ -0,0 +1,271 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; + +describe('[example - record] simple unique constraints', () => { + test('should accept valid unique values', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + } + records users(id, email) { + 1, "alice@example.com" + 2, "bob@example.com" + 3, "charlie@example.com" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].tableName).toBe('users'); + expect(db.records[0].columns).toEqual(['id', 'email']); + expect(db.records[0].values.length).toBe(3); + + // Row 1: id=1, email="alice@example.com" + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: 'alice@example.com' }); + + // Row 2: id=2, email="bob@example.com" + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'string', value: 'bob@example.com' }); + + // Row 3: id=3, email="charlie@example.com" + expect(db.records[0].values[2][0]).toEqual({ type: 'integer', value: 3 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'string', value: 'charlie@example.com' }); + }); + + test('should reject duplicate unique values', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + } + records users(id, email) { + 1, "alice@example.com" + 2, "alice@example.com" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate UNIQUE: users.email = "alice@example.com"'); + }); + + test('should allow NULL values in unique column (NULLs dont conflict)', () => { + const source = ` + Table users { + id int [pk] + phone varchar [unique] + } + records users(id, phone) { + 1, null + 2, "" + 3, "555-1234" + 4, + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values.length).toBe(4); + + // Row 1: id=1, phone=null + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: null }); + + // Row 2: id=2, phone=null + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'string', value: '' }); + + // Row 3: id=3, phone="555-1234" + expect(db.records[0].values[2][0]).toEqual({ type: 'integer', value: 3 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'string', value: '555-1234' }); + + // Row 4: id=4, phone=null + expect(db.records[0].values[3][0]).toEqual({ type: 'integer', value: 4 }); + expect(db.records[0].values[3][1]).toEqual({ type: 'string', value: null }); + }); + + test('should detect duplicate unique across multiple records blocks', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + } + records users(id, email) { + 1, "alice@example.com" + } + records users(id, email) { + 2, "alice@example.com" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate UNIQUE: users.email = "alice@example.com"'); + }); + + test('should validate multiple unique columns independently', () => { + const source = ` + Table users { + id int [pk] + email varchar [unique] + username varchar [unique] + } + records users(id, email, username) { + 1, "alice@example.com", "alice" + 2, "bob@example.com", "alice" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate UNIQUE: users.username = "alice"'); + }); + + test('should accept unique constraint with numeric values', () => { + const source = ` + Table products { + id int [pk] + sku int [unique] + name varchar + } + records products(id, sku, name) { + 1, 1001, "Product A" + 2, 1002, "Product B" + 3, 1003, "Product C" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: 1001 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'integer', value: 1002 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'integer', value: 1003 }); + }); + + test('should reject duplicate numeric unique values', () => { + const source = ` + Table products { + id int [pk] + sku int [unique] + name varchar + } + records products(id, sku, name) { + 1, 1001, "Product A" + 2, 1001, "Product B" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe('Duplicate UNIQUE: products.sku = 1001'); + }); + + test('should accept zero as unique value', () => { + const source = ` + Table items { + id int [pk] + code int [unique] + } + records items(id, code) { + 1, 0 + 2, 1 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should handle negative numbers in unique constraint', () => { + const source = ` + Table balances { + id int [pk] + account_num int [unique] + } + records balances(id, account_num) { + 1, -100 + 2, 100 + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: -100 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'integer', value: 100 }); + }); + + test('should accept both pk and unique on same column', () => { + const source = ` + Table items { + id int [pk, unique] + name varchar + } + records items(id, name) { + 1, "Item 1" + 2, "Item 2" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); + + test('should reject duplicate when column has both pk and unique', () => { + const source = ` + Table items { + id int [pk, unique] + name varchar + } + records items(id, name) { + 1, "Item 1" + 1, "Item 2" + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + // Both pk and unique violations are reported + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe('Duplicate PK: items.id = 1'); + expect(warnings[1].diagnostic).toBe('Duplicate UNIQUE: items.id = 1'); + }); + + test('should allow all null values in unique column', () => { + const source = ` + Table data { + id int [pk] + optional_code varchar [unique] + } + records data(id, optional_code) { + 1, null + 2, null + 3, null + } + `; + const result = interpret(source); + const warnings = result.getWarnings(); + + expect(warnings.length).toBe(0); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/string_length_validation.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/string_length_validation.test.ts new file mode 100644 index 000000000..6b8389c13 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/string_length_validation.test.ts @@ -0,0 +1,353 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; +import { CompileErrorCode } from '@/core/errors'; + +describe('[example - record] String length validation', () => { + describe('VARCHAR length validation', () => { + test('should accept string values within length limit', () => { + const source = ` + Table users { + id int + name varchar(50) + email varchar(100) + } + + records users(id, name, email) { + 1, "Alice", "alice@example.com" + 2, "Bob Smith", "bob.smith@company.org" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(0); + }); + + test('should reject string value exceeding length limit', () => { + const source = ` + Table users { + id int + name varchar(5) + } + + records users(id, name) { + 1, "Alice Johnson" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("String value for column 'name' exceeds maximum length: expected at most 5 bytes (UTF-8), got 13 bytes"); + }); + + test('should accept empty string for varchar', () => { + const source = ` + Table users { + id int + name varchar(50) + } + + records users(id, name) { + 1, "" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(0); + }); + + test('should accept string at exact length limit', () => { + const source = ` + Table users { + id int + code varchar(5) + } + + records users(id, code) { + 1, "ABCDE" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(0); + }); + + test('should validate multiple varchar columns', () => { + const source = ` + Table users { + id int + first_name varchar(10) + last_name varchar(10) + } + + records users(id, first_name, last_name) { + 1, "Alice", "Smith" + 2, "Christopher", "Johnson" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("String value for column 'first_name' exceeds maximum length: expected at most 10 bytes (UTF-8), got 11 bytes"); + }); + }); + + describe('CHAR length validation', () => { + test('should accept string values within char limit', () => { + const source = ` + Table codes { + id int + code char(10) + } + + records codes(id, code) { + 1, "ABC123" + 2, "XYZ" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(0); + }); + + test('should reject string value exceeding char limit', () => { + const source = ` + Table codes { + id int + code char(3) + } + + records codes(id, code) { + 1, "ABCD" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("String value for column 'code' exceeds maximum length: expected at most 3 bytes (UTF-8), got 4 bytes"); + }); + }); + + describe('Other string types with length', () => { + test('should validate nvarchar length', () => { + const source = ` + Table users { + id int + name nvarchar(5) + } + + records users(id, name) { + 1, "Alice Johnson" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("String value for column 'name' exceeds maximum length: expected at most 5 bytes (UTF-8), got 13 bytes"); + }); + + test('should validate nchar length', () => { + const source = ` + Table codes { + id int + code nchar(3) + } + + records codes(id, code) { + 1, "ABCD" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("String value for column 'code' exceeds maximum length: expected at most 3 bytes (UTF-8), got 4 bytes"); + }); + + test('should validate character varying length', () => { + const source = ` + Table users { + id int + name "character varying"(10) + } + + records users(id, name) { + 1, "Christopher" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("String value for column 'name' exceeds maximum length: expected at most 10 bytes (UTF-8), got 11 bytes"); + }); + }); + + describe('String types without length parameter', () => { + test('should allow any length for text type', () => { + const source = ` + Table articles { + id int + content text + } + + records articles(id, content) { + 1, "This is a very long text content that can be arbitrarily long without any length restrictions because text type does not have a length parameter" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(0); + }); + + test('should allow any length for varchar without parameter', () => { + const source = ` + Table users { + id int + description varchar + } + + records users(id, description) { + 1, "This is a very long description that can be arbitrarily long" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(0); + }); + }); + + describe('Edge cases', () => { + test('should count unicode characters using UTF-8 byte length', () => { + const source = ` + Table messages { + id int + text varchar(20) + } + + records messages(id, text) { + 1, "Hello" + 2, "😀😁😂😃😄" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + // "😀😁😂😃😄" is 5 emojis × 4 bytes each = 20 bytes + expect(errors.length).toBe(0); + }); + + test('should reject string with multi-byte characters exceeding byte limit', () => { + const source = ` + Table messages { + id int + text varchar(10) + } + + records messages(id, text) { + 1, "😀😁😂" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + // "😀😁😂" is 3 emojis × 4 bytes each = 12 bytes, exceeds varchar(10) + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toContain('exceeds maximum length: expected at most 10 bytes'); + }); + + test('should validate multiple errors in one record', () => { + const source = ` + Table users { + id int + first_name varchar(5) + last_name varchar(5) + email varchar(10) + } + + records users(id, first_name, last_name, email) { + 1, "Christopher", "Johnson", "chris.johnson@example.com" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(3); + expect(warnings[0].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[0].diagnostic).toBe("String value for column 'first_name' exceeds maximum length: expected at most 5 bytes (UTF-8), got 11 bytes"); + expect(warnings[1].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[1].diagnostic).toBe("String value for column 'last_name' exceeds maximum length: expected at most 5 bytes (UTF-8), got 7 bytes"); + expect(warnings[2].code).toBe(CompileErrorCode.INVALID_RECORDS_FIELD); + expect(warnings[2].diagnostic).toBe("String value for column 'email' exceeds maximum length: expected at most 10 bytes (UTF-8), got 25 bytes"); + }); + + test('should validate across multiple records', () => { + const source = ` + Table users { + id int + name varchar(5) + } + + records users(id, name) { + 1, "Alice" + 2, "Bob" + 3, "Christopher" + 4, "Dave" + 5, "Elizabeth" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe("String value for column 'name' exceeds maximum length: expected at most 5 bytes (UTF-8), got 11 bytes"); + expect(warnings[1].diagnostic).toBe("String value for column 'name' exceeds maximum length: expected at most 5 bytes (UTF-8), got 9 bytes"); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/interpreter/record/type_compatibility.test.ts b/packages/dbml-parse/__tests__/examples/interpreter/record/type_compatibility.test.ts new file mode 100644 index 000000000..22a9942c0 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/interpreter/record/type_compatibility.test.ts @@ -0,0 +1,804 @@ +import { describe, expect, test } from 'vitest'; +import { interpret } from '@tests/utils'; + +describe('[example - record] type compatibility validation', () => { + describe('boolean type validation', () => { + test('- should accept all valid boolean literal values', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, true + 2, false + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records.length).toBe(1); + expect(db.records[0].values.length).toBe(2); + expect(db.records[0].values[0][1]).toEqual({ type: 'bool', value: true }); + expect(db.records[0].values[1][1]).toEqual({ type: 'bool', value: false }); + }); + + test('- should accept string boolean values (true/false)', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, 'true' + 2, "false" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'bool', value: true }); + expect(db.records[0].values[1][1]).toEqual({ type: 'bool', value: false }); + }); + + test('- should accept string boolean values (t/f)', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, 't' + 2, 'f' + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'bool', value: true }); + expect(db.records[0].values[1][1]).toEqual({ type: 'bool', value: false }); + }); + + test('- should accept string boolean values (y/n)', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, 'y' + 2, 'n' + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'bool', value: true }); + expect(db.records[0].values[1][1]).toEqual({ type: 'bool', value: false }); + }); + + test('- should accept string boolean values (yes/no)', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, 'yes' + 2, "no" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'bool', value: true }); + expect(db.records[0].values[1][1]).toEqual({ type: 'bool', value: false }); + }); + + test('- should accept numeric boolean values (1/0)', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, 1 + 2, 0 + 3, '1' + 4, "0" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'bool', value: true }); + expect(db.records[0].values[1][1]).toEqual({ type: 'bool', value: false }); + expect(db.records[0].values[2][1]).toEqual({ type: 'bool', value: true }); + expect(db.records[0].values[3][1]).toEqual({ type: 'bool', value: false }); + }); + + test('- should reject invalid string value for boolean column', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, "invalid" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe("Invalid boolean value for column 'active'"); + }); + + test('- should reject numeric values other than 0/1 for boolean column', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, 2 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe("Invalid boolean value for column 'active'"); + }); + }); + + describe('numeric type validation', () => { + test('- should reject string value for integer column', () => { + const source = ` + Table data { + id int + name varchar + } + records data(id, name) { + "not a number", "Alice" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe("Invalid numeric value for column 'id'"); + }); + + test('- should accept valid decimal values', () => { + const source = ` + Table data { + id int + price decimal(10,2) + rate float + } + records data(id, price, rate) { + 1, 99.99, 3.14159 + 2, -50.00, -2.5 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'real', value: 99.99 }); + expect(db.records[0].values[0][2]).toEqual({ type: 'real', value: 3.14159 }); + }); + + test('- should accept scientific notation for numeric columns', () => { + const source = ` + Table data { + id int + value decimal + } + records data(id, value) { + 1, 1e10 + 2, 3.14e-5 + 3, 2E+8 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'real', value: 1e10 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'real', value: 3.14e-5 }); + expect(db.records[0].values[2][1]).toEqual({ type: 'real', value: 2e8 }); + }); + }); + + describe('string type validation', () => { + test('- should accept single-quoted strings', () => { + const source = ` + Table data { + id int + name varchar + } + records data(id, name) { + 1, 'Alice' + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: 'Alice' }); + }); + + test('- should accept double-quoted strings', () => { + const source = ` + Table data { + id int + name varchar + } + records data(id, name) { + 1, "Bob" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: 'Bob' }); + }); + + test('- should accept empty strings for string columns', () => { + const source = ` + Table data { + id int + name varchar + } + records data(id, name) { + 1, "" + 2, '' + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: '' }); + expect(db.records[0].values[1][1]).toEqual({ type: 'string', value: '' }); + }); + + test('- should treat empty field as null for non-string columns', () => { + const source = ` + Table data { + id int + count int + name varchar + } + records data(id, count, name) { + 1, , "test" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: null }); + expect(db.records[0].values[0][2]).toEqual({ type: 'string', value: 'test' }); + }); + + test('- should handle various null forms correctly', () => { + const source = ` + Table data { + id int + count int + amount decimal + name varchar + description text + } + records data(id, count, amount, name, description) { + 1, null, null, null, null + 2, , , , + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + // Row 1: explicit null keyword + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: null }); + expect(db.records[0].values[0][2]).toEqual({ type: 'real', value: null }); + expect(db.records[0].values[0][3]).toEqual({ type: 'string', value: null }); + expect(db.records[0].values[0][4]).toEqual({ type: 'string', value: null }); + + // Row 2: empty field (treated as null for non-string, null for string) + expect(db.records[0].values[1][1]).toEqual({ type: 'integer', value: null }); + expect(db.records[0].values[1][2]).toEqual({ type: 'real', value: null }); + expect(db.records[0].values[1][3]).toEqual({ type: 'string', value: null }); + expect(db.records[0].values[1][4]).toEqual({ type: 'string', value: null }); + }); + + test('- should accept strings with special characters', () => { + const source = ` + Table data { + id int + content text + } + records data(id, content) { + 1, "Line 1\\nLine 2" + 2, 'Tab\\tSeparated' + 3, "Quote: \\"test\\"" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + }); + + describe('null handling', () => { + test('- should accept null for nullable column', () => { + const source = ` + Table users { + id int [pk] + name varchar [null] + email varchar + } + records users(id, name, email) { + 1, null, null + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: null }); + expect(db.records[0].values[0][2]).toEqual({ type: 'string', value: null }); + }); + + test('- should reject NULL for NOT NULL column without default and increment', () => { + const source = ` + Table users { + id int [pk] + name varchar [not null] + } + records users(id, name) { + 1, null + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe("NULL not allowed for non-nullable column 'name' without default and increment"); + }); + + test('- should allow NULL for NOT NULL column with default', () => { + const source = ` + Table users { + id int [pk] + status varchar [not null, default: 'active'] + } + records users(id, status) { + 1, null + 2, "inactive" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values.length).toBe(2); + + // Row 1: id=1, status=null (null stored, default applied at DB level) + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: null }); + + // Row 2: id=2, status="inactive" + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: 2 }); + expect(db.records[0].values[1][1]).toEqual({ type: 'string', value: 'inactive' }); + }); + + test('- should allow NULL for auto-increment column', () => { + const source = ` + Table users { + id int [pk, increment] + name varchar + } + records users(id, name) { + null, "Alice" + null, "Bob" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: null }); + expect(db.records[0].values[1][0]).toEqual({ type: 'integer', value: null }); + }); + + test('- should reject explicit null keyword in various casings (if invalid)', () => { + const source = ` + Table users { + id int + name varchar [not null] + } + records users(id, name) { + 1, NULL + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + // NULL should be valid syntax + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe("NULL not allowed for non-nullable column 'name' without default and increment"); + }); + }); + + describe('datetime type validation', () => { + test('- should accept string datetime values', () => { + const source = ` + Table events { + id int + created_at timestamp + event_date date + } + records events(id, created_at, event_date) { + 1, "2024-01-15 10:30:00", "2024-01-15" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1].type).toBe('datetime'); + expect(db.records[0].values[0][1].value).toBe('2024-01-15 10:30:00'); + expect(db.records[0].values[0][2].type).toBe('date'); + expect(db.records[0].values[0][2].value).toBe('2024-01-15'); + }); + }); + + describe('enum type validation', () => { + test('- should accept schema-qualified enum values', () => { + const source = ` + Enum auth.role { + admin + user + } + Table auth.users { + id int [pk] + role auth.role + } + records auth.users(id, role) { + 1, auth.role.admin + 2, auth.role.user + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + }); + + test('- should reject invalid enum field', () => { + const source = ` + Enum status { + active + inactive + } + Table users { + id int [pk] + status status + } + records users(id, status) { + 1, status.active + 2, status.invalid + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + // This is a BINDING_ERROR, not a validation error, so it stays as an error + expect(errors.length).toBe(1); + expect(errors[0].diagnostic).toBe("Enum field 'invalid' does not exist in Enum 'status'"); + }); + + test('- should reject numeric value for enum column', () => { + const source = ` + Enum status { + active + inactive + } + Table users { + id int [pk] + status status + } + records users(id, status) { + 1, 1 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(1); + expect(warnings[0].diagnostic).toBe("Invalid enum value for column 'status'"); + }); + }); + + describe('invalid type tests', () => { + test('- should reject invalid boolean values', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, "not_a_bool" + 2, 99 + 3, -1 + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(3); + expect(warnings[0].diagnostic).toBe("Invalid boolean value for column 'active'"); + expect(warnings[1].diagnostic).toBe("Invalid boolean value for column 'active'"); + expect(warnings[2].diagnostic).toBe("Invalid boolean value for column 'active'"); + }); + + test('- should reject invalid numeric values', () => { + const source = ` + Table data { + id int + price decimal + } + records data(id, price) { + "not_a_number", 100.00 + 2, "also_not_a_number" + 3, true + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(3); + expect(warnings[0].diagnostic).toBe("Invalid numeric value for column 'id'"); + expect(warnings[1].diagnostic).toBe("Invalid numeric value for column 'price'"); + expect(warnings[2].diagnostic).toBe("Invalid numeric value for column 'price'"); + }); + + test('- should reject invalid string values', () => { + const source = ` + Table data { + id int + name varchar + } + records data(id, name) { + 1, 123 + 2, true + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toBe("Invalid string value for column 'name'"); + expect(warnings[1].diagnostic).toBe("Invalid string value for column 'name'"); + }); + + test('- should reject invalid datetime values', () => { + const source = ` + Table events { + id int + created_at timestamp + } + records events(id, created_at) { + 1, 12345 + 2, true + } + `; + const result = interpret(source); + const errors = result.getErrors(); + const warnings = result.getWarnings(); + + expect(errors.length).toBe(0); + expect(warnings.length).toBe(2); + expect(warnings[0].diagnostic).toContain("Invalid datetime value for column 'created_at'"); + expect(warnings[1].diagnostic).toContain("Invalid datetime value for column 'created_at'"); + }); + }); + + describe('null and empty field handling', () => { + test('- should treat empty field as null for numeric types', () => { + const source = ` + Table data { + id int + count int + price decimal + } + records data(id, count, price) { + 1, , + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: 1 }); + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: null }); + expect(db.records[0].values[0][2]).toEqual({ type: 'real', value: null }); + }); + + test('- should treat empty field as null for boolean type', () => { + const source = ` + Table data { + id int + active boolean + } + records data(id, active) { + 1, + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'bool', value: null }); + }); + + test('- should treat empty field as null for datetime type', () => { + const source = ` + Table events { + id int + created_at timestamp + } + records events(id, created_at) { + 1, + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'datetime', value: null }); + }); + + test('- should treat empty field as null for enum type', () => { + const source = ` + Enum status { + active + inactive + } + Table users { + id int + status status + } + records users(id, status) { + 1, + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + // Empty field for enum is treated as string null + expect(db.records[0].values[0][1].type).toBe('string'); + expect(db.records[0].values[0][1].value).toBe(null); + }); + + test('- should treat empty string as null for non-string types', () => { + const source = ` + Table data { + id int + count int + active boolean + name varchar + } + records data(id, count, active, name) { + "", "", "", "" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + // Empty strings are treated as null for non-string types, empty string for string types + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][0]).toEqual({ type: 'integer', value: null }); + expect(db.records[0].values[0][1]).toEqual({ type: 'integer', value: null }); + expect(db.records[0].values[0][2]).toEqual({ type: 'bool', value: null }); + expect(db.records[0].values[0][3]).toEqual({ type: 'string', value: '' }); + }); + + test('- should accept empty string for string types', () => { + const source = ` + Table data { + id int + name varchar + description text + } + records data(id, name, description) { + 1, "", "" + } + `; + const result = interpret(source); + const errors = result.getErrors(); + + expect(errors.length).toBe(0); + + const db = result.getValue()!; + expect(db.records[0].values[0][1]).toEqual({ type: 'string', value: '' }); + expect(db.records[0].values[0][2]).toEqual({ type: 'string', value: '' }); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/lexer/scientific-notation.test.ts b/packages/dbml-parse/__tests__/examples/lexer/scientific-notation.test.ts new file mode 100644 index 000000000..680ba8f18 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/lexer/scientific-notation.test.ts @@ -0,0 +1,277 @@ +import { describe, expect, test } from 'vitest'; +import { SyntaxTokenKind, isTriviaToken } from '@/core/lexer/tokens'; +import { CompileErrorCode } from '@/core/errors'; +import { lex } from '@tests/utils'; + +// Helper to get non-trivia, non-EOF tokens +function getTokens (source: string) { + return lex(source).getValue().filter((t) => !isTriviaToken(t) && t.kind !== SyntaxTokenKind.EOF); +} + +describe('[example] lexer - scientific notation', () => { + describe('valid scientific notation', () => { + test('should tokenize integer with exponent', () => { + const source = '1e2 1E2 1e+2 1e-2'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(4); + + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1e2' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1E2' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1e+2' }); + expect(tokens[3]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1e-2' }); + }); + + test('should tokenize decimal with exponent', () => { + const source = '3.14e10 2.5E-3 1.0e+5'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '3.14e10' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '2.5E-3' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1.0e+5' }); + }); + + test('should tokenize scientific notation at end of input', () => { + const source = '1e2'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(1); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1e2' }); + }); + + test('should tokenize scientific notation followed by delimiter', () => { + const source = '1e2,3e4'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1e2' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.COMMA, value: ',' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '3e4' }); + }); + + test('should tokenize large exponents', () => { + const source = '1e100 2.5e-50'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(2); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1e100' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '2.5e-50' }); + }); + + test('should tokenize scientific notation in DBML context', () => { + const source = 'default: 1e-5'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: 'default' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.COLON, value: ':' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1e-5' }); + }); + + test('should tokenize zero exponent', () => { + const source = '1e0 5.5e0'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(2); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1e0' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '5.5e0' }); + }); + }); + + describe('floating point numbers', () => { + test('should tokenize simple floating points', () => { + const source = '3.14 0.5 123.456'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '3.14' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '0.5' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '123.456' }); + }); + + test('should tokenize floating point at end of input', () => { + const source = '3.14'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(1); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '3.14' }); + }); + + test('should tokenize floating point followed by delimiter', () => { + const source = '3.14,2.71'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '3.14' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.COMMA, value: ',' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '2.71' }); + }); + }); + + describe('identifiers starting with digits', () => { + test('should tokenize digit followed by letters as identifier', () => { + const source = '1abc 2test 3rd'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '1abc' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '2test' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '3rd' }); + }); + + test('should tokenize digit-letter-digit as identifier', () => { + const source = '1a2b3c'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(1); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '1a2b3c' }); + }); + + test('should tokenize 1e as identifier (incomplete exponent)', () => { + const source = '1e'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(1); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '1e' }); + }); + + test('should tokenize 1ea as identifier', () => { + const source = '1ea'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(1); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '1ea' }); + }); + + test('should tokenize 1e2abc as identifier (valid exponent followed by letters)', () => { + const source = '1e2abc'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(1); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '1e2abc' }); + }); + + test('should tokenize 5e10abcbd as identifier', () => { + const source = '5e10abcbd'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(1); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '5e10abcbd' }); + }); + }); + + describe('incomplete exponent with sign - sign not consumed', () => { + test('should tokenize 1e+ as identifier and operator', () => { + // Sign is NOT consumed when no digit follows + const source = '1e+'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(2); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '1e' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.OP, value: '+' }); + }); + + test('should tokenize 1e- as identifier and operator', () => { + const source = '1e-'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(2); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '1e' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.OP, value: '-' }); + }); + + test('should tokenize 1e+a as identifier, operator, identifier', () => { + const source = '1e+a'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '1e' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.OP, value: '+' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: 'a' }); + }); + + test('should tokenize 1e-b as identifier, operator, identifier', () => { + const source = '1e-b'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: '1e' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.OP, value: '-' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: 'b' }); + }); + }); + + describe('invalid numbers - multiple dots', () => { + test('should report error for number with two dots', () => { + const source = '1.2.3'; + const result = lex(source); + const errors = result.getErrors(); + + expect(errors).toHaveLength(1); + expect(errors[0].code).toBe(CompileErrorCode.UNKNOWN_TOKEN); + }); + + test('should report error for two dots before exponent', () => { + const source = '1.2.3e4'; + const result = lex(source); + const errors = result.getErrors(); + + expect(errors).toHaveLength(1); + expect(errors[0].code).toBe(CompileErrorCode.UNKNOWN_TOKEN); + }); + + test('should tokenize 1.5e2.5 as number, dot, number (second dot after exponent)', () => { + // 1.5e2 is valid, then . and 5 are separate tokens + const source = '1.5e2.5'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1.5e2' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.OP, value: '.' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '5' }); + }); + + test('should report error for decimal with letters', () => { + const source = '3.14abc'; + const result = lex(source); + const errors = result.getErrors(); + + expect(errors).toHaveLength(1); + expect(errors[0].code).toBe(CompileErrorCode.UNKNOWN_TOKEN); + }); + + test('should report error for decimal scientific with letters', () => { + const source = '3.14e2xyz'; + const result = lex(source); + const errors = result.getErrors(); + + expect(errors).toHaveLength(1); + expect(errors[0].code).toBe(CompileErrorCode.UNKNOWN_TOKEN); + }); + }); + + describe('edge cases with dot after exponent', () => { + test('should tokenize 1e2.5 as number, dot, number', () => { + // No dot before 'e', so 1e2 is valid, then . and 5 are separate + const source = '1e2.5'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '1e2' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.OP, value: '.' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '5' }); + }); + + test('should tokenize 5e10.method as number, dot, identifier', () => { + const source = '5e10.method'; + const tokens = getTokens(source); + + expect(tokens).toHaveLength(3); + expect(tokens[0]).toMatchObject({ kind: SyntaxTokenKind.NUMERIC_LITERAL, value: '5e10' }); + expect(tokens[1]).toMatchObject({ kind: SyntaxTokenKind.OP, value: '.' }); + expect(tokens[2]).toMatchObject({ kind: SyntaxTokenKind.IDENTIFIER, value: 'method' }); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/parser/parser.test.ts b/packages/dbml-parse/__tests__/examples/parser/parser.test.ts index c4323ddaf..557ab5e0e 100644 --- a/packages/dbml-parse/__tests__/examples/parser/parser.test.ts +++ b/packages/dbml-parse/__tests__/examples/parser/parser.test.ts @@ -12,6 +12,8 @@ import { AttributeNode, PrimaryExpressionNode, VariableNode, + CommaExpressionNode, + LiteralNode, } from '@/core/parser/nodes'; import { SyntaxTokenKind } from '@/core/lexer/tokens'; import { parse } from '@tests/utils'; @@ -22,6 +24,9 @@ function getPrimaryValue (node: PrimaryExpressionNode | undefined): string | und if (node.expression instanceof VariableNode) { return node.expression.variable?.value; } + if (node.expression instanceof LiteralNode) { + return node.expression.literal?.value; + } return undefined; } @@ -841,6 +846,350 @@ Table posts { }); }); + describe('comma expression parsing', () => { + test('should parse comma expression in function application args', () => { + const source = ` + Table users { + sample_data 1, 2, 3 + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + + expect(body.body).toHaveLength(1); + const funcApp = body.body[0] as FunctionApplicationNode; + expect(funcApp.kind).toBe(SyntaxNodeKind.FUNCTION_APPLICATION); + + // The args should contain a CommaExpressionNode + expect(funcApp.args).toHaveLength(1); + expect(funcApp.args[0].kind).toBe(SyntaxNodeKind.COMMA_EXPRESSION); + + const commaExpr = funcApp.args[0] as CommaExpressionNode; + expect(commaExpr.elementList).toHaveLength(3); + expect(commaExpr.commaList).toHaveLength(2); + + // Verify each element is a primary expression with a literal + commaExpr.elementList.forEach((elem) => { + expect(elem.kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + const primary = elem as PrimaryExpressionNode; + expect(primary.expression?.kind).toBe(SyntaxNodeKind.LITERAL); + }); + }); + + test('should parse comma expression with string values', () => { + const source = ` + Table users { + sample_data 'a', 'b', 'c' + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + + expect(funcApp.args).toHaveLength(1); + expect(funcApp.args[0].kind).toBe(SyntaxNodeKind.COMMA_EXPRESSION); + + const commaExpr = funcApp.args[0] as CommaExpressionNode; + expect(commaExpr.elementList).toHaveLength(3); + }); + + test('should parse comma expression as callee', () => { + const source = ` + Table users { + 1, 2, 3 + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + + expect(body.body).toHaveLength(1); + const funcApp = body.body[0] as FunctionApplicationNode; + + // The callee should be a CommaExpressionNode + expect(funcApp.callee?.kind).toBe(SyntaxNodeKind.COMMA_EXPRESSION); + + const commaExpr = funcApp.callee as CommaExpressionNode; + expect(commaExpr.elementList).toHaveLength(3); + expect(commaExpr.commaList).toHaveLength(2); + }); + + test('should parse single expression without comma as normal expression', () => { + const source = ` + Table users { + sample_data 1 + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + + // Single value should be a PrimaryExpression, not CommaExpression + expect(funcApp.args).toHaveLength(1); + expect(funcApp.args[0].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + }); + + test('should parse multiple comma expressions in function application', () => { + const source = ` + Table users { + sample_data 1, 2 'x', 'y' + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + + // Should have two args: "1, 2" and "'x', 'y'" + expect(funcApp.args).toHaveLength(2); + expect(funcApp.args[0].kind).toBe(SyntaxNodeKind.COMMA_EXPRESSION); + expect(funcApp.args[1].kind).toBe(SyntaxNodeKind.COMMA_EXPRESSION); + + const first = funcApp.args[0] as CommaExpressionNode; + expect(first.elementList).toHaveLength(2); + + const second = funcApp.args[1] as CommaExpressionNode; + expect(second.elementList).toHaveLength(2); + }); + + test('should preserve comma tokens in comma expression', () => { + const source = ` + Table users { + sample_data 1, 2, 3, 4 + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + const commaExpr = funcApp.args[0] as CommaExpressionNode; + + expect(commaExpr.commaList).toHaveLength(3); + commaExpr.commaList.forEach((comma) => { + expect(comma.value).toBe(','); + expect(comma.kind).toBe(SyntaxTokenKind.COMMA); + }); + }); + + test('should parse empty field in comma expression (consecutive commas)', () => { + const source = ` + Table users { + sample_data 1, , 3 + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + + expect(funcApp.args).toHaveLength(1); + expect(funcApp.args[0].kind).toBe(SyntaxNodeKind.COMMA_EXPRESSION); + + const commaExpr = funcApp.args[0] as CommaExpressionNode; + expect(commaExpr.elementList).toHaveLength(3); + expect(commaExpr.commaList).toHaveLength(2); + + // First element: 1 + expect(commaExpr.elementList[0].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + // Second element: empty (EmptyNode) + expect(commaExpr.elementList[1].kind).toBe(SyntaxNodeKind.EMPTY); + // Third element: 3 + expect(commaExpr.elementList[2].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + }); + + test('should parse multiple empty fields in comma expression', () => { + const source = ` + Table users { + sample_data 1, , , 4 + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + const commaExpr = funcApp.args[0] as CommaExpressionNode; + + expect(commaExpr.elementList).toHaveLength(4); + expect(commaExpr.commaList).toHaveLength(3); + + // First element: 1 + expect(commaExpr.elementList[0].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + // Second element: empty (EmptyNode) + expect(commaExpr.elementList[1].kind).toBe(SyntaxNodeKind.EMPTY); + // Third element: empty (EmptyNode) + expect(commaExpr.elementList[2].kind).toBe(SyntaxNodeKind.EMPTY); + // Fourth element: 4 + expect(commaExpr.elementList[3].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + }); + + test('should parse trailing comma in comma expression', () => { + const source = ` + Table users { + sample_data 1, 2, + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + const commaExpr = funcApp.args[0] as CommaExpressionNode; + + expect(commaExpr.elementList).toHaveLength(3); + expect(commaExpr.commaList).toHaveLength(2); + + // First element: 1 + expect(commaExpr.elementList[0].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + // Second element: 2 + expect(commaExpr.elementList[1].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + // Third element: empty (EmptyNode for trailing comma) + expect(commaExpr.elementList[2].kind).toBe(SyntaxNodeKind.EMPTY); + }); + + test('should parse leading comma in comma expression (as callee)', () => { + const source = ` + Table users { + ,1, 2 + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + + // The callee should be a CommaExpressionNode starting with empty + expect(funcApp.callee?.kind).toBe(SyntaxNodeKind.COMMA_EXPRESSION); + + const commaExpr = funcApp.callee as CommaExpressionNode; + expect(commaExpr.elementList).toHaveLength(3); + expect(commaExpr.commaList).toHaveLength(2); + + // First element: empty (EmptyNode for leading comma) + expect(commaExpr.elementList[0].kind).toBe(SyntaxNodeKind.EMPTY); + // Second element: 1 + expect(commaExpr.elementList[1].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[1] as PrimaryExpressionNode)).toBe('1'); + // Third element: 2 + expect(commaExpr.elementList[2].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[2] as PrimaryExpressionNode)).toBe('2'); + }); + + test('should parse leading and trailing comma in comma expression', () => { + const source = ` + Table users { + ,1, 2, + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + const commaExpr = funcApp.callee as CommaExpressionNode; + + expect(commaExpr.elementList).toHaveLength(4); + expect(commaExpr.commaList).toHaveLength(3); + + // First element: empty (EmptyNode for leading comma) + expect(commaExpr.elementList[0].kind).toBe(SyntaxNodeKind.EMPTY); + // Second element: 1 + expect(commaExpr.elementList[1].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[1] as PrimaryExpressionNode)).toBe('1'); + // Third element: 2 + expect(commaExpr.elementList[2].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[2] as PrimaryExpressionNode)).toBe('2'); + // Fourth element: empty (EmptyNode for trailing comma) + expect(commaExpr.elementList[3].kind).toBe(SyntaxNodeKind.EMPTY); + }); + + test('should parse comma expression with only commas (all empty fields)', () => { + const source = ` + Table users { + ,, + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + const commaExpr = funcApp.callee as CommaExpressionNode; + + expect(commaExpr.elementList).toHaveLength(3); + expect(commaExpr.commaList).toHaveLength(2); + + // All elements should be EmptyNodes + expect(commaExpr.elementList[0].kind).toBe(SyntaxNodeKind.EMPTY); + expect(commaExpr.elementList[1].kind).toBe(SyntaxNodeKind.EMPTY); + expect(commaExpr.elementList[2].kind).toBe(SyntaxNodeKind.EMPTY); + }); + + test('should parse leading comma as callee in function application with spaces', () => { + const source = ` + Table users { + , 1, 2 + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + + // The callee should be a CommaExpressionNode starting with empty + expect(funcApp.callee?.kind).toBe(SyntaxNodeKind.COMMA_EXPRESSION); + + const commaExpr = funcApp.callee as CommaExpressionNode; + expect(commaExpr.elementList).toHaveLength(3); + + // First element: empty (EmptyNode for leading comma) + expect(commaExpr.elementList[0].kind).toBe(SyntaxNodeKind.EMPTY); + // Second element: 1 + expect(commaExpr.elementList[1].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[1] as PrimaryExpressionNode)).toBe('1'); + // Third element: 2 + expect(commaExpr.elementList[2].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[2] as PrimaryExpressionNode)).toBe('2'); + }); + + test('should parse leading comma with string values', () => { + const source = ` + Table users { + ,'hello', 'world' + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + const commaExpr = funcApp.callee as CommaExpressionNode; + + expect(commaExpr.elementList).toHaveLength(3); + expect(commaExpr.commaList).toHaveLength(2); + + // First element: empty (EmptyNode for leading comma) + expect(commaExpr.elementList[0].kind).toBe(SyntaxNodeKind.EMPTY); + // Second element: 'hello' (string literal values don't include quotes) + expect(commaExpr.elementList[1].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[1] as PrimaryExpressionNode)).toBe('hello'); + // Third element: 'world' + expect(commaExpr.elementList[2].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[2] as PrimaryExpressionNode)).toBe('world'); + }); + + test('should parse leading comma with identifier values', () => { + const source = ` + Table users { + ,foo, bar, baz + } + `; + const elements = getElements(source); + const body = elements[0].body as BlockExpressionNode; + const funcApp = body.body[0] as FunctionApplicationNode; + const commaExpr = funcApp.callee as CommaExpressionNode; + + expect(commaExpr.elementList).toHaveLength(4); + expect(commaExpr.commaList).toHaveLength(3); + + // First element: empty (EmptyNode for leading comma) + expect(commaExpr.elementList[0].kind).toBe(SyntaxNodeKind.EMPTY); + // Second element: foo + expect(commaExpr.elementList[1].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[1] as PrimaryExpressionNode)).toBe('foo'); + // Third element: bar + expect(commaExpr.elementList[2].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[2] as PrimaryExpressionNode)).toBe('bar'); + // Fourth element: baz + expect(commaExpr.elementList[3].kind).toBe(SyntaxNodeKind.PRIMARY_EXPRESSION); + expect(getPrimaryValue(commaExpr.elementList[3] as PrimaryExpressionNode)).toBe('baz'); + }); + }); + describe('edge cases', () => { test('should handle empty source with empty body', () => { const result = parse(''); diff --git a/packages/dbml-parse/__tests__/examples/services/definition.test.ts b/packages/dbml-parse/__tests__/examples/services/definition/general.test.ts similarity index 91% rename from packages/dbml-parse/__tests__/examples/services/definition.test.ts rename to packages/dbml-parse/__tests__/examples/services/definition/general.test.ts index 2266d1ae6..901e0f34c 100644 --- a/packages/dbml-parse/__tests__/examples/services/definition.test.ts +++ b/packages/dbml-parse/__tests__/examples/services/definition/general.test.ts @@ -1,9 +1,9 @@ import { describe, expect, it } from 'vitest'; import Compiler from '@/compiler'; import DBMLDefinitionProvider from '@/services/definition/provider'; -import { createMockTextModel, createPosition, extractTextFromRange } from '../../utils'; +import { createMockTextModel, createPosition, extractTextFromRange } from '../../../utils'; -describe('[snapshot] DefinitionProvider', () => { +describe('[example] DefinitionProvider', () => { describe('should find definition for tables', () => { it('- should find table definition in Ref block', () => { const program = `Table users { @@ -252,10 +252,22 @@ Ref: posts.user_id > users.id`; const model = createMockTextModel(program); // Position on "user_id" in "posts.user_id" - const position = createPosition(9, 12); + const position = createPosition(9, 13); const definitions = definitionProvider.provideDefinition(model, position); - expect(definitions).toMatchInlineSnapshot('[]'); + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 14, + "endLineNumber": 6, + "startColumn": 3, + "startLineNumber": 6, + }, + "uri": "", + }, + ] + `); }); it('- should find column definition in inline ref', () => { @@ -803,7 +815,7 @@ TableGroup group1 { const model = createMockTextModel(program); // Position on "status" in composite index - const position = createPosition(7, 20); + const position = createPosition(7, 21); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -943,7 +955,7 @@ Table users { const model = createMockTextModel(program); // Position on "timestamps" in qualified partial injection - const position = createPosition(7, 14); + const position = createPosition(7, 15); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -962,7 +974,7 @@ Table users { const model = createMockTextModel(program); // Position on keyword "Table" - const position = createPosition(1, 1); + const position = createPosition(1, 2); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -996,7 +1008,7 @@ Ref: posts.user_id > users.id`; const model = createMockTextModel(program); // Position on number literal - const position = createPosition(2, 20); + const position = createPosition(2, 21); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -1013,7 +1025,7 @@ Ref: posts.user_id > users.id`; const model = createMockTextModel(program); // Position inside string literal - const position = createPosition(2, 27); + const position = createPosition(2, 28); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -1030,7 +1042,7 @@ Ref: posts.user_id > users.id`; const model = createMockTextModel(program); // Position on "pk" attribute - const position = createPosition(2, 11); + const position = createPosition(2, 12); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -1048,7 +1060,7 @@ Table posts { const model = createMockTextModel(program); // Position inside comment - const position = createPosition(1, 10); + const position = createPosition(1, 11); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -1230,10 +1242,22 @@ Ref: schema1.orders.id > schema2.orders.id`; `); // Position on schema2.orders - const position2 = createPosition(9, 34); + const position2 = createPosition(9, 35); const definitions2 = definitionProvider.provideDefinition(model, position2); - expect(definitions2).toMatchInlineSnapshot('[]'); + expect(definitions2).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 2, + "endLineNumber": 7, + "startColumn": 1, + "startLineNumber": 5, + }, + "uri": "", + }, + ] + `); }); it('- should handle mixed direct and injected columns', () => { @@ -1643,7 +1667,7 @@ Ref: posts.(author_first, author_last) > users.(first_name, last_name)`; const model = createMockTextModel(program); // Position on "users" - const position = createPosition(1, 9); + const position = createPosition(1, 10); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -1708,7 +1732,7 @@ Table posts { const model = createMockTextModel(program); // Position on "user_id" - const position = createPosition(6, 5); + const position = createPosition(6, 6); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -1727,7 +1751,7 @@ Ref: posts.user_id > users.id`; const model = createMockTextModel(program); // Position on "posts" (non-existent table) - const position = createPosition(5, 8); + const position = createPosition(5, 9); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -1743,7 +1767,7 @@ Ref: posts.user_id > users.id`; const model = createMockTextModel(program); // Position on "status" - const position = createPosition(1, 8); + const position = createPosition(1, 9); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -1760,7 +1784,7 @@ Ref: posts.user_id > users.id`; const model = createMockTextModel(program); // Position on "users" - const position = createPosition(1, 9); + const position = createPosition(1, 10); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -1820,7 +1844,7 @@ Ref: posts.(author_first, author_last) > users.(first_name, last_name)`; const model = createMockTextModel(program); // Position on "author_last" (doesn't exist in posts) - const position = createPosition(10, 29); + const position = createPosition(10, 30); const definitions = definitionProvider.provideDefinition(model, position); expect(definitions).toMatchInlineSnapshot('[]'); @@ -2688,7 +2712,7 @@ Ref: orders.(merchant_id, country) > merchants.(id, country_code)`; const model = createMockTextModel(program); // Position inside empty block - const position = createPosition(2, 1); + const position = createPosition(2, 2); const definitions = definitionProvider.provideDefinition(model, position); expect(Array.isArray(definitions)).toBe(true); @@ -2731,4 +2755,237 @@ Ref: users.id > posts.user_id`; expect(Array.isArray(definitions)).toBe(true); }); }); + + describe('should find definition for Records elements', () => { + it('- should find table definition from top-level Records', () => { + const program = `Table users { + id int pk + name varchar + email varchar +} + +Records users(id, name, email) { + 1, "John", "john@example.com" + 2, "Jane", "jane@example.com" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "users" in Records declaration + const position = createPosition(7, 10); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 2, + "endLineNumber": 5, + "startColumn": 1, + "startLineNumber": 1, + }, + "uri": "", + }, + ] + `); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toMatchInlineSnapshot(` + "Table users { + id int pk + name varchar + email varchar + }" + `); + }); + + it('- should find column definition from Records column list', () => { + const program = `Table users { + id int pk + name varchar + email varchar +} + +Records users(id, name, email) { + 1, "John", "john@example.com" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "name" in Records column list + const position = createPosition(7, 19); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + expect(definitions).toMatchInlineSnapshot('[]'); + }); + + it('- should find schema-qualified table from Records', () => { + const program = `Table public.orders { + id int pk + customer_name varchar + total decimal +} + +Records public.orders(id, customer_name) { + 1, "John Doe" + 2, "Jane Smith" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "orders" in schema-qualified Records + const position = createPosition(7, 17); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 2, + "endLineNumber": 5, + "startColumn": 1, + "startLineNumber": 1, + }, + "uri": "", + }, + ] + `); + }); + + it('- should find enum definition from Records data', () => { + const program = `Enum order_status { + pending + processing + completed +} + +Table orders { + id int pk + status order_status +} + +Records orders(id, status) { + 1, order_status.pending + 2, order_status.completed +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "order_status" enum in Records data + const position = createPosition(13, 9); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 2, + "endLineNumber": 5, + "startColumn": 1, + "startLineNumber": 1, + }, + "uri": "", + }, + ] + `); + }); + + it('- should find enum field definition from Records data', () => { + const program = `Enum order_status { + pending + processing + completed +} + +Table orders { + id int pk + status order_status +} + +Records orders(id, status) { + 1, order_status.pending + 2, order_status.completed +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "pending" enum field in Records data + const position = createPosition(13, 20); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 10, + "endLineNumber": 2, + "startColumn": 3, + "startLineNumber": 2, + }, + "uri": "", + }, + ] + `); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toMatchInlineSnapshot('"pending"'); + }); + + it('- should find column definition from Records inside table', () => { + const program = `Table products { + id integer [pk] + name varchar + price decimal + + Records (id, name, price) { + 1, "Laptop", 999.99 + 2, "Mouse", 29.99 + } +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "name" in Records column list inside table + const position = createPosition(6, 17); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 15, + "endLineNumber": 3, + "startColumn": 3, + "startLineNumber": 3, + }, + "uri": "", + }, + ] + `); + }); + }); }); diff --git a/packages/dbml-parse/__tests__/examples/services/definition/records.test.ts b/packages/dbml-parse/__tests__/examples/services/definition/records.test.ts new file mode 100644 index 000000000..4163d53aa --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/services/definition/records.test.ts @@ -0,0 +1,380 @@ +import { describe, expect, it } from 'vitest'; +import Compiler from '@/compiler'; +import DBMLDefinitionProvider from '@/services/definition/provider'; +import { createMockTextModel, createPosition, extractTextFromRange } from '../../../utils'; + +describe('[example - records] DefinitionProvider - Records', () => { + describe('should find table definition from records', () => { + it('- should find table definition from records table name', () => { + const program = `Table users { + id int + name varchar +} + +records users(id, name) { + 1, "Alice" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "users" in "records users(id, name)" + const position = createPosition(6, 10); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + expect(definitions.length).toBe(1); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 2, + "endLineNumber": 4, + "startColumn": 1, + "startLineNumber": 1, + }, + "uri": "", + }, + ] + `); + + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toMatchInlineSnapshot(` + "Table users { + id int + name varchar + }" + `); + }); + + it('- should find table definition from schema-qualified records', () => { + const program = `Table auth.users { + id int + email varchar +} + +records auth.users(id, email) { + 1, "alice@example.com" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "users" in "records auth.users" + const position = createPosition(6, 15); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + expect(definitions.length).toBe(1); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 2, + "endLineNumber": 4, + "startColumn": 1, + "startLineNumber": 1, + }, + "uri": "", + }, + ] + `); + + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toMatchInlineSnapshot(` + "Table auth.users { + id int + email varchar + }" + `); + }); + + it('- should find table definition from schema-qualified table name in records call expression', () => { + const program = `Table auth.users { + id int + email varchar +} + +Table users { + id int + name varchar +} + +records auth.users(id, email) { + 1, "alice@example.com" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "users" in the call expression "auth.users(id, email)" + const position = createPosition(11, 15); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + expect(definitions.length).toBe(1); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 2, + "endLineNumber": 4, + "startColumn": 1, + "startLineNumber": 1, + }, + "uri": "", + }, + ] + `); + + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toMatchInlineSnapshot(` + "Table auth.users { + id int + email varchar + }" + `); + }); + }); + + describe('should find column definition from records', () => { + it('- should find column definition from records column list', () => { + const program = `Table users { + id int + name varchar +} + +records users(id, name) { + 1, "Alice" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "id" in "records users(id, name)" + const position = createPosition(6, 16); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + expect(definitions.length).toBe(1); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 9, + "endLineNumber": 2, + "startColumn": 3, + "startLineNumber": 2, + }, + "uri": "", + }, + ] + `); + + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toBe('id int'); + }); + + it('- should find column definition from second column in list', () => { + const program = `Table users { + id int + name varchar +} + +records users(id, name) { + 1, "Alice" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "name" in "records users(id, name)" + const position = createPosition(6, 20); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + expect(definitions.length).toBe(1); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 15, + "endLineNumber": 3, + "startColumn": 3, + "startLineNumber": 3, + }, + "uri": "", + }, + ] + `); + + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toBe('name varchar'); + }); + }); + + describe('should find enum definition from records data', () => { + it('- should find enum definition from records enum reference', () => { + const program = `Enum status { + active + inactive +} + +Table users { + id int + status status +} + +records users(id, status) { + 1, status.active +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "status" in "status.active" + const position = createPosition(12, 7); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + expect(definitions.length).toBe(1); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 2, + "endLineNumber": 4, + "startColumn": 1, + "startLineNumber": 1, + }, + "uri": "", + }, + ] + `); + + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toMatchInlineSnapshot(` + "Enum status { + active + inactive + }" + `); + }); + + it('- should find enum field definition from records data', () => { + const program = `Enum status { + active + inactive +} + +Table users { + id int + status status +} + +records users(id, status) { + 1, status.active +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "active" in "status.active" + const position = createPosition(12, 14); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + expect(definitions.length).toBe(1); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 9, + "endLineNumber": 2, + "startColumn": 3, + "startLineNumber": 2, + }, + "uri": "", + }, + ] + `); + + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toBe('active'); + }); + + it('- should find schema-qualified enum field definition', () => { + const program = `Enum auth.role { + admin + user +} + +Table auth.users { + id int + role auth.role +} + +records auth.users(id, role) { + 1, auth.role.admin +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const definitionProvider = new DBMLDefinitionProvider(compiler); + const model = createMockTextModel(program); + + // Position on "admin" in "auth.role.admin" + const position = createPosition(12, 20); + const definitions = definitionProvider.provideDefinition(model, position); + + expect(Array.isArray(definitions)).toBeTruthy(); + if (!Array.isArray(definitions)) return; + expect(definitions.length).toBe(1); + + expect(definitions).toMatchInlineSnapshot(` + [ + { + "range": { + "endColumn": 8, + "endLineNumber": 2, + "startColumn": 3, + "startLineNumber": 2, + }, + "uri": "", + }, + ] + `); + + const sourceText = extractTextFromRange(program, definitions[0].range); + expect(sourceText).toBe('admin'); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/services/inlineCompletions_records.test.ts b/packages/dbml-parse/__tests__/examples/services/inlineCompletions_records.test.ts new file mode 100644 index 000000000..29f1da005 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/services/inlineCompletions_records.test.ts @@ -0,0 +1,369 @@ +import { describe, expect, it } from 'vitest'; +import Compiler from '@/compiler'; +import DBMLCompletionItemProvider from '@/services/suggestions/provider'; +import { createMockTextModel, createPosition } from '../../utils'; + +describe('[snapshot] CompletionItemProvider - Records Row Snippets', () => { + describe('should suggest record row snippets with types on empty line in Records body', () => { + it('- should suggest completion with types after opening brace', () => { + const program = ` + Table users { + id int [pk] + name varchar + email varchar + } + + Records users(id, name, email) { + + + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + // Position right after opening brace on new line + const position = createPosition(9, 9); + const result = provider.provideCompletionItems(model, position); + + expect(result).toBeDefined(); + expect(result?.suggestions).toBeDefined(); + expect(result?.suggestions.length).toBeGreaterThan(0); + expect(result?.suggestions[0].label).toEqual('Record row snippet'); + expect(result?.suggestions[0].insertText).toEqual('${1:id (int)}, ${2:name (varchar)}, ${3:email (varchar)}'); + }); + + it('- should suggest completion with correct column order and types', () => { + const program = ` + Table products { + product_id int [pk] + product_name varchar + price decimal + in_stock boolean + } + + Records products(product_id, product_name, price, in_stock) { + + + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(10, 9); + const result = provider.provideCompletionItems(model, position); + + expect(result).toBeDefined(); + expect(result?.suggestions[0].insertText).toEqual('${1:product_id (int)}, ${2:product_name (varchar)}, ${3:price (decimal)}, ${4:in_stock (boolean)}'); + }); + + it('- should work with schema-qualified tables', () => { + const program = ` + Table auth.users { + id int [pk] + username varchar + password_hash varchar + } + + Records auth.users(id, username, password_hash) { + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(9, 9); + const result = provider.provideCompletionItems(model, position); + + expect(result).toBeDefined(); + expect(result?.suggestions[0].insertText).toEqual('${1:id (int)}, ${2:username (varchar)}, ${3:password_hash (varchar)}'); + }); + + it('- should work with Records inside Table', () => { + const program = ` + Table orders { + order_id int [pk] + customer_name varchar + total decimal + + Records { + + } + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(8, 11); + const result = provider.provideCompletionItems(model, position); + + expect(result).toBeDefined(); + expect(result?.suggestions[0].insertText).toEqual('${1:order_id (int)}, ${2:customer_name (varchar)}, ${3:total (decimal)}'); + }); + + it('- should suggest after existing records', () => { + const program = ` + Table users { + id int + name varchar + email varchar + } + + Records users { + 1, "Alice", "alice@example.com" + 2, "Bob", "bob@example.com" + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + // Position at the end of line 10 (after the last record) + const position = createPosition(10, 44); + const result = provider.provideCompletionItems(model, position); + + // Should suggest record row snippet if positioned on a new empty line + // This test position is at the end of the line, not on an empty line + // So it should not suggest the record row snippet + const recordSnippet = result?.suggestions?.find((s) => s.label === 'Record row snippet'); + // Note: This may not trigger since position is at end of line, not on empty line + if (recordSnippet) { + expect(recordSnippet.insertText).toEqual('${1:id (int)}, ${2:name (varchar)}, ${3:email (varchar)}'); + } + }); + + it('- should work with single column table', () => { + const program = ` + Table counter { + count int + } + + Records counter(count) { + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(7, 9); + const result = provider.provideCompletionItems(model, position); + + expect(result).toBeDefined(); + expect(result?.suggestions[0].insertText).toEqual('${1:count (int)}'); + }); + + it('- should preserve column names with special characters and show types', () => { + const program = ` + Table "special-table" { + "column-1" int + "column 2" varchar + "column.3" boolean + } + + Records "special-table"("column-1", "column 2", "column.3") { + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(9, 9); + const result = provider.provideCompletionItems(model, position); + + expect(result).toBeDefined(); + const insertText = result?.suggestions[0].insertText as string; + expect(insertText).toContain('column-1 (int)'); + expect(insertText).toContain('column 2 (varchar)'); + expect(insertText).toContain('column.3 (boolean)'); + }); + + it('- should not suggest inside existing record entry', () => { + const program = ` + Table users { + id int + name varchar + } + + Records users { + 1, "Alice" + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + // Position inside the record entry (after the comma) + const position = createPosition(8, 14); + const result = provider.provideCompletionItems(model, position); + + // Should not suggest record row snippet when inside a function application + // (may return other suggestions or empty array) + const recordSnippet = result?.suggestions?.find((s) => s.label === 'Record row snippet'); + expect(recordSnippet).toBeUndefined(); + }); + + it('- should not suggest in Records header', () => { + const program = ` + Table users { + id int + name varchar + } + + Records users { + 1, "Alice" + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + // Position in the header (after "Records ") + const position = createPosition(7, 17); + const result = provider.provideCompletionItems(model, position); + + // Should not suggest record row snippet in header + // (may return other suggestions like schema.table names) + const recordSnippet = result?.suggestions?.find((s) => s.label === 'Record row snippet'); + expect(recordSnippet).toBeUndefined(); + }); + + it('- should not suggest in non-Records scope', () => { + const program = ` + Table users { + id int + name varchar + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + // Position inside Table body + const position = createPosition(3, 15); + const result = provider.provideCompletionItems(model, position); + + // Should not suggest record row snippet when not in RECORDS scope + const recordSnippet = result?.suggestions?.find((s) => s.label === 'Record row snippet'); + expect(recordSnippet).toBeUndefined(); + }); + + it('- should handle table with many columns', () => { + const program = ` + Table employee { + emp_id int [pk] + first_name varchar + last_name varchar + email varchar + phone varchar + hire_date date + salary decimal + department varchar + manager_id int + is_active boolean + } + + Records employee(emp_id, first_name, last_name, email, phone, hire_date, salary, department, manager_id, is_active) { + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(16, 9); + const result = provider.provideCompletionItems(model, position); + + expect(result).toBeDefined(); + const insertText = result?.suggestions[0].insertText as string; + expect(insertText).toBeDefined(); + // Should have all 10 columns separated by commas + const columnCount = insertText.split(',').length; + expect(columnCount).toBe(10); + // Should have ${1:col (type)} format + expect(insertText).toContain('${1:emp_id (int)}'); + expect(insertText).toContain('${10:is_active (boolean)}'); + }); + }); + + describe('should handle edge cases', () => { + it('- should not crash with empty table', () => { + const program = ` + Table empty_table { + } + + Records empty_table { + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(6, 9); + const result = provider.provideCompletionItems(model, position); + + // Should not return record row snippet when no columns + const recordSnippet = result?.suggestions?.find((s) => s.label === 'Record row snippet'); + expect(recordSnippet).toBeUndefined(); + }); + + it('- should work with Records using call expression', () => { + const program = ` + Table products { + id int + name varchar + price decimal + } + + Records products(id, name, price) { + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(9, 9); + const result = provider.provideCompletionItems(model, position); + + expect(result).toBeDefined(); + expect(result?.suggestions[0].insertText).toEqual('${1:id (int)}, ${2:name (varchar)}, ${3:price (decimal)}'); + }); + + it('- should handle Records with subset of columns specified', () => { + const program = ` + Table users { + id int + name varchar + email varchar + created_at timestamp + } + + Records users(id, name) { + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(10, 9); + const result = provider.provideCompletionItems(model, position); + + expect(result).toBeDefined(); + // Should suggest only the columns specified in Records header + const insertText = result?.suggestions[0].insertText as string; + expect(insertText).toContain('id (int)'); + expect(insertText).toContain('name (varchar)'); + expect(insertText).not.toContain('email (varchar)'); + expect(insertText).not.toContain('created_at (timestamp)'); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/services/references.test.ts b/packages/dbml-parse/__tests__/examples/services/references/general.test.ts similarity index 87% rename from packages/dbml-parse/__tests__/examples/services/references.test.ts rename to packages/dbml-parse/__tests__/examples/services/references/general.test.ts index b0fa6db63..0390c2967 100644 --- a/packages/dbml-parse/__tests__/examples/services/references.test.ts +++ b/packages/dbml-parse/__tests__/examples/services/references/general.test.ts @@ -1,9 +1,9 @@ import { describe, expect, it } from 'vitest'; import Compiler from '@/compiler'; import DBMLReferencesProvider from '@/services/references/provider'; -import { createPosition, createMockTextModel, extractTextFromRange } from '../../utils'; +import { createPosition, createMockTextModel, extractTextFromRange } from '../../../utils'; -describe('[snapshot] ReferencesProvider', () => { +describe('[example] ReferencesProvider', () => { it('should return empty array when no references found', () => { const program = 'Table test { id int }'; const compiler = new Compiler(); @@ -941,4 +941,144 @@ Ref: posts.user_id > "user-data".id`; expect(Array.isArray(references)).toBe(true); }); }); + + describe('should find references for Records elements', () => { + it('- should find all Records references to a table', () => { + const program = `Table users { + id int pk + name varchar +} + +Records users(id, name) { + 1, "John" + 2, "Jane" +} + +Records users(id) { + 3 +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "users" table declaration + const position = createPosition(1, 8); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBeGreaterThan(0); + expect(Array.isArray(references)).toBe(true); + }); + + it('- should find all references to a column from Records', () => { + const program = `Table users { + id int pk + name varchar + email varchar +} + +Records users(id, name, email) { + 1, "John", "john@example.com" +} + +Records users(email, name) { + "jane@example.com", "Jane" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "name" column declaration + const position = createPosition(3, 4); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBeGreaterThan(0); + expect(Array.isArray(references)).toBe(true); + }); + + it('- should find enum field references from Records data', () => { + const program = `Enum status { + active + inactive +} + +Table users { + id int pk + user_status status +} + +Records users(id, user_status) { + 1, status.active + 2, status.inactive + 3, status.active +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "active" enum field declaration + const position = createPosition(2, 4); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBeGreaterThan(0); + expect(Array.isArray(references)).toBe(true); + }); + + it('- should find schema-qualified table references from Records', () => { + const program = `Table public.orders { + id int pk + customer varchar +} + +Records public.orders(id, customer) { + 1, "John" +} + +Records public.orders(id) { + 2 +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "orders" table declaration + const position = createPosition(1, 16); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBeGreaterThan(0); + expect(Array.isArray(references)).toBe(true); + }); + + it('- should find column references from Records inside table', () => { + const program = `Table products { + id integer [pk] + name varchar + price decimal + + Records (id, name) { + 1, "Laptop" + 2, "Mouse" + } +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "id" column declaration + const position = createPosition(2, 4); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBeGreaterThan(0); + expect(Array.isArray(references)).toBe(true); + }); + }); }); diff --git a/packages/dbml-parse/__tests__/examples/services/references/records.test.ts b/packages/dbml-parse/__tests__/examples/services/references/records.test.ts new file mode 100644 index 000000000..2013a7c2e --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/services/references/records.test.ts @@ -0,0 +1,299 @@ +import { describe, expect, it } from 'vitest'; +import Compiler from '@/compiler'; +import DBMLReferencesProvider from '@/services/references/provider'; +import { createPosition, createMockTextModel, extractTextFromRange } from '../../../utils'; + +describe('[example] ReferencesProvider - Records', () => { + describe('should find all table references from records', () => { + it('- should find table references in records declarations', () => { + const program = `Table users { + id int + name varchar +} + +records users(id, name) { + 1, "Alice" +} + +records users(id, name) { + 2, "Bob" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "users" table declaration + const position = createPosition(1, 7); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBe(2); + references.forEach((ref) => { + const sourceText = extractTextFromRange(program, ref.range); + expect(sourceText).toBe('users'); + }); + }); + + it('- should find schema-qualified table references', () => { + const program = `Table auth.users { + id int + email varchar +} + +records auth.users(id, email) { + 1, "alice@example.com" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "users" table declaration + const position = createPosition(1, 12); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBe(1); + const sourceText = extractTextFromRange(program, references[0].range); + expect(sourceText).toBe('users'); + }); + + it('- should find schema-qualified table references in records call expression', () => { + const program = `Table public.orders { + id int + total decimal +} + +records public.orders(id, total) { + 1, 99.99 +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "orders" in "Table public.orders" declaration + const position = createPosition(1, 18); + const references = referencesProvider.provideReferences(model, position); + + // Should find the reference in "records public.orders(...)" + expect(references.length).toBe(1); + const sourceText = extractTextFromRange(program, references[0].range); + expect(sourceText).toBe('orders'); + }); + }); + + describe('should find all column references from records', () => { + it('- should find column references in records column list', () => { + const program = `Table users { + id int + name varchar +} + +records users(id, name) { + 1, "Alice" +} + +records users(id, name) { + 2, "Bob" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "id" column declaration + const position = createPosition(2, 4); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBe(2); + references.forEach((ref) => { + const sourceText = extractTextFromRange(program, ref.range); + expect(sourceText).toBe('id'); + }); + }); + + it('- should find multiple references for same column', () => { + const program = `Table users { + id int + name varchar +} + +records users(id, name) { + 1, "Alice" +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "name" column declaration + const position = createPosition(3, 4); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBe(1); + const sourceText = extractTextFromRange(program, references[0].range); + expect(sourceText).toBe('name'); + }); + }); + + describe('should find all enum references from records', () => { + it('- should find enum references in records data', () => { + const program = `Enum status { + active + inactive +} + +Table users { + id int + status status +} + +records users(id, status) { + 1, status.active + 2, status.inactive +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "status" enum declaration + const position = createPosition(1, 6); + const references = referencesProvider.provideReferences(model, position); + + // Enum should be referenced in: column type + 2 data rows + expect(references.length).toBe(3); + }); + + it('- should find schema-qualified enum references', () => { + const program = `Enum auth.role { + admin + user +} + +Table auth.users { + id int + role auth.role +} + +records auth.users(id, role) { + 1, auth.role.admin +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "role" enum declaration + const position = createPosition(1, 11); + const references = referencesProvider.provideReferences(model, position); + + // Enum should be referenced in: column type + 1 data row + expect(references.length).toBe(2); + }); + }); + + describe('should find all enum field references from records', () => { + it('- should find enum field references in records data', () => { + const program = `Enum status { + pending + active + completed +} + +Table tasks { + id int + status status +} + +records tasks(id, status) { + 1, status.pending + 2, status.active + 3, status.completed + 4, status.pending +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "pending" enum field declaration + const position = createPosition(2, 4); + const references = referencesProvider.provideReferences(model, position); + + // "pending" is referenced twice in records + expect(references.length).toBe(2); + references.forEach((ref) => { + const sourceText = extractTextFromRange(program, ref.range); + expect(sourceText).toBe('pending'); + }); + }); + + it('- should find single enum field reference', () => { + const program = `Enum status { + active + inactive +} + +Table users { + id int + status status +} + +records users(id, status) { + 1, status.active +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "active" enum field declaration + const position = createPosition(2, 4); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBe(1); + const sourceText = extractTextFromRange(program, references[0].range); + expect(sourceText).toBe('active'); + }); + + it('- should find schema-qualified enum field references', () => { + const program = `Enum auth.role { + admin + user +} + +Table auth.users { + id int + role auth.role +} + +records auth.users(id, role) { + 1, auth.role.admin + 2, auth.role.user +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const referencesProvider = new DBMLReferencesProvider(compiler); + const model = createMockTextModel(program); + + // Position on "admin" enum field declaration + const position = createPosition(2, 4); + const references = referencesProvider.provideReferences(model, position); + + expect(references.length).toBe(1); + const sourceText = extractTextFromRange(program, references[0].range); + expect(sourceText).toBe('admin'); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/services/suggestions.test.ts b/packages/dbml-parse/__tests__/examples/services/suggestions.test.ts index 7bab0d126..3f2513f85 100644 --- a/packages/dbml-parse/__tests__/examples/services/suggestions.test.ts +++ b/packages/dbml-parse/__tests__/examples/services/suggestions.test.ts @@ -16,11 +16,17 @@ describe('[snapshot] CompletionItemProvider', () => { // Test labels const labels = result.suggestions.map((s) => s.label); - expect(labels).toEqual(['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial']); - - // Test insertTexts + expect(labels).toContain('Table'); + expect(labels).toContain('TableGroup'); + expect(labels).toContain('Enum'); + expect(labels).toContain('Project'); + expect(labels).toContain('Ref'); + expect(labels).toContain('TablePartial'); + expect(labels).toContain('Records'); + + // Test insertTexts - should have Records keyword const insertTexts = result.suggestions.map((s) => s.insertText); - expect(insertTexts).toEqual(['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial']); + expect(insertTexts).toContain('Records'); }); it('- work even if some characters have been typed out', () => { @@ -34,11 +40,12 @@ describe('[snapshot] CompletionItemProvider', () => { // Test labels const labels = result.suggestions.map((s) => s.label); - expect(labels).toEqual(['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial']); + expect(labels).toContain('Table'); + expect(labels).toContain('Records'); - // Test insertTexts + // Test insertTexts - should have Records keyword const insertTexts = result.suggestions.map((s) => s.insertText); - expect(insertTexts).toEqual(['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial']); + expect(insertTexts).toContain('Records'); }); it('- work even if there are some not directly following nonsensical characters', () => { @@ -52,11 +59,12 @@ describe('[snapshot] CompletionItemProvider', () => { // Test labels const labels = result.suggestions.map((s) => s.label); - expect(labels).toEqual(['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial']); + expect(labels).toContain('Table'); + expect(labels).toContain('Records'); - // Test insertTexts + // Test insertTexts - should have Records keyword const insertTexts = result.suggestions.map((s) => s.insertText); - expect(insertTexts).toEqual(['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial']); + expect(insertTexts).toContain('Records'); }); it('- work even if there are some directly following nonsensical characters', () => { @@ -70,11 +78,12 @@ describe('[snapshot] CompletionItemProvider', () => { // Test labels const labels = result.suggestions.map((s) => s.label); - expect(labels).toEqual(['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial']); + expect(labels).toContain('Table'); + expect(labels).toContain('Records'); - // Test insertTexts + // Test insertTexts - should have Records keyword const insertTexts = result.suggestions.map((s) => s.insertText); - expect(insertTexts).toEqual(['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial']); + expect(insertTexts).toContain('Records'); }); }); @@ -121,21 +130,15 @@ describe('[snapshot] CompletionItemProvider', () => { // Test labels const labels = result.suggestions.map((s) => s.label); - expect(labels).toEqual([ - 'Note', - 'indexes', - 'checks', - - ]); + expect(labels).toContain('Note'); + expect(labels).toContain('indexes'); + expect(labels).toContain('checks'); + expect(labels).toContain('Records'); - // Test insertTexts + // Test insertTexts - should have Records keyword const insertTexts = result.suggestions.map((s) => s.insertText); - expect(insertTexts).toEqual([ - 'Note', - 'indexes', - 'checks', - - ]); + expect(insertTexts).toContain('Note'); + expect(insertTexts).toContain('Records'); }); it('- work when there is a comma following', () => { @@ -203,21 +206,15 @@ describe('[snapshot] CompletionItemProvider', () => { // Test labels const labels = result.suggestions.map((s) => s.label); - expect(labels).toEqual([ - 'Note', - 'indexes', - 'checks', - - ]); + expect(labels).toContain('Note'); + expect(labels).toContain('indexes'); + expect(labels).toContain('checks'); + expect(labels).toContain('Records'); - // Test insertTexts + // Test insertTexts - should have Records keyword const insertTexts = result.suggestions.map((s) => s.insertText); - expect(insertTexts).toEqual([ - 'Note', - 'indexes', - 'checks', - - ]); + expect(insertTexts).toContain('Note'); + expect(insertTexts).toContain('Records'); }); it('- should suggest after column definition', () => { @@ -231,21 +228,15 @@ describe('[snapshot] CompletionItemProvider', () => { // Test labels const labels = result.suggestions.map((s) => s.label); - expect(labels).toEqual([ - 'Note', - 'indexes', - 'checks', - - ]); + expect(labels).toContain('Note'); + expect(labels).toContain('indexes'); + expect(labels).toContain('checks'); + expect(labels).toContain('Records'); - // Test insertTexts + // Test insertTexts - should have Records keyword const insertTexts = result.suggestions.map((s) => s.insertText); - expect(insertTexts).toEqual([ - 'Note', - 'indexes', - 'checks', - - ]); + expect(insertTexts).toContain('Note'); + expect(insertTexts).toContain('Records'); }); }); @@ -1261,21 +1252,15 @@ describe('[snapshot] CompletionItemProvider', () => { // Test labels const labels = result.suggestions.map((s) => s.label); - expect(labels).toEqual([ - 'Note', - 'indexes', - 'checks', - - ]); + expect(labels).toContain('Note'); + expect(labels).toContain('indexes'); + expect(labels).toContain('checks'); + expect(labels).toContain('Records'); - // Test insertTexts + // Test insertTexts - should have Records keyword const insertTexts = result.suggestions.map((s) => s.insertText); - expect(insertTexts).toEqual([ - 'Note', - 'indexes', - 'checks', - - ]); + expect(insertTexts).toContain('Note'); + expect(insertTexts).toContain('Records'); }); it('- should suggest TablePartial names after tilde operator', () => { @@ -1430,8 +1415,7 @@ describe('[snapshot] CompletionItemProvider', () => { // Test insertTexts const insertTexts = result.suggestions.map((s) => s.insertText); expect(insertTexts).toEqual([ - '"user-table"', - + '""user-table""', ]); }); @@ -2137,4 +2121,154 @@ Table posts { expect(Array.isArray(result.suggestions)).toBe(true); }); }); + + describe('Records element suggestions', () => { + it('- should suggest table names for top-level Records', () => { + const program = `Table users { + id int pk + name varchar +} + +Table orders { + id int pk +} + +Records `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(10, 9); + const result = provider.provideCompletionItems(model, position); + + expect(result.suggestions.some((s) => s.label === 'users')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'orders')).toBe(true); + }); + + it('- should suggest column names for Records column list', () => { + const program = `Table users { + id int pk + name varchar + email varchar + age int +} + +Records users(id, )`; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(8, 19); + const result = provider.provideCompletionItems(model, position); + + expect(result.suggestions.some((s) => s.label === 'name')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'email')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'age')).toBe(true); + }); + + it('- should suggest schema-qualified table names', () => { + const program = `Table s.users { + id int pk +} + +Table s.orders { + id int pk +} + +Records s.`; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(9, 11); + const result = provider.provideCompletionItems(model, position); + + expect(result.suggestions.some((s) => s.label === 'users')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'orders')).toBe(true); + }); + + it('- should suggest column names for Records inside table', () => { + const program = `Table products { + id integer [pk] + name varchar + price decimal + + Records () +}`; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(6, 12); + const result = provider.provideCompletionItems(model, position); + + expect(result.suggestions.some((s) => s.label === 'id')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'name')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'price')).toBe(true); + }); + + it('- should suggest enum values in Records data rows', () => { + const program = `Enum status { + active + inactive + pending +} + +Table users { + id int pk + user_status status +} + +Records users(id, user_status) { + 1, status. +}`; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(13, 14); + const result = provider.provideCompletionItems(model, position); + + expect(result.suggestions.some((s) => s.label === 'active')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'inactive')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'pending')).toBe(true); + }); + + it('- should suggest Records keyword in table body', () => { + const program = `Table products { + id integer [pk] + name varchar + + +}`; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(5, 3); + const result = provider.provideCompletionItems(model, position); + + expect(result.suggestions.some((s) => s.label === 'Records')).toBe(true); + }); + + it('- should suggest column names in Records call expression', () => { + const program = `Table users { + id int pk + name varchar + email varchar +} + +Records users()`; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(7, 15); + const result = provider.provideCompletionItems(model, position); + + expect(result.suggestions.some((s) => s.label === 'id')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'name')).toBe(true); + expect(result.suggestions.some((s) => s.label === 'email')).toBe(true); + }); + }); }); diff --git a/packages/dbml-parse/__tests__/examples/services/suggestions_expand_all_columns.test.ts b/packages/dbml-parse/__tests__/examples/services/suggestions_expand_all_columns.test.ts new file mode 100644 index 000000000..8d23256e9 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/services/suggestions_expand_all_columns.test.ts @@ -0,0 +1,93 @@ +import { describe, expect, it } from 'vitest'; +import Compiler from '@/compiler'; +import DBMLCompletionItemProvider from '@/services/suggestions/provider'; +import { createMockTextModel, createPosition } from '../../utils'; + +describe('[example - suggestions] Expand * to all columns in Records', () => { + describe('nested records', () => { + it('- should suggest "* (all columns)" in nested records column list', () => { + const program = `Table users { + id int + name varchar + email varchar + + records () +}`; + const compiler = new Compiler(); + compiler.setSource(program); + + const suggestionProvider = new DBMLCompletionItemProvider(compiler); + const model = createMockTextModel(program); + + // Position after "records (" + const position = createPosition(6, 12); + const suggestions = suggestionProvider.provideCompletionItems(model, position); + + expect(suggestions).toBeDefined(); + expect(suggestions.suggestions.length).toBeGreaterThan(0); + + // Find the "* (all columns)" suggestion + const expandAllSuggestion = suggestions.suggestions.find((s) => s.label === '* (all columns)'); + expect(expandAllSuggestion).toBeDefined(); + expect(expandAllSuggestion!.insertText).toBe('id, name, email'); + }); + }); + + describe('top-level records', () => { + it('- should suggest "* (all columns)" in top-level Records column list', () => { + const program = `Table users { + id int + name varchar + email varchar +} + +Records users() { +} +`; + const compiler = new Compiler(); + compiler.setSource(program); + + const suggestionProvider = new DBMLCompletionItemProvider(compiler); + const model = createMockTextModel(program); + + // Position after "Records users(" - inside the parentheses + const position = createPosition(7, 15); + const suggestions = suggestionProvider.provideCompletionItems(model, position); + + expect(suggestions).toBeDefined(); + expect(suggestions.suggestions.length).toBeGreaterThan(0); + + // Find the "* (all columns)" suggestion + const expandAllSuggestion = suggestions.suggestions.find((s) => s.label === '* (all columns)'); + expect(expandAllSuggestion).toBeDefined(); + expect(expandAllSuggestion!.insertText).toBe('id, name, email'); + }); + + it('- should be the first suggestion', () => { + const program = `Table products { + product_id int + product_name varchar + price decimal +} + +Records products( +`; + const compiler = new Compiler(); + compiler.setSource(program); + + const suggestionProvider = new DBMLCompletionItemProvider(compiler); + const model = createMockTextModel(program); + + // Position after "Records products(" + const position = createPosition(7, 17); + const suggestions = suggestionProvider.provideCompletionItems(model, position); + + expect(suggestions).toBeDefined(); + expect(suggestions.suggestions.length).toBeGreaterThan(0); + + // The "* (all columns)" suggestion should be first + expect(suggestions.suggestions[0].label).toBe('* (all columns)'); + expect(suggestions.suggestions[0].insertText).toBe('product_id, product_name, price'); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/services/suggestions_records.test.ts b/packages/dbml-parse/__tests__/examples/services/suggestions_records.test.ts new file mode 100644 index 000000000..335164c5a --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/services/suggestions_records.test.ts @@ -0,0 +1,56 @@ +import { describe, expect, it } from 'vitest'; +import Compiler from '@/compiler'; +import DBMLCompletionItemProvider from '@/services/suggestions/provider'; +import { createMockTextModel, createPosition } from '../../utils'; + +describe('[snapshot] CompletionItemProvider - Records', () => { + describe('should NOT suggest record entry snippets in Records body (handled by inline completions)', () => { + it('- should not suggest snippet in Records body', () => { + const program = ` + Table users { + id int [pk] + name varchar + email varchar + + records { + + } + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + // Position inside the Records body (between the braces) + const position = createPosition(8, 13); + const result = provider.provideCompletionItems(model, position); + + // Should NOT have record entry snippet - now handled by inline completions + const recordEntrySnippet = result.suggestions.find((s) => s.label === 'Record entry'); + expect(recordEntrySnippet).toBeUndefined(); + }); + + it('- should not suggest snippet in top-level Records body', () => { + const program = ` + Table products { + id int + name varchar + } + + Records products(id, name) { + + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + const model = createMockTextModel(program); + const provider = new DBMLCompletionItemProvider(compiler); + const position = createPosition(8, 11); + const result = provider.provideCompletionItems(model, position); + + // Should NOT have record entry snippet - now handled by inline completions + const recordEntrySnippet = result.suggestions.find((s) => s.label === 'Record entry'); + expect(recordEntrySnippet).toBeUndefined(); + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/services/suggestions_utils_records.test.ts b/packages/dbml-parse/__tests__/examples/services/suggestions_utils_records.test.ts new file mode 100644 index 000000000..d809465b6 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/services/suggestions_utils_records.test.ts @@ -0,0 +1,295 @@ +import { describe, expect, it } from 'vitest'; +import Compiler from '@/compiler'; +import { generateRecordEntrySnippet, getColumnsFromTableSymbol } from '@/services/suggestions/utils'; +import { TableSymbol } from '@/core/analyzer/symbol/symbols'; + +describe('[unit] Suggestions Utils - Records', () => { + describe('generateRecordEntrySnippet', () => { + it('- should generate snippet with placeholders including types for single column', () => { + const columns = [{ name: 'id', type: 'int' }]; + const result = generateRecordEntrySnippet(columns); + expect(result).toBe('${1:id (int)}'); + }); + + it('- should generate snippet with placeholders including types for multiple columns', () => { + const columns = [ + { name: 'id', type: 'int' }, + { name: 'name', type: 'varchar' }, + { name: 'email', type: 'varchar' }, + ]; + const result = generateRecordEntrySnippet(columns); + expect(result).toBe('${1:id (int)}, ${2:name (varchar)}, ${3:email (varchar)}'); + }); + + it('- should generate snippet with correct placeholder indices', () => { + const columns = [ + { name: 'a', type: 'int' }, + { name: 'b', type: 'int' }, + { name: 'c', type: 'int' }, + { name: 'd', type: 'int' }, + { name: 'e', type: 'int' }, + ]; + const result = generateRecordEntrySnippet(columns); + expect(result).toBe('${1:a (int)}, ${2:b (int)}, ${3:c (int)}, ${4:d (int)}, ${5:e (int)}'); + }); + + it('- should handle column names with special characters', () => { + const columns = [ + { name: 'column-1', type: 'int' }, + { name: 'column 2', type: 'varchar' }, + { name: 'column.3', type: 'boolean' }, + ]; + const result = generateRecordEntrySnippet(columns); + expect(result).toBe('${1:column-1 (int)}, ${2:column 2 (varchar)}, ${3:column.3 (boolean)}'); + }); + + it('- should return empty string for empty columns array', () => { + const columns: Array<{ name: string; type: string }> = []; + const result = generateRecordEntrySnippet(columns); + expect(result).toBe(''); + }); + + it('- should handle many columns', () => { + const columns = Array.from({ length: 20 }, (_, i) => ({ + name: `col${i + 1}`, + type: 'varchar', + })); + const result = generateRecordEntrySnippet(columns); + + // Should have 20 placeholders + const placeholderCount = (result.match(/\$\{/g) || []).length; + expect(placeholderCount).toBe(20); + + // Should start with ${1:col1 (varchar)} + expect(result).toMatch(/^\$\{1:col1 \(varchar\)\}/); + + // Should end with ${20:col20 (varchar)} + expect(result).toMatch(/\$\{20:col20 \(varchar\)\}$/); + }); + + it('- should preserve exact column name and type in placeholder', () => { + const columns = [ + { name: 'UserId', type: 'int' }, + { name: 'FirstName', type: 'varchar' }, + { name: 'LAST_NAME', type: 'varchar' }, + ]; + const result = generateRecordEntrySnippet(columns); + expect(result).toBe('${1:UserId (int)}, ${2:FirstName (varchar)}, ${3:LAST_NAME (varchar)}'); + }); + }); + + describe('getColumnsFromTableSymbol', () => { + it('- should extract columns with types from table symbol', () => { + const program = ` + Table users { + id int [pk] + name varchar + email varchar + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + compiler.parse._(); // Trigger parsing + + // Get the table symbol + const ast = compiler.parse.ast(); + const tableElement = ast.body[0]; + const tableSymbol = tableElement.symbol; + + expect(tableSymbol).toBeInstanceOf(TableSymbol); + + if (tableSymbol instanceof TableSymbol) { + const columns = getColumnsFromTableSymbol(tableSymbol, compiler); + + expect(columns).not.toBeNull(); + + expect(columns).not.toBeNull(); + expect(columns!.length).toBe(3); + expect(columns![0].name).toBe('id'); + expect(columns![0].type).toBe('int'); + expect(columns![1].name).toBe('name'); + expect(columns![1].type).toBe('varchar'); + expect(columns![2].name).toBe('email'); + expect(columns![2].type).toBe('varchar'); + } + }); + + it('- should maintain column order and extract types', () => { + const program = ` + Table products { + product_id int [pk] + product_name varchar + price decimal + in_stock boolean + created_at timestamp + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + compiler.parse._(); + + const ast = compiler.parse.ast(); + const tableElement = ast.body[0]; + const tableSymbol = tableElement.symbol; + + if (tableSymbol instanceof TableSymbol) { + const columns = getColumnsFromTableSymbol(tableSymbol, compiler); + + expect(columns).not.toBeNull(); + + expect(columns!.length).toBe(5); + expect(columns![0].name).toBe('product_id'); + expect(columns![0].type).toBe('int'); + expect(columns![1].name).toBe('product_name'); + expect(columns![1].type).toBe('varchar'); + expect(columns![2].name).toBe('price'); + expect(columns![2].type).toBe('decimal'); + expect(columns![3].name).toBe('in_stock'); + expect(columns![3].type).toBe('boolean'); + expect(columns![4].name).toBe('created_at'); + expect(columns![4].type).toBe('timestamp'); + } + }); + + it('- should handle table with single column', () => { + const program = ` + Table counter { + count int + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + compiler.parse._(); + + const ast = compiler.parse.ast(); + const tableElement = ast.body[0]; + const tableSymbol = tableElement.symbol; + + if (tableSymbol instanceof TableSymbol) { + const columns = getColumnsFromTableSymbol(tableSymbol, compiler); + + expect(columns).not.toBeNull(); + + expect(columns!.length).toBe(1); + expect(columns![0].name).toBe('count'); + expect(columns![0].type).toBe('int'); + } + }); + + it('- should handle quoted column names', () => { + const program = ` + Table "special-table" { + "column-1" int + "column 2" varchar + "column.3" boolean + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + compiler.parse._(); + + const ast = compiler.parse.ast(); + const tableElement = ast.body[0]; + const tableSymbol = tableElement.symbol; + + if (tableSymbol instanceof TableSymbol) { + const columns = getColumnsFromTableSymbol(tableSymbol, compiler); + + expect(columns).not.toBeNull(); + + expect(columns!.length).toBe(3); + expect(columns![0].name).toBe('column-1'); + expect(columns![0].type).toBe('int'); + expect(columns![1].name).toBe('column 2'); + expect(columns![1].type).toBe('varchar'); + expect(columns![2].name).toBe('column.3'); + expect(columns![2].type).toBe('boolean'); + } + }); + + it('- should return empty array for empty table', () => { + const program = ` + Table empty_table { + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + compiler.parse._(); + + const ast = compiler.parse.ast(); + const tableElement = ast.body[0]; + const tableSymbol = tableElement.symbol; + + if (tableSymbol instanceof TableSymbol) { + const columns = getColumnsFromTableSymbol(tableSymbol, compiler); + + expect(columns).not.toBeNull(); + expect(columns!.length).toBe(0); + } + }); + + it('- should only extract columns, not other symbols', () => { + const program = ` + Table users { + id int [pk] + name varchar + + indexes { + (id, name) + } + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + compiler.parse._(); + + const ast = compiler.parse.ast(); + const tableElement = ast.body[0]; + const tableSymbol = tableElement.symbol; + + if (tableSymbol instanceof TableSymbol) { + const columns = getColumnsFromTableSymbol(tableSymbol, compiler); + + expect(columns).not.toBeNull(); + + // Should only get columns, not indexes + expect(columns!.length).toBe(2); + expect(columns![0].name).toBe('id'); + expect(columns![0].type).toBe('int'); + expect(columns![1].name).toBe('name'); + expect(columns![1].type).toBe('varchar'); + } + }); + + it('- should work with schema-qualified tables', () => { + const program = ` + Table auth.users { + id int [pk] + username varchar + password_hash varchar + } + `; + const compiler = new Compiler(); + compiler.setSource(program); + compiler.parse._(); + + const ast = compiler.parse.ast(); + const tableElement = ast.body[0]; + const tableSymbol = tableElement.symbol; + + if (tableSymbol instanceof TableSymbol) { + const columns = getColumnsFromTableSymbol(tableSymbol, compiler); + + expect(columns).not.toBeNull(); + + expect(columns!.length).toBe(3); + expect(columns![0].name).toBe('id'); + expect(columns![0].type).toBe('int'); + expect(columns![1].name).toBe('username'); + expect(columns![1].type).toBe('varchar'); + expect(columns![2].name).toBe('password_hash'); + expect(columns![2].type).toBe('varchar'); + } + }); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/validator/records.test.ts b/packages/dbml-parse/__tests__/examples/validator/records.test.ts new file mode 100644 index 000000000..8045fc8d1 --- /dev/null +++ b/packages/dbml-parse/__tests__/examples/validator/records.test.ts @@ -0,0 +1,200 @@ +import { describe, expect, test } from 'vitest'; +import { analyze } from '@tests/utils'; + +describe('[example] records validator', () => { + test('should accept valid records', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept records with various data types', () => { + const source = ` + Table data { + int_col int + float_col decimal(10,2) + bool_col boolean + str_col varchar + } + records data(int_col, float_col, bool_col, str_col) { + 1, 3.14, true, "hello" + 2, -2.5, false, "world" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept records with null values', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, null + 2, "" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept records with function expressions', () => { + const source = ` + Table users { + id int [pk] + created_at timestamp + } + records users(id, created_at) { + 1, \`now()\` + 2, \`uuid_generate_v4()\` + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept records with scientific notation', () => { + const source = ` + Table data { + id int + value decimal + } + records data(id, value) { + 1, 1e10 + 2, 3.14e-5 + 3, 2E+8 + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept records with negative numbers', () => { + const source = ` + Table data { + id int + value int + } + records data(id, value) { + 1, -100 + 2, -999 + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept records with enum values', () => { + const source = ` + Enum status { active\n inactive } + Table users { + id int + status status + } + records users(id, status) { + 1, status.active + 2, status.inactive + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should detect unknown table in records', () => { + const source = ` + records nonexistent(id, name) { + 1, "Alice" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(1); + expect(errors[0].diagnostic).toBe("Table 'nonexistent' does not exist in Schema 'public'"); + }); + + test('should accept multiple records blocks for same table', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + } + records users(id, name) { + 2, "Bob" + } + records users(id, name) { + 3, "Charlie" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept records with schema-qualified table name', () => { + const source = ` + Table auth.users { + id int [pk] + email varchar + } + records auth.users(id, email) { + 1, "alice@example.com" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept records with quoted column names', () => { + const source = ` + Table users { + "user-id" int [pk] + "user-name" varchar + } + records users("user-id", "user-name") { + 1, "Alice" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept empty records block', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); + + test('should accept records with only one column', () => { + const source = ` + Table ids { + id int [pk] + } + records ids(id) { + 1 + 2 + 3 + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBe(0); + }); +}); diff --git a/packages/dbml-parse/__tests__/examples/validator/validator.test.ts b/packages/dbml-parse/__tests__/examples/validator/validator.test.ts index 316cbff3e..afd18928f 100644 --- a/packages/dbml-parse/__tests__/examples/validator/validator.test.ts +++ b/packages/dbml-parse/__tests__/examples/validator/validator.test.ts @@ -1095,4 +1095,201 @@ Table users { name varchar }`; }); }); }); + + describe('records validation', () => { + test('should accept valid records', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + 2, "Bob" + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept records with various data types', () => { + const source = ` + Table data { + int_col int + float_col decimal(10,2) + bool_col boolean + str_col varchar + } + records data(int_col, float_col, bool_col, str_col) { + 1, 3.14, true, "hello" + 2, -2.5, false, "world" + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept records with null values', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, null + 2, "" + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept records with function expressions', () => { + const source = ` + Table users { + id int [pk] + created_at timestamp + } + records users(id, created_at) { + 1, \`now()\` + 2, \`uuid_generate_v4()\` + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept records with scientific notation', () => { + const source = ` + Table data { + id int + value decimal + } + records data(id, value) { + 1, 1e10 + 2, 3.14e-5 + 3, 2E+8 + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept records with negative numbers', () => { + const source = ` + Table data { + id int + value int + } + records data(id, value) { + 1, -100 + 2, -999 + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept records with enum values', () => { + const source = ` + Enum status { active\n inactive } + Table users { + id int + status status + } + records users(id, status) { + 1, status.active + 2, status.inactive + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should detect unknown table in records', () => { + const source = ` + records nonexistent(id, name) { + 1, "Alice" + } + `; + const errors = analyze(source).getErrors(); + expect(errors.length).toBeGreaterThan(0); + }); + + test('should accept multiple records blocks for same table', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + 1, "Alice" + } + records users(id, name) { + 2, "Bob" + } + records users(id, name) { + 3, "Charlie" + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept records with schema-qualified table name', () => { + const source = ` + Table auth.users { + id int [pk] + email varchar + } + records auth.users(id, email) { + 1, "alice@example.com" + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept records with quoted column names', () => { + const source = ` + Table users { + "user-id" int [pk] + "user-name" varchar + } + records users("user-id", "user-name") { + 1, "Alice" + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept empty records block', () => { + const source = ` + Table users { + id int [pk] + name varchar + } + records users(id, name) { + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + + test('should accept records with only one column', () => { + const source = ` + Table ids { + id int [pk] + } + records ids(id) { + 1 + 2 + 3 + } + `; + const errors = analyze(source).getErrors(); + expect(errors).toHaveLength(0); + }); + }); }); diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/duplicate_name.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/duplicate_name.out.json index cb8fdf72a..eb5631e26 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/duplicate_name.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/duplicate_name.out.json @@ -761,5 +761,6 @@ "end": 27, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/enum_as_default_column_value.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/enum_as_default_column_value.out.json index 6460ff58d..381356abf 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/enum_as_default_column_value.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/enum_as_default_column_value.out.json @@ -8542,5 +8542,6 @@ "end": 598, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/enum_name.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/enum_name.out.json index 5240522e3..c2a5eeba1 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/enum_name.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/enum_name.out.json @@ -5580,5 +5580,6 @@ "end": 168, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/erroneous.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/erroneous.out.json index 64e0ea461..7b4aa98f5 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/erroneous.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/erroneous.out.json @@ -6696,5 +6696,6 @@ "end": 215, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/nonexisting_inline_ref_column_in_table.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/nonexisting_inline_ref_column_in_table.out.json index 56f972b0e..16e4536ac 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/nonexisting_inline_ref_column_in_table.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/nonexisting_inline_ref_column_in_table.out.json @@ -3215,5 +3215,6 @@ "end": 145, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/nonexisting_inline_ref_column_in_table_partial.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/nonexisting_inline_ref_column_in_table_partial.out.json index 0a1c93d54..9ef430bef 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/nonexisting_inline_ref_column_in_table_partial.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/nonexisting_inline_ref_column_in_table_partial.out.json @@ -3215,5 +3215,6 @@ "end": 152, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/old_undocumented_syntax.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/old_undocumented_syntax.out.json index d4e88fa32..af485af57 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/old_undocumented_syntax.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/old_undocumented_syntax.out.json @@ -8837,5 +8837,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/ref.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/ref.out.json index 0208cb5ed..15a9f0487 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/ref.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/ref.out.json @@ -1911,5 +1911,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/ref_name_and_color_setting.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/ref_name_and_color_setting.out.json index a0f2a7563..d8bb32fc0 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/ref_name_and_color_setting.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/ref_name_and_color_setting.out.json @@ -4246,5 +4246,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/ref_setting.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/ref_setting.out.json index e1b7df3cb..b886ac05c 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/ref_setting.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/ref_setting.out.json @@ -2892,5 +2892,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/sticky_notes.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/sticky_notes.out.json index b7a6b4e77..268e2d8b9 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/sticky_notes.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/sticky_notes.out.json @@ -2520,5 +2520,6 @@ "end": 146, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/table_partial.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/table_partial.out.json index 0bd0bd97a..1eed40069 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/table_partial.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/table_partial.out.json @@ -1322,5 +1322,6 @@ "end": 51, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/binder/output/unknown_table_group_field.out.json b/packages/dbml-parse/__tests__/snapshots/binder/output/unknown_table_group_field.out.json index d2f54babe..e091ba3a5 100644 --- a/packages/dbml-parse/__tests__/snapshots/binder/output/unknown_table_group_field.out.json +++ b/packages/dbml-parse/__tests__/snapshots/binder/output/unknown_table_group_field.out.json @@ -1293,5 +1293,6 @@ "end": 65, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_basic.in.dbml b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_basic.in.dbml new file mode 100644 index 000000000..24b5742f6 --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_basic.in.dbml @@ -0,0 +1,12 @@ +Table users { + id integer [pk] + name varchar + email varchar + age integer +} + +Records users(id, name, email, age) { + 1, "John Doe", "john@example.com", 30 + 2, "Jane Smith", "jane@example.com", 25 + 3, "Bob Johnson", "bob@example.com", 35 +} diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_inside_table.in.dbml b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_inside_table.in.dbml new file mode 100644 index 000000000..ded4b346a --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_inside_table.in.dbml @@ -0,0 +1,11 @@ +Table products { + id integer [pk] + name varchar + price decimal + + Records { + 1, "Laptop", 999.99 + 2, "Mouse", 29.99 + 3, "Keyboard", 79.99 + } +} diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_inside_table_with_columns.in.dbml b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_inside_table_with_columns.in.dbml new file mode 100644 index 000000000..882adad65 --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_inside_table_with_columns.in.dbml @@ -0,0 +1,14 @@ +Table employees { + id integer [pk] + first_name varchar + last_name varchar + department varchar + salary decimal + hire_date date + + Records (id, first_name, last_name, department) { + 1, "Alice", "Anderson", "Engineering" + 2, "Bob", "Brown", "Marketing" + 3, "Carol", "Chen", "Engineering" + } +} diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_with_nulls.in.dbml b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_with_nulls.in.dbml new file mode 100644 index 000000000..1b365e333 --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_with_nulls.in.dbml @@ -0,0 +1,13 @@ +Table users { + id integer [pk] + name varchar + email varchar + age integer + created_at timestamp +} + +Records users(id, name, email) { + 1, "Alice", + 2,, + 3, "Charlie", "charlie@example.com" +} diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_with_schema.in.dbml b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_with_schema.in.dbml new file mode 100644 index 000000000..55998c972 --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/input/records_with_schema.in.dbml @@ -0,0 +1,12 @@ +Table public.orders { + id integer [pk] + customer_name varchar + total decimal + status varchar +} + +Records public.orders(id, customer_name) { + 1, "John Doe" + 2, "Jane Smith" + 3, "Bob Wilson" +} diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/interpreter.test.ts b/packages/dbml-parse/__tests__/snapshots/interpreter/interpreter.test.ts index f9ce4b479..fe11ae392 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/interpreter.test.ts +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/interpreter.test.ts @@ -33,7 +33,7 @@ describe('[snapshot] interpreter', () => { 2, ); } else { - const res = new Interpreter(report.getValue()).interpret(); + const res = new Interpreter(report.getValue(), program).interpret(); if (res.getErrors().length > 0) { output = JSON.stringify( res.getErrors(), diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/array_type.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/array_type.out.json index d4d3d6196..0bf5d4f13 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/array_type.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/array_type.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "text", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -34,7 +35,8 @@ "type": { "schemaName": null, "type_name": "int[]", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -60,7 +62,8 @@ "type": { "schemaName": null, "type_name": "text[][]", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -108,7 +111,8 @@ "type": { "schemaName": null, "type_name": "integer[3][3]", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -150,5 +154,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/checks.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/checks.out.json index 2b7f91dab..d1afaf95e 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/checks.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/checks.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "TEXT", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -52,7 +53,8 @@ "type": { "schemaName": null, "type_name": "TEXT", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -146,7 +148,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -208,7 +211,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -292,7 +296,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -361,5 +366,6 @@ } ] } - ] + ], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/column_caller_type.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/column_caller_type.out.json index 2a5f02979..fc38911ae 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/column_caller_type.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/column_caller_type.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -34,7 +35,8 @@ "type": { "schemaName": null, "type_name": "nvarbinary(MAX)", - "args": "MAX" + "args": "MAX", + "isEnum": false }, "token": { "start": { @@ -57,7 +59,8 @@ "type": { "schemaName": null, "type_name": "varchar(MAX)", - "args": "MAX" + "args": "MAX", + "isEnum": false }, "token": { "start": { @@ -80,7 +83,8 @@ "type": { "schemaName": null, "type_name": "varbinary(MAX)", - "args": "MAX" + "args": "MAX", + "isEnum": false }, "token": { "start": { @@ -103,7 +107,11 @@ "type": { "schemaName": null, "type_name": "int(10)", - "args": "10" + "args": "10", + "lengthParam": { + "length": 10 + }, + "isEnum": false }, "token": { "start": { @@ -145,5 +153,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/comment.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/comment.out.json index 774bd9edd..efde7065d 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/comment.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/comment.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -36,7 +37,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -62,7 +64,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -102,7 +105,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -401,5 +405,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/default_tables.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/default_tables.out.json index 036c50d78..7f96a24f5 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/default_tables.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/default_tables.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -40,7 +41,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -66,7 +68,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -95,7 +98,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -146,7 +150,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -169,7 +174,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -192,7 +198,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -237,7 +244,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -262,7 +270,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -291,7 +300,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -321,7 +331,8 @@ "type": { "schemaName": null, "type_name": "float", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -350,7 +361,8 @@ "type": { "schemaName": null, "type_name": "boolean", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -379,7 +391,8 @@ "type": { "schemaName": null, "type_name": "date", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -427,5 +440,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/enum_as_default_column_value.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/enum_as_default_column_value.out.json index c3b7660c9..dd169cd88 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/enum_as_default_column_value.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/enum_as_default_column_value.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "text", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -34,7 +35,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -57,7 +59,8 @@ "type": { "schemaName": null, "type_name": "status", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -86,7 +89,8 @@ "type": { "schemaName": "demographic", "type_name": "gender", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -115,7 +119,8 @@ "type": { "schemaName": "demographic", "type_name": "age segment", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -368,5 +373,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/enum_tables.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/enum_tables.out.json index 6e87cca51..e2e8c9725 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/enum_tables.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/enum_tables.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -36,7 +37,8 @@ "type": { "schemaName": null, "type_name": "job_status", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -98,7 +100,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -121,7 +124,8 @@ "type": { "schemaName": null, "type_name": "order status", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -144,7 +148,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -418,5 +423,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/general_schema.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/general_schema.out.json index febd164d4..35287d08c 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/general_schema.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/general_schema.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -36,7 +37,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -62,7 +64,8 @@ "type": { "schemaName": null, "type_name": "orders_status", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -85,7 +88,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -131,7 +135,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -154,7 +159,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -177,7 +183,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -228,7 +235,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -253,7 +261,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -276,7 +285,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -302,7 +312,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -325,7 +336,8 @@ "type": { "schemaName": null, "type_name": "product status", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -348,7 +360,8 @@ "type": { "schemaName": null, "type_name": "datetime", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -486,7 +499,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -511,7 +525,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -534,7 +549,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -559,7 +575,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -582,7 +599,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -605,7 +623,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -628,7 +647,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -673,7 +693,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -698,7 +719,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -721,7 +743,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -744,7 +767,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -767,7 +791,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -812,7 +837,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -837,7 +863,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -860,7 +887,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -1431,5 +1459,6 @@ ], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/header_color_tables.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/header_color_tables.out.json index 332f1b567..0a2835ece 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/header_color_tables.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/header_color_tables.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -34,7 +35,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -57,7 +59,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -80,7 +83,8 @@ "type": { "schemaName": null, "type_name": "date_time", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -123,5 +127,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/index_table_partial.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/index_table_partial.out.json index 083e092a0..6039e0abb 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/index_table_partial.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/index_table_partial.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -81,7 +82,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -104,7 +106,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -129,7 +132,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -152,7 +156,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -175,7 +180,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -198,7 +204,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -221,7 +228,8 @@ "type": { "schemaName": null, "type_name": "boolean", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -554,5 +562,6 @@ ], "checks": [] } - ] + ], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/index_tables.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/index_tables.out.json index bb1a063c2..8a50639c1 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/index_tables.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/index_tables.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -36,7 +37,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -59,7 +61,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -84,7 +87,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -107,7 +111,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -130,7 +135,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -153,7 +159,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -176,7 +183,8 @@ "type": { "schemaName": null, "type_name": "boolean", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -517,5 +525,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/multi_notes.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/multi_notes.out.json index 37a122705..3e032e82f 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/multi_notes.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/multi_notes.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -51,7 +52,8 @@ "type": { "schemaName": null, "type_name": "order status", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -74,7 +76,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -134,7 +137,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -157,7 +161,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -180,7 +185,8 @@ "type": { "schemaName": null, "type_name": "date", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -203,7 +209,8 @@ "type": { "schemaName": null, "type_name": "timestamp", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -720,5 +727,6 @@ }, "database_type": "PostgreSQL" }, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/multiline_string.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/multiline_string.out.json index 4a06ba066..f07afb90a 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/multiline_string.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/multiline_string.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -70,5 +71,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/negative_number.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/negative_number.out.json index 55d8cab0d..937a8308d 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/negative_number.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/negative_number.out.json @@ -1,290 +1,1900 @@ -{ - "schemas": [], - "tables": [ - { - "name": "a", - "schemaName": null, - "alias": null, - "fields": [ +[ + { + "code": 3019, + "diagnostic": "These fields must be some inline settings optionally ended with a setting list", + "nodeOrToken": { + "id": 49, + "kind": "", + "startPos": { + "offset": 77, + "line": 3, + "column": 10 + }, + "fullStart": 77, + "endPos": { + "offset": 89, + "line": 3, + "column": 22 + }, + "fullEnd": 90, + "start": 77, + "end": 89, + "tupleOpenParen": { + "kind": "", + "startPos": { + "offset": 77, + "line": 3, + "column": 10 + }, + "endPos": { + "offset": 78, + "line": 3, + "column": 11 + }, + "value": "(", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 77, + "end": 78 + }, + "elementList": [ { - "name": "id", - "type": { - "schemaName": null, - "type_name": "int(-1)", - "args": "-1" + "id": 48, + "kind": "", + "startPos": { + "offset": 78, + "line": 3, + "column": 11 }, - "token": { - "start": { - "offset": 12, - "line": 2, - "column": 3 - }, - "end": { - "offset": 36, - "line": 2, - "column": 27 - } + "fullStart": 78, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 }, - "inline_refs": [], - "pk": false, - "increment": false, - "unique": false, - "dbdefault": { - "type": "number", - "value": -2 + "fullEnd": 88, + "start": 78, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 78, + "line": 3, + "column": 11 + }, + "endPos": { + "offset": 79, + "line": 3, + "column": 12 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 78, + "end": 79 }, - "checks": [] + "expression": { + "id": 47, + "kind": "", + "startPos": { + "offset": 79, + "line": 3, + "column": 12 + }, + "fullStart": 79, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 79, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 79, + "line": 3, + "column": 12 + }, + "endPos": { + "offset": 80, + "line": 3, + "column": 13 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 79, + "end": 80 + }, + "expression": { + "id": 46, + "kind": "", + "startPos": { + "offset": 80, + "line": 3, + "column": 13 + }, + "fullStart": 80, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 80, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 80, + "line": 3, + "column": 13 + }, + "endPos": { + "offset": 81, + "line": 3, + "column": 14 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 80, + "end": 81 + }, + "expression": { + "id": 45, + "kind": "", + "startPos": { + "offset": 81, + "line": 3, + "column": 14 + }, + "fullStart": 81, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 81, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 81, + "line": 3, + "column": 14 + }, + "endPos": { + "offset": 82, + "line": 3, + "column": 15 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 81, + "end": 82 + }, + "expression": { + "id": 44, + "kind": "", + "startPos": { + "offset": 82, + "line": 3, + "column": 15 + }, + "fullStart": 82, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 82, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 82, + "line": 3, + "column": 15 + }, + "endPos": { + "offset": 83, + "line": 3, + "column": 16 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 82, + "end": 83 + }, + "expression": { + "id": 43, + "kind": "", + "startPos": { + "offset": 83, + "line": 3, + "column": 16 + }, + "fullStart": 83, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 83, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 83, + "line": 3, + "column": 16 + }, + "endPos": { + "offset": 84, + "line": 3, + "column": 17 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 83, + "end": 84 + }, + "expression": { + "id": 42, + "kind": "", + "startPos": { + "offset": 84, + "line": 3, + "column": 17 + }, + "fullStart": 84, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 84, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 84, + "line": 3, + "column": 17 + }, + "endPos": { + "offset": 85, + "line": 3, + "column": 18 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 84, + "end": 85 + }, + "expression": { + "id": 41, + "kind": "", + "startPos": { + "offset": 85, + "line": 3, + "column": 18 + }, + "fullStart": 85, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 85, + "end": 88, + "expression": { + "id": 40, + "kind": "", + "startPos": { + "offset": 85, + "line": 3, + "column": 18 + }, + "fullStart": 85, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 85, + "end": 88, + "literal": { + "kind": "", + "startPos": { + "offset": 85, + "line": 3, + "column": 18 + }, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "value": "0.1", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 85, + "end": 88 + } + } + } + } + } + } + } + } + } + } + ], + "commaList": [], + "tupleCloseParen": { + "kind": "", + "startPos": { + "offset": 88, + "line": 3, + "column": 21 }, + "endPos": { + "offset": 89, + "line": 3, + "column": 22 + }, + "value": ")", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 89, + "line": 3, + "column": 22 + }, + "endPos": { + "offset": 90, + "line": 3, + "column": 23 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 89, + "end": 90 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 88, + "end": 89 + } + }, + "start": 77, + "end": 89, + "name": "CompileError" + }, + { + "code": 3019, + "diagnostic": "These fields must be some inline settings optionally ended with a setting list", + "nodeOrToken": { + "id": 58, + "kind": "", + "startPos": { + "offset": 90, + "line": 3, + "column": 23 + }, + "fullStart": 90, + "endPos": { + "offset": 111, + "line": 3, + "column": 44 + }, + "fullEnd": 124, + "start": 90, + "end": 111, + "listOpenBracket": { + "kind": "", + "startPos": { + "offset": 90, + "line": 3, + "column": 23 + }, + "endPos": { + "offset": 91, + "line": 3, + "column": 24 + }, + "value": "[", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 90, + "end": 91 + }, + "elementList": [ { - "name": "id2", - "type": { - "schemaName": null, - "type_name": "int(--1)", - "args": "--1" + "id": 57, + "kind": "", + "startPos": { + "offset": 91, + "line": 3, + "column": 24 + }, + "fullStart": 91, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 }, - "token": { - "start": { - "offset": 39, + "fullEnd": 110, + "start": 91, + "end": 110, + "name": { + "id": 50, + "kind": "", + "startPos": { + "offset": 91, "line": 3, - "column": 3 + "column": 24 }, - "end": { - "offset": 66, + "fullStart": 91, + "endPos": { + "offset": 98, "line": 3, - "column": 30 - } - }, - "inline_refs": [], - "pk": false, - "increment": false, - "unique": false, - "dbdefault": { - "type": "number", - "value": -2 + "column": 31 + }, + "fullEnd": 98, + "start": 91, + "end": 98, + "identifiers": [ + { + "kind": "", + "startPos": { + "offset": 91, + "line": 3, + "column": 24 + }, + "endPos": { + "offset": 98, + "line": 3, + "column": 31 + }, + "value": "default", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 91, + "end": 98 + } + ] }, - "checks": [] - }, - { - "name": "id3", - "type": { - "schemaName": null, - "type_name": "int(+-+---+0.1)", - "args": "+-+---+0.1" + "value": { + "id": 56, + "kind": "", + "startPos": { + "offset": 100, + "line": 3, + "column": 33 + }, + "fullStart": 100, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 100, + "end": 110, + "op": { + "kind": "", + "startPos": { + "offset": 100, + "line": 3, + "column": 33 + }, + "endPos": { + "offset": 101, + "line": 3, + "column": 34 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 100, + "end": 101 + }, + "expression": { + "id": 55, + "kind": "", + "startPos": { + "offset": 101, + "line": 3, + "column": 34 + }, + "fullStart": 101, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 101, + "end": 110, + "op": { + "kind": "", + "startPos": { + "offset": 101, + "line": 3, + "column": 34 + }, + "endPos": { + "offset": 102, + "line": 3, + "column": 35 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 101, + "end": 102 + }, + "expression": { + "id": 54, + "kind": "", + "startPos": { + "offset": 102, + "line": 3, + "column": 35 + }, + "fullStart": 102, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 102, + "end": 110, + "op": { + "kind": "", + "startPos": { + "offset": 102, + "line": 3, + "column": 35 + }, + "endPos": { + "offset": 103, + "line": 3, + "column": 36 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 102, + "end": 103 + }, + "expression": { + "id": 53, + "kind": "", + "startPos": { + "offset": 103, + "line": 3, + "column": 36 + }, + "fullStart": 103, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 103, + "end": 110, + "op": { + "kind": "", + "startPos": { + "offset": 103, + "line": 3, + "column": 36 + }, + "endPos": { + "offset": 104, + "line": 3, + "column": 37 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 103, + "end": 104 + }, + "expression": { + "id": 52, + "kind": "", + "startPos": { + "offset": 104, + "line": 3, + "column": 37 + }, + "fullStart": 104, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 104, + "end": 110, + "expression": { + "id": 51, + "kind": "", + "startPos": { + "offset": 104, + "line": 3, + "column": 37 + }, + "fullStart": 104, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 104, + "end": 110, + "literal": { + "kind": "", + "startPos": { + "offset": 104, + "line": 3, + "column": 37 + }, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "value": "7.2225", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 104, + "end": 110 + } + } + } + } + } + } }, - "token": { - "start": { - "offset": 69, - "line": 4, - "column": 3 + "colon": { + "kind": "", + "startPos": { + "offset": 98, + "line": 3, + "column": 31 + }, + "endPos": { + "offset": 99, + "line": 3, + "column": 32 }, - "end": { + "value": ":", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 99, + "line": 3, + "column": 32 + }, + "endPos": { + "offset": 100, + "line": 3, + "column": 33 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 99, + "end": 100 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 98, + "end": 99 + } + } + ], + "commaList": [], + "listCloseBracket": { + "kind": "", + "startPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "endPos": { + "offset": 111, + "line": 3, + "column": 44 + }, + "value": "]", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { "offset": 111, - "line": 4, + "line": 3, + "column": 44 + }, + "endPos": { + "offset": 112, + "line": 3, "column": 45 - } + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 111, + "end": 112 }, - "inline_refs": [], - "pk": false, - "increment": false, - "unique": false, - "dbdefault": { - "type": "number", - "value": 7.2225 + { + "kind": "", + "startPos": { + "offset": 112, + "line": 3, + "column": 45 + }, + "endPos": { + "offset": 123, + "line": 3, + "column": 56 + }, + "value": " positive", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 112, + "end": 123 }, - "checks": [] - } - ], - "token": { - "start": { - "offset": 0, - "line": 1, - "column": 1 + { + "kind": "", + "startPos": { + "offset": 123, + "line": 3, + "column": 56 + }, + "endPos": { + "offset": 124, + "line": 4, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 123, + "end": 124 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 110, + "end": 111 + } + }, + "start": 90, + "end": 111, + "name": "CompileError" + }, + { + "code": 3019, + "diagnostic": "These fields must be some inline settings optionally ended with a setting list", + "nodeOrToken": { + "id": 111, + "kind": "", + "startPos": { + "offset": 212, + "line": 9, + "column": 10 + }, + "fullStart": 212, + "endPos": { + "offset": 224, + "line": 9, + "column": 22 + }, + "fullEnd": 225, + "start": 212, + "end": 224, + "tupleOpenParen": { + "kind": "", + "startPos": { + "offset": 212, + "line": 9, + "column": 10 }, - "end": { - "offset": 125, - "line": 5, - "column": 2 - } + "endPos": { + "offset": 213, + "line": 9, + "column": 11 + }, + "value": "(", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 212, + "end": 213 }, - "indexes": [], - "partials": [], - "checks": [] - }, - { - "name": "b", - "schemaName": null, - "alias": null, - "fields": [ + "elementList": [ { - "name": "id", - "type": { - "schemaName": null, - "type_name": "int", - "args": null + "id": 110, + "kind": "", + "startPos": { + "offset": 213, + "line": 9, + "column": 11 }, - "token": { - "start": { - "offset": 278, - "line": 14, - "column": 3 - }, - "end": { - "offset": 284, - "line": 14, - "column": 9 - } + "fullStart": 213, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 }, - "inline_refs": [], - "pk": false, - "unique": false + "fullEnd": 223, + "start": 213, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 213, + "line": 9, + "column": 11 + }, + "endPos": { + "offset": 214, + "line": 9, + "column": 12 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 213, + "end": 214 + }, + "expression": { + "id": 109, + "kind": "", + "startPos": { + "offset": 214, + "line": 9, + "column": 12 + }, + "fullStart": 214, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 214, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 214, + "line": 9, + "column": 12 + }, + "endPos": { + "offset": 215, + "line": 9, + "column": 13 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 214, + "end": 215 + }, + "expression": { + "id": 108, + "kind": "", + "startPos": { + "offset": 215, + "line": 9, + "column": 13 + }, + "fullStart": 215, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 215, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 215, + "line": 9, + "column": 13 + }, + "endPos": { + "offset": 216, + "line": 9, + "column": 14 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 215, + "end": 216 + }, + "expression": { + "id": 107, + "kind": "", + "startPos": { + "offset": 216, + "line": 9, + "column": 14 + }, + "fullStart": 216, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 216, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 216, + "line": 9, + "column": 14 + }, + "endPos": { + "offset": 217, + "line": 9, + "column": 15 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 216, + "end": 217 + }, + "expression": { + "id": 106, + "kind": "", + "startPos": { + "offset": 217, + "line": 9, + "column": 15 + }, + "fullStart": 217, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 217, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 217, + "line": 9, + "column": 15 + }, + "endPos": { + "offset": 218, + "line": 9, + "column": 16 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 217, + "end": 218 + }, + "expression": { + "id": 105, + "kind": "", + "startPos": { + "offset": 218, + "line": 9, + "column": 16 + }, + "fullStart": 218, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 218, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 218, + "line": 9, + "column": 16 + }, + "endPos": { + "offset": 219, + "line": 9, + "column": 17 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 218, + "end": 219 + }, + "expression": { + "id": 104, + "kind": "", + "startPos": { + "offset": 219, + "line": 9, + "column": 17 + }, + "fullStart": 219, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 219, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 219, + "line": 9, + "column": 17 + }, + "endPos": { + "offset": 220, + "line": 9, + "column": 18 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 219, + "end": 220 + }, + "expression": { + "id": 103, + "kind": "", + "startPos": { + "offset": 220, + "line": 9, + "column": 18 + }, + "fullStart": 220, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 220, + "end": 223, + "expression": { + "id": 102, + "kind": "", + "startPos": { + "offset": 220, + "line": 9, + "column": 18 + }, + "fullStart": 220, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 220, + "end": 223, + "literal": { + "kind": "", + "startPos": { + "offset": 220, + "line": 9, + "column": 18 + }, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "value": "0.1", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 220, + "end": 223 + } + } + } + } + } + } + } + } + } } ], - "token": { - "start": { - "offset": 266, - "line": 13, - "column": 1 + "commaList": [], + "tupleCloseParen": { + "kind": "", + "startPos": { + "offset": 223, + "line": 9, + "column": 21 }, - "end": { - "offset": 292, - "line": 16, - "column": 2 - } + "endPos": { + "offset": 224, + "line": 9, + "column": 22 + }, + "value": ")", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 224, + "line": 9, + "column": 22 + }, + "endPos": { + "offset": 225, + "line": 9, + "column": 23 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 224, + "end": 225 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 223, + "end": 224 + } + }, + "start": 212, + "end": 224, + "name": "CompileError" + }, + { + "code": 3019, + "diagnostic": "These fields must be some inline settings optionally ended with a setting list", + "nodeOrToken": { + "id": 124, + "kind": "", + "startPos": { + "offset": 225, + "line": 9, + "column": 23 }, - "indexes": [], - "partials": [ - { - "order": 1, - "token": { - "start": { - "offset": 287, - "line": 15, - "column": 3 - }, - "end": { - "offset": 290, - "line": 15, - "column": 6 - } - }, - "name": "P1" - } - ], - "checks": [] - } - ], - "notes": [], - "refs": [], - "enums": [], - "tableGroups": [], - "aliases": [], - "project": {}, - "tablePartials": [ - { - "name": "P1", - "fields": [ + "fullStart": 225, + "endPos": { + "offset": 250, + "line": 9, + "column": 48 + }, + "fullEnd": 263, + "start": 225, + "end": 250, + "listOpenBracket": { + "kind": "", + "startPos": { + "offset": 225, + "line": 9, + "column": 23 + }, + "endPos": { + "offset": 226, + "line": 9, + "column": 24 + }, + "value": "[", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 225, + "end": 226 + }, + "elementList": [ { - "name": "id", - "type": { - "schemaName": null, - "type_name": "int(-1)", - "args": "-1" - }, - "token": { - "start": { - "offset": 147, - "line": 8, - "column": 3 - }, - "end": { - "offset": 171, - "line": 8, - "column": 27 - } + "id": 123, + "kind": "", + "startPos": { + "offset": 226, + "line": 9, + "column": 24 }, - "inline_refs": [], - "pk": false, - "increment": false, - "unique": false, - "dbdefault": { - "type": "number", - "value": -2 + "fullStart": 226, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 }, - "checks": [] - }, - { - "name": "id2", - "type": { - "schemaName": null, - "type_name": "int(--1)", - "args": "--1" + "fullEnd": 249, + "start": 226, + "end": 249, + "name": { + "id": 112, + "kind": "", + "startPos": { + "offset": 226, + "line": 9, + "column": 24 + }, + "fullStart": 226, + "endPos": { + "offset": 233, + "line": 9, + "column": 31 + }, + "fullEnd": 233, + "start": 226, + "end": 233, + "identifiers": [ + { + "kind": "", + "startPos": { + "offset": 226, + "line": 9, + "column": 24 + }, + "endPos": { + "offset": 233, + "line": 9, + "column": 31 + }, + "value": "default", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 226, + "end": 233 + } + ] }, - "token": { - "start": { - "offset": 174, + "value": { + "id": 122, + "kind": "", + "startPos": { + "offset": 235, "line": 9, - "column": 3 + "column": 33 }, - "end": { - "offset": 201, + "fullStart": 235, + "endPos": { + "offset": 249, "line": 9, - "column": 30 + "column": 47 + }, + "fullEnd": 249, + "start": 235, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 235, + "line": 9, + "column": 33 + }, + "endPos": { + "offset": 236, + "line": 9, + "column": 34 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 235, + "end": 236 + }, + "expression": { + "id": 121, + "kind": "", + "startPos": { + "offset": 236, + "line": 9, + "column": 34 + }, + "fullStart": 236, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 236, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 236, + "line": 9, + "column": 34 + }, + "endPos": { + "offset": 237, + "line": 9, + "column": 35 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 236, + "end": 237 + }, + "expression": { + "id": 120, + "kind": "", + "startPos": { + "offset": 237, + "line": 9, + "column": 35 + }, + "fullStart": 237, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 237, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 237, + "line": 9, + "column": 35 + }, + "endPos": { + "offset": 238, + "line": 9, + "column": 36 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 237, + "end": 238 + }, + "expression": { + "id": 119, + "kind": "", + "startPos": { + "offset": 238, + "line": 9, + "column": 36 + }, + "fullStart": 238, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 238, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 238, + "line": 9, + "column": 36 + }, + "endPos": { + "offset": 239, + "line": 9, + "column": 37 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 238, + "end": 239 + }, + "expression": { + "id": 118, + "kind": "", + "startPos": { + "offset": 239, + "line": 9, + "column": 37 + }, + "fullStart": 239, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 239, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 239, + "line": 9, + "column": 37 + }, + "endPos": { + "offset": 240, + "line": 9, + "column": 38 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 239, + "end": 240 + }, + "expression": { + "id": 117, + "kind": "", + "startPos": { + "offset": 240, + "line": 9, + "column": 38 + }, + "fullStart": 240, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 240, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 240, + "line": 9, + "column": 38 + }, + "endPos": { + "offset": 241, + "line": 9, + "column": 39 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 240, + "end": 241 + }, + "expression": { + "id": 116, + "kind": "", + "startPos": { + "offset": 241, + "line": 9, + "column": 39 + }, + "fullStart": 241, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 241, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 241, + "line": 9, + "column": 39 + }, + "endPos": { + "offset": 242, + "line": 9, + "column": 40 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 241, + "end": 242 + }, + "expression": { + "id": 115, + "kind": "", + "startPos": { + "offset": 242, + "line": 9, + "column": 40 + }, + "fullStart": 242, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 242, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 242, + "line": 9, + "column": 40 + }, + "endPos": { + "offset": 243, + "line": 9, + "column": 41 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 242, + "end": 243 + }, + "expression": { + "id": 114, + "kind": "", + "startPos": { + "offset": 243, + "line": 9, + "column": 41 + }, + "fullStart": 243, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 243, + "end": 249, + "expression": { + "id": 113, + "kind": "", + "startPos": { + "offset": 243, + "line": 9, + "column": 41 + }, + "fullStart": 243, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 243, + "end": 249, + "literal": { + "kind": "", + "startPos": { + "offset": 243, + "line": 9, + "column": 41 + }, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "value": "7.2225", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 243, + "end": 249 + } + } + } + } + } + } + } + } + } } }, - "inline_refs": [], - "pk": false, - "increment": false, - "unique": false, - "dbdefault": { - "type": "number", - "value": -2 - }, - "checks": [] - }, - { - "name": "id3", - "type": { - "schemaName": null, - "type_name": "int(+-+---+0.1)", - "args": "+-+---+0.1" - }, - "token": { - "start": { - "offset": 204, - "line": 10, - "column": 3 + "colon": { + "kind": "", + "startPos": { + "offset": 233, + "line": 9, + "column": 31 + }, + "endPos": { + "offset": 234, + "line": 9, + "column": 32 }, - "end": { + "value": ":", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 234, + "line": 9, + "column": 32 + }, + "endPos": { + "offset": 235, + "line": 9, + "column": 33 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 234, + "end": 235 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 233, + "end": 234 + } + } + ], + "commaList": [], + "listCloseBracket": { + "kind": "", + "startPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "endPos": { + "offset": 250, + "line": 9, + "column": 48 + }, + "value": "]", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { "offset": 250, - "line": 10, + "line": 9, + "column": 48 + }, + "endPos": { + "offset": 251, + "line": 9, "column": 49 - } + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 250, + "end": 251 }, - "inline_refs": [], - "pk": false, - "increment": false, - "unique": false, - "dbdefault": { - "type": "number", - "value": -7.2225 + { + "kind": "", + "startPos": { + "offset": 251, + "line": 9, + "column": 49 + }, + "endPos": { + "offset": 262, + "line": 9, + "column": 60 + }, + "value": " negative", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 251, + "end": 262 }, - "checks": [] - } - ], - "token": { - "start": { - "offset": 127, - "line": 7, - "column": 1 - }, - "end": { - "offset": 264, - "line": 11, - "column": 2 - } - }, - "indexes": [], - "checks": [] - } - ] -} \ No newline at end of file + { + "kind": "", + "startPos": { + "offset": 262, + "line": 9, + "column": 60 + }, + "endPos": { + "offset": 263, + "line": 10, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 262, + "end": 263 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 249, + "end": 250 + } + }, + "start": 225, + "end": 250, + "name": "CompileError" + } +] \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/note_normalize.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/note_normalize.out.json index d63bd9cac..965f25580 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/note_normalize.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/note_normalize.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -34,7 +35,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -57,7 +59,8 @@ "type": { "schemaName": null, "type_name": "timestamp", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -117,7 +120,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -142,7 +146,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -165,7 +170,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -188,7 +194,8 @@ "type": { "schemaName": null, "type_name": "timestamp", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -248,7 +255,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -273,7 +281,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -296,7 +305,8 @@ "type": { "schemaName": null, "type_name": "text", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -336,7 +346,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -359,7 +370,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -382,7 +394,8 @@ "type": { "schemaName": null, "type_name": "timestamp", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -614,5 +627,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/note_normalize_with_top_empty_lines.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/note_normalize_with_top_empty_lines.out.json index 212cd55b2..b0a17712b 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/note_normalize_with_top_empty_lines.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/note_normalize_with_top_empty_lines.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -34,7 +35,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -57,7 +59,8 @@ "type": { "schemaName": null, "type_name": "timestamp", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -117,7 +120,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -142,7 +146,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -165,7 +170,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -188,7 +194,8 @@ "type": { "schemaName": null, "type_name": "timestamp", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -248,7 +255,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -273,7 +281,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -296,7 +305,8 @@ "type": { "schemaName": null, "type_name": "text", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -336,7 +346,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -359,7 +370,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -382,7 +394,8 @@ "type": { "schemaName": null, "type_name": "timestamp", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -614,5 +627,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/old_undocumented_syntax.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/old_undocumented_syntax.out.json index daf2c0be8..8168aa2b2 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/old_undocumented_syntax.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/old_undocumented_syntax.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "SMALLINT", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -37,7 +38,8 @@ "type": { "schemaName": null, "type_name": "TINYINT", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -63,7 +65,11 @@ "type": { "schemaName": null, "type_name": "VARCHAR(45)", - "args": "45" + "args": "45", + "lengthParam": { + "length": 45 + }, + "isEnum": false }, "token": { "start": { @@ -89,7 +95,11 @@ "type": { "schemaName": null, "type_name": "VARCHAR(45)", - "args": "45" + "args": "45", + "lengthParam": { + "length": 45 + }, + "isEnum": false }, "token": { "start": { @@ -119,7 +129,11 @@ "type": { "schemaName": null, "type_name": "VARCHAR(50)", - "args": "50" + "args": "50", + "lengthParam": { + "length": 50 + }, + "isEnum": false }, "token": { "start": { @@ -148,7 +162,8 @@ "type": { "schemaName": null, "type_name": "SMALLINT", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -174,7 +189,8 @@ "type": { "schemaName": null, "type_name": "BOOLEAN", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -204,7 +220,8 @@ "type": { "schemaName": null, "type_name": "DATETIME", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -230,7 +247,8 @@ "type": { "schemaName": null, "type_name": "TIMESTAMP", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -281,7 +299,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -306,7 +325,8 @@ "type": { "schemaName": null, "type_name": "e", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -335,7 +355,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -395,7 +416,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -418,7 +440,8 @@ "type": { "schemaName": null, "type_name": "string[]", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -463,7 +486,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -486,7 +510,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -577,5 +602,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/primary_key.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/primary_key.out.json index a7aec078d..054b9345d 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/primary_key.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/primary_key.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -55,5 +56,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/project.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/project.out.json index 7cc45ba13..8dfa8c579 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/project.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/project.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -36,7 +37,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -62,7 +64,8 @@ "type": { "schemaName": null, "type_name": "orders_status", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -85,7 +88,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -131,7 +135,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -154,7 +159,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -177,7 +183,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -228,7 +235,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -253,7 +261,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -276,7 +285,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -302,7 +312,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -325,7 +336,8 @@ "type": { "schemaName": null, "type_name": "product status", - "args": null + "args": null, + "isEnum": true }, "token": { "start": { @@ -348,7 +360,8 @@ "type": { "schemaName": null, "type_name": "datetime", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -486,7 +499,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -511,7 +525,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -534,7 +549,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -559,7 +575,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -582,7 +599,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -605,7 +623,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -628,7 +647,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -673,7 +693,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -698,7 +719,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -721,7 +743,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -744,7 +767,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -767,7 +791,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -812,7 +837,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -837,7 +863,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -860,7 +887,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -1466,5 +1494,6 @@ }, "database_type": "PostgreSQL" }, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_basic.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_basic.out.json new file mode 100644 index 000000000..4a11ea82f --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_basic.out.json @@ -0,0 +1,199 @@ +{ + "schemas": [], + "tables": [ + { + "name": "users", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 16, + "line": 2, + "column": 3 + }, + "end": { + "offset": 31, + "line": 2, + "column": 18 + } + }, + "inline_refs": [], + "pk": true, + "increment": false, + "unique": false, + "checks": [] + }, + { + "name": "name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 34, + "line": 3, + "column": 3 + }, + "end": { + "offset": 46, + "line": 3, + "column": 15 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "email", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 49, + "line": 4, + "column": 3 + }, + "end": { + "offset": 62, + "line": 4, + "column": 16 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "age", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 65, + "line": 5, + "column": 3 + }, + "end": { + "offset": 76, + "line": 5, + "column": 14 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { + "start": { + "offset": 0, + "line": 1, + "column": 1 + }, + "end": { + "offset": 78, + "line": 6, + "column": 2 + } + }, + "indexes": [], + "partials": [], + "checks": [] + } + ], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "tablePartials": [], + "records": [ + { + "tableName": "users", + "columns": [ + "id", + "name", + "email", + "age" + ], + "values": [ + [ + { + "value": 1, + "type": "integer" + }, + { + "value": "John Doe", + "type": "string" + }, + { + "value": "john@example.com", + "type": "string" + }, + { + "value": 30, + "type": "integer" + } + ], + [ + { + "value": 2, + "type": "integer" + }, + { + "value": "Jane Smith", + "type": "string" + }, + { + "value": "jane@example.com", + "type": "string" + }, + { + "value": 25, + "type": "integer" + } + ], + [ + { + "value": 3, + "type": "integer" + }, + { + "value": "Bob Johnson", + "type": "string" + }, + { + "value": "bob@example.com", + "type": "string" + }, + { + "value": 35, + "type": "integer" + } + ] + ] + } + ] +} \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_inside_table.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_inside_table.out.json new file mode 100644 index 000000000..6c91e80c8 --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_inside_table.out.json @@ -0,0 +1,162 @@ +{ + "schemas": [], + "tables": [ + { + "name": "products", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 19, + "line": 2, + "column": 3 + }, + "end": { + "offset": 34, + "line": 2, + "column": 18 + } + }, + "inline_refs": [], + "pk": true, + "increment": false, + "unique": false, + "checks": [] + }, + { + "name": "name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 37, + "line": 3, + "column": 3 + }, + "end": { + "offset": 49, + "line": 3, + "column": 15 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "price", + "type": { + "schemaName": null, + "type_name": "decimal", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 52, + "line": 4, + "column": 3 + }, + "end": { + "offset": 65, + "line": 4, + "column": 16 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { + "start": { + "offset": 0, + "line": 1, + "column": 1 + }, + "end": { + "offset": 155, + "line": 11, + "column": 2 + } + }, + "indexes": [], + "partials": [], + "checks": [] + } + ], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "tablePartials": [], + "records": [ + { + "tableName": "products", + "columns": [ + "id", + "name", + "price" + ], + "values": [ + [ + { + "value": 1, + "type": "integer" + }, + { + "value": "Laptop", + "type": "string" + }, + { + "value": 999.99, + "type": "real" + } + ], + [ + { + "value": 2, + "type": "integer" + }, + { + "value": "Mouse", + "type": "string" + }, + { + "value": 29.99, + "type": "real" + } + ], + [ + { + "value": 3, + "type": "integer" + }, + { + "value": "Keyboard", + "type": "string" + }, + { + "value": 79.99, + "type": "real" + } + ] + ] + } + ] +} \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_inside_table_with_columns.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_inside_table_with_columns.out.json new file mode 100644 index 000000000..1cfc93be2 --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_inside_table_with_columns.out.json @@ -0,0 +1,247 @@ +{ + "schemas": [], + "tables": [ + { + "name": "employees", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 20, + "line": 2, + "column": 3 + }, + "end": { + "offset": 35, + "line": 2, + "column": 18 + } + }, + "inline_refs": [], + "pk": true, + "increment": false, + "unique": false, + "checks": [] + }, + { + "name": "first_name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 38, + "line": 3, + "column": 3 + }, + "end": { + "offset": 56, + "line": 3, + "column": 21 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "last_name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 59, + "line": 4, + "column": 3 + }, + "end": { + "offset": 76, + "line": 4, + "column": 20 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "department", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 79, + "line": 5, + "column": 3 + }, + "end": { + "offset": 97, + "line": 5, + "column": 21 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "salary", + "type": { + "schemaName": null, + "type_name": "decimal", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 100, + "line": 6, + "column": 3 + }, + "end": { + "offset": 114, + "line": 6, + "column": 17 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "hire_date", + "type": { + "schemaName": null, + "type_name": "date", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 117, + "line": 7, + "column": 3 + }, + "end": { + "offset": 131, + "line": 7, + "column": 17 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { + "start": { + "offset": 0, + "line": 1, + "column": 1 + }, + "end": { + "offset": 305, + "line": 14, + "column": 2 + } + }, + "indexes": [], + "partials": [], + "checks": [] + } + ], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "tablePartials": [], + "records": [ + { + "tableName": "employees", + "columns": [ + "id", + "first_name", + "last_name", + "department" + ], + "values": [ + [ + { + "value": 1, + "type": "integer" + }, + { + "value": "Alice", + "type": "string" + }, + { + "value": "Anderson", + "type": "string" + }, + { + "value": "Engineering", + "type": "string" + } + ], + [ + { + "value": 2, + "type": "integer" + }, + { + "value": "Bob", + "type": "string" + }, + { + "value": "Brown", + "type": "string" + }, + { + "value": "Marketing", + "type": "string" + } + ], + [ + { + "value": 3, + "type": "integer" + }, + { + "value": "Carol", + "type": "string" + }, + { + "value": "Chen", + "type": "string" + }, + { + "value": "Engineering", + "type": "string" + } + ] + ] + } + ] +} \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_with_nulls.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_with_nulls.out.json new file mode 100644 index 000000000..31fbb0673 --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_with_nulls.out.json @@ -0,0 +1,210 @@ +{ + "schemas": [], + "tables": [ + { + "name": "users", + "schemaName": null, + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 16, + "line": 2, + "column": 3 + }, + "end": { + "offset": 31, + "line": 2, + "column": 18 + } + }, + "inline_refs": [], + "pk": true, + "increment": false, + "unique": false, + "checks": [] + }, + { + "name": "name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 34, + "line": 3, + "column": 3 + }, + "end": { + "offset": 46, + "line": 3, + "column": 15 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "email", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 49, + "line": 4, + "column": 3 + }, + "end": { + "offset": 62, + "line": 4, + "column": 16 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "age", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 65, + "line": 5, + "column": 3 + }, + "end": { + "offset": 76, + "line": 5, + "column": 14 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "created_at", + "type": { + "schemaName": null, + "type_name": "timestamp", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 79, + "line": 6, + "column": 3 + }, + "end": { + "offset": 99, + "line": 6, + "column": 23 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { + "start": { + "offset": 0, + "line": 1, + "column": 1 + }, + "end": { + "offset": 101, + "line": 7, + "column": 2 + } + }, + "indexes": [], + "partials": [], + "checks": [] + } + ], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "tablePartials": [], + "records": [ + { + "tableName": "users", + "columns": [ + "id", + "name", + "email" + ], + "values": [ + [ + { + "value": 1, + "type": "integer" + }, + { + "value": "Alice", + "type": "string" + }, + { + "value": null, + "type": "string" + } + ], + [ + { + "value": 2, + "type": "integer" + }, + { + "value": null, + "type": "string" + }, + { + "value": null, + "type": "string" + } + ], + [ + { + "value": 3, + "type": "integer" + }, + { + "value": "Charlie", + "type": "string" + }, + { + "value": "charlie@example.com", + "type": "string" + } + ] + ] + } + ] +} \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_with_schema.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_with_schema.out.json new file mode 100644 index 000000000..43e41f41d --- /dev/null +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/records_with_schema.out.json @@ -0,0 +1,174 @@ +{ + "schemas": [], + "tables": [ + { + "name": "orders", + "schemaName": "public", + "alias": null, + "fields": [ + { + "name": "id", + "type": { + "schemaName": null, + "type_name": "integer", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 24, + "line": 2, + "column": 3 + }, + "end": { + "offset": 39, + "line": 2, + "column": 18 + } + }, + "inline_refs": [], + "pk": true, + "increment": false, + "unique": false, + "checks": [] + }, + { + "name": "customer_name", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 42, + "line": 3, + "column": 3 + }, + "end": { + "offset": 63, + "line": 3, + "column": 24 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "total", + "type": { + "schemaName": null, + "type_name": "decimal", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 66, + "line": 4, + "column": 3 + }, + "end": { + "offset": 79, + "line": 4, + "column": 16 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + }, + { + "name": "status", + "type": { + "schemaName": null, + "type_name": "varchar", + "args": null, + "isEnum": false + }, + "token": { + "start": { + "offset": 82, + "line": 5, + "column": 3 + }, + "end": { + "offset": 96, + "line": 5, + "column": 17 + } + }, + "inline_refs": [], + "pk": false, + "unique": false + } + ], + "token": { + "start": { + "offset": 0, + "line": 1, + "column": 1 + }, + "end": { + "offset": 98, + "line": 6, + "column": 2 + } + }, + "indexes": [], + "partials": [], + "checks": [] + } + ], + "notes": [], + "refs": [], + "enums": [], + "tableGroups": [], + "aliases": [], + "project": {}, + "tablePartials": [], + "records": [ + { + "schemaName": "public", + "tableName": "orders", + "columns": [ + "id", + "customer_name" + ], + "values": [ + [ + { + "value": 1, + "type": "integer" + }, + { + "value": "John Doe", + "type": "string" + } + ], + [ + { + "value": 2, + "type": "integer" + }, + { + "value": "Jane Smith", + "type": "string" + } + ], + [ + { + "value": 3, + "type": "integer" + }, + { + "value": "Bob Wilson", + "type": "string" + } + ] + ] + } + ] +} \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/ref_name_and_color_setting.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/ref_name_and_color_setting.out.json index 04f70dd59..0eba7b114 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/ref_name_and_color_setting.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/ref_name_and_color_setting.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -34,7 +35,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -80,7 +82,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -103,7 +106,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -264,5 +268,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/ref_settings.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/ref_settings.out.json index 3420b2e95..2547945c5 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/ref_settings.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/ref_settings.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -34,7 +35,8 @@ "type": { "schemaName": null, "type_name": "number", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -79,7 +81,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -102,7 +105,8 @@ "type": { "schemaName": null, "type_name": "number", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -265,5 +269,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/referential_actions.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/referential_actions.out.json index 7603e3c49..69e7a7ff0 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/referential_actions.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/referential_actions.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -37,7 +38,8 @@ "type": { "schemaName": null, "type_name": "orders_status_enum", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -60,7 +62,11 @@ "type": { "schemaName": null, "type_name": "varchar(255)", - "args": "255" + "args": "255", + "lengthParam": { + "length": 255 + }, + "isEnum": false }, "token": { "start": { @@ -122,7 +128,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -145,7 +152,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -168,7 +176,11 @@ "type": { "schemaName": null, "type_name": "varchar(255)", - "args": "255" + "args": "255", + "lengthParam": { + "length": 255 + }, + "isEnum": false }, "token": { "start": { @@ -191,7 +203,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -242,7 +255,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -265,7 +279,12 @@ "type": { "schemaName": null, "type_name": "decimal(10,4)", - "args": "10,4" + "args": "10,4", + "numericParams": { + "precision": 10, + "scale": 4 + }, + "isEnum": false }, "token": { "start": { @@ -288,7 +307,8 @@ "type": { "schemaName": null, "type_name": "datetime", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -407,7 +427,11 @@ "type": { "schemaName": null, "type_name": "varchar(255)", - "args": "255" + "args": "255", + "lengthParam": { + "length": 255 + }, + "isEnum": false }, "token": { "start": { @@ -432,7 +456,8 @@ "type": { "schemaName": null, "type_name": "datetime", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -455,7 +480,8 @@ "type": { "schemaName": null, "type_name": "datetime", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -484,7 +510,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -565,7 +592,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -590,7 +618,11 @@ "type": { "schemaName": null, "type_name": "varchar(255)", - "args": "255" + "args": "255", + "lengthParam": { + "length": 255 + }, + "isEnum": false }, "token": { "start": { @@ -897,7 +929,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -941,7 +974,11 @@ "type": { "schemaName": null, "type_name": "varchar(255)", - "args": "255" + "args": "255", + "lengthParam": { + "length": 255 + }, + "isEnum": false }, "token": { "start": { @@ -975,5 +1012,6 @@ "indexes": [], "checks": [] } - ] + ], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/sticky_notes.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/sticky_notes.out.json index e526d6a67..5836be7a5 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/sticky_notes.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/sticky_notes.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "integer", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -36,7 +37,11 @@ "type": { "schemaName": null, "type_name": "varchar(255)", - "args": "255" + "args": "255", + "lengthParam": { + "length": 255 + }, + "isEnum": false }, "token": { "start": { @@ -115,5 +120,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group.out.json index 25c961a60..aa34b98af 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -34,7 +35,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -57,7 +59,8 @@ "type": { "schemaName": null, "type_name": "timestamp", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -80,7 +83,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -125,7 +129,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -148,7 +153,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -171,7 +177,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -194,7 +201,8 @@ "type": { "schemaName": null, "type_name": "varchar", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -217,7 +225,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -377,5 +386,6 @@ } ], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group_element.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group_element.out.json index 3cdcc3068..01748de31 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group_element.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group_element.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -58,7 +59,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -208,5 +210,6 @@ ], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group_settings.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group_settings.out.json index 05ffbc988..490e3a221 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group_settings.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_group_settings.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -94,5 +95,6 @@ ], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_partial.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_partial.out.json index f6519ca91..99e0e907c 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_partial.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_partial.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -155,7 +156,11 @@ "type": { "schemaName": null, "type_name": "char(255)", - "args": "255" + "args": "255", + "lengthParam": { + "length": 255 + }, + "isEnum": false }, "token": { "start": { @@ -251,7 +256,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -274,7 +280,8 @@ "type": { "schemaName": null, "type_name": "decimal", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -371,7 +378,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -394,7 +402,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -417,7 +426,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -511,7 +521,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -790,7 +801,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -815,7 +827,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -979,7 +992,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -1013,5 +1027,6 @@ "indexes": [], "checks": [] } - ] + ], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_settings.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_settings.out.json index 04b8eb22e..de73b46f0 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_settings.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/table_settings.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -36,7 +37,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -82,7 +84,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -107,7 +110,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -170,7 +174,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -195,7 +200,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -218,7 +224,8 @@ "type": { "schemaName": null, "type_name": "decimal", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -282,7 +289,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -307,7 +315,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -330,7 +339,8 @@ "type": { "schemaName": null, "type_name": "int", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -353,7 +363,8 @@ "type": { "schemaName": null, "type_name": "string", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -528,5 +539,6 @@ "tableGroups": [], "aliases": [], "project": {}, - "tablePartials": [] + "tablePartials": [], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/interpreter/output/tablepartial_causing_circular_ref.out.json b/packages/dbml-parse/__tests__/snapshots/interpreter/output/tablepartial_causing_circular_ref.out.json index 4806c3203..7e2a31ad4 100644 --- a/packages/dbml-parse/__tests__/snapshots/interpreter/output/tablepartial_causing_circular_ref.out.json +++ b/packages/dbml-parse/__tests__/snapshots/interpreter/output/tablepartial_causing_circular_ref.out.json @@ -11,7 +11,8 @@ "type": { "schemaName": null, "type_name": "type", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -161,7 +162,8 @@ "type": { "schemaName": null, "type_name": "type", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -207,7 +209,8 @@ "type": { "schemaName": null, "type_name": "type", - "args": null + "args": null, + "isEnum": false }, "token": { "start": { @@ -264,5 +267,6 @@ "indexes": [], "checks": [] } - ] + ], + "records": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/color.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/color.out.json index c65c32d3c..7cffd026a 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/color.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/color.out.json @@ -107,5 +107,6 @@ "end": 15 } ], - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/comment.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/comment.out.json index 8dadefed7..52c3c67d5 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/comment.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/comment.out.json @@ -422,5 +422,6 @@ "end": 150 } ], - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/function_expression.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/function_expression.out.json index b45cabd9d..0fc522dd3 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/function_expression.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/function_expression.out.json @@ -288,5 +288,6 @@ "end": 84, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/identifiers.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/identifiers.out.json index 61eed117e..2010803f2 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/identifiers.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/identifiers.out.json @@ -258,5 +258,6 @@ "end": 39 } ], - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/identifiers_starting_with_digits.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/identifiers_starting_with_digits.out.json index 11f94a72b..8e9095beb 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/identifiers_starting_with_digits.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/identifiers_starting_with_digits.out.json @@ -980,5 +980,6 @@ "end": 167, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/invalid_escape_sequence.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/invalid_escape_sequence.out.json index c328fd657..e45ed8a85 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/invalid_escape_sequence.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/invalid_escape_sequence.out.json @@ -399,5 +399,6 @@ "end": 35, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/number.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/number.out.json index 2aac84464..e4a83d662 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/number.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/number.out.json @@ -645,5 +645,6 @@ "end": 79, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/strings.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/strings.out.json index 7f090ba89..1b35f6204 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/strings.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/strings.out.json @@ -695,5 +695,6 @@ "end": 312, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/symbols.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/symbols.out.json index f96c9481c..f8150ad8d 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/symbols.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/symbols.out.json @@ -1009,5 +1009,6 @@ "end": 59 } ], - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/unclosed_strings.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/unclosed_strings.out.json index 746f02cea..3604b2d05 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/unclosed_strings.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/unclosed_strings.out.json @@ -347,5 +347,6 @@ "end": 104, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/unicode_identifiers.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/unicode_identifiers.out.json index a5ff199eb..a28b57b08 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/unicode_identifiers.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/unicode_identifiers.out.json @@ -101340,5 +101340,6 @@ "end": 5951 } ], - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/lexer/output/valid_escape_sequence.out.json b/packages/dbml-parse/__tests__/snapshots/lexer/output/valid_escape_sequence.out.json index 7a9abda5d..a109c5cde 100644 --- a/packages/dbml-parse/__tests__/snapshots/lexer/output/valid_escape_sequence.out.json +++ b/packages/dbml-parse/__tests__/snapshots/lexer/output/valid_escape_sequence.out.json @@ -560,5 +560,6 @@ "end": 251 } ], - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/nan/nan.test.ts b/packages/dbml-parse/__tests__/snapshots/nan/nan.test.ts index cfdf0d50c..0fcd35dd6 100644 --- a/packages/dbml-parse/__tests__/snapshots/nan/nan.test.ts +++ b/packages/dbml-parse/__tests__/snapshots/nan/nan.test.ts @@ -33,7 +33,7 @@ describe('[snapshot] interpreter (NaN cases)', () => { 2, ); } else { - const res = new Interpreter(report.getValue()).interpret(); + const res = new Interpreter(report.getValue(), program).interpret(); if (res.getErrors().length > 0) { output = JSON.stringify( res.getErrors(), diff --git a/packages/dbml-parse/__tests__/snapshots/parser/input/expression.in.dbml b/packages/dbml-parse/__tests__/snapshots/parser/input/expression.in.dbml index 31d9388eb..5e7d8b5f9 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/input/expression.in.dbml +++ b/packages/dbml-parse/__tests__/snapshots/parser/input/expression.in.dbml @@ -43,8 +43,8 @@ Test Expression { b = 1 == 1 - a != b + c () + a != b + c() +++----++-1 ---++---+1 -} \ No newline at end of file +} diff --git a/packages/dbml-parse/__tests__/snapshots/parser/input/function_application.in.dbml b/packages/dbml-parse/__tests__/snapshots/parser/input/function_application.in.dbml index d66f03c0b..a97aa1594 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/input/function_application.in.dbml +++ b/packages/dbml-parse/__tests__/snapshots/parser/input/function_application.in.dbml @@ -1,4 +1,4 @@ Test FunctionApplication { id integer [primary key] - name char (255) [unique] + name char(255) [unique] } diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/call_expression.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/call_expression.out.json index 733aba9a2..819804989 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/call_expression.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/call_expression.out.json @@ -1517,5 +1517,6 @@ "end": 31, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/element-declaration.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/element-declaration.out.json index d68553ba9..980d55f16 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/element-declaration.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/element-declaration.out.json @@ -1,1608 +1,1609 @@ -{ - "value": { - "id": 22, - "kind": "", - "startPos": { - "offset": 0, - "line": 0, - "column": 0 - }, - "fullStart": 0, - "endPos": { - "offset": 148, - "line": 19, - "column": 1 - }, - "fullEnd": 148, - "start": 0, - "end": 148, - "body": [ - { - "id": 1, - "kind": "", - "startPos": { - "offset": 0, - "line": 0, - "column": 0 - }, - "fullStart": 0, - "endPos": { - "offset": 12, - "line": 2, - "column": 1 - }, - "fullEnd": 14, - "start": 0, - "end": 12, - "type": { - "kind": "", - "startPos": { - "offset": 0, - "line": 0, - "column": 0 - }, - "endPos": { - "offset": 5, - "line": 0, - "column": 5 - }, - "value": "Table", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 5, - "line": 0, - "column": 5 - }, - "endPos": { - "offset": 6, - "line": 0, - "column": 6 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 5, - "end": 6 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 0, - "end": 5 - }, - "body": { - "id": 0, - "kind": "", - "startPos": { - "offset": 6, - "line": 0, - "column": 6 - }, - "fullStart": 6, - "endPos": { - "offset": 12, - "line": 2, - "column": 1 - }, - "fullEnd": 14, - "start": 6, - "end": 12, - "blockOpenBrace": { - "kind": "", - "startPos": { - "offset": 6, - "line": 0, - "column": 6 - }, - "endPos": { - "offset": 7, - "line": 0, - "column": 7 - }, - "value": "{", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 8, - "line": 0, - "column": 8 - }, - "endPos": { - "offset": 9, - "line": 1, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 8, - "end": 9 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 6, - "end": 7 - }, - "body": [], - "blockCloseBrace": { - "kind": "", - "startPos": { - "offset": 11, - "line": 2, - "column": 0 - }, - "endPos": { - "offset": 12, - "line": 2, - "column": 1 - }, - "value": "}", - "leadingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 10, - "line": 1, - "column": 1 - }, - "endPos": { - "offset": 11, - "line": 2, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 10, - "end": 11 - } - ], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 13, - "line": 2, - "column": 2 - }, - "endPos": { - "offset": 14, - "line": 3, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 13, - "end": 14 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 11, - "end": 12 - } - } - }, - { - "id": 5, - "kind": "", - "startPos": { - "offset": 16, - "line": 4, - "column": 0 - }, - "fullStart": 15, - "endPos": { - "offset": 39, - "line": 6, - "column": 1 - }, - "fullEnd": 41, - "start": 16, - "end": 39, - "type": { - "kind": "", - "startPos": { - "offset": 16, - "line": 4, - "column": 0 - }, - "endPos": { - "offset": 26, - "line": 4, - "column": 10 - }, - "value": "TableGroup", - "leadingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 15, - "line": 3, - "column": 1 - }, - "endPos": { - "offset": 16, - "line": 4, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 15, - "end": 16 - } - ], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 26, - "line": 4, - "column": 10 - }, - "endPos": { - "offset": 27, - "line": 4, - "column": 11 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 26, - "end": 27 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 16, - "end": 26 - }, - "name": { - "id": 3, - "kind": "", - "startPos": { - "offset": 27, - "line": 4, - "column": 11 - }, - "fullStart": 27, - "endPos": { - "offset": 32, - "line": 4, - "column": 16 - }, - "fullEnd": 33, - "start": 27, - "end": 32, - "expression": { - "id": 2, - "kind": "", - "startPos": { - "offset": 27, - "line": 4, - "column": 11 - }, - "fullStart": 27, - "endPos": { - "offset": 32, - "line": 4, - "column": 16 - }, - "fullEnd": 33, - "start": 27, - "end": 32, - "variable": { - "kind": "", - "startPos": { - "offset": 27, - "line": 4, - "column": 11 - }, - "endPos": { - "offset": 32, - "line": 4, - "column": 16 - }, - "value": "group", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 32, - "line": 4, - "column": 16 - }, - "endPos": { - "offset": 33, - "line": 4, - "column": 17 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 32, - "end": 33 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 27, - "end": 32 - } - } - }, - "body": { - "id": 4, - "kind": "", - "startPos": { - "offset": 33, - "line": 4, - "column": 17 - }, - "fullStart": 33, - "endPos": { - "offset": 39, - "line": 6, - "column": 1 - }, - "fullEnd": 41, - "start": 33, - "end": 39, - "blockOpenBrace": { - "kind": "", - "startPos": { - "offset": 33, - "line": 4, - "column": 17 - }, - "endPos": { - "offset": 34, - "line": 4, - "column": 18 - }, - "value": "{", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 35, - "line": 4, - "column": 19 - }, - "endPos": { - "offset": 36, - "line": 5, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 35, - "end": 36 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 33, - "end": 34 - }, - "body": [], - "blockCloseBrace": { - "kind": "", - "startPos": { - "offset": 38, - "line": 6, - "column": 0 - }, - "endPos": { - "offset": 39, - "line": 6, - "column": 1 - }, - "value": "}", - "leadingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 37, - "line": 5, - "column": 1 - }, - "endPos": { - "offset": 38, - "line": 6, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 37, - "end": 38 - } - ], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 40, - "line": 6, - "column": 2 - }, - "endPos": { - "offset": 41, - "line": 7, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 40, - "end": 41 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 38, - "end": 39 - } - } - }, - { - "id": 7, - "kind": "", - "startPos": { - "offset": 43, - "line": 8, - "column": 0 - }, - "fullStart": 42, - "endPos": { - "offset": 53, - "line": 10, - "column": 1 - }, - "fullEnd": 55, - "start": 43, - "end": 53, - "type": { - "kind": "", - "startPos": { - "offset": 43, - "line": 8, - "column": 0 - }, - "endPos": { - "offset": 46, - "line": 8, - "column": 3 - }, - "value": "Ref", - "leadingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 42, - "line": 7, - "column": 1 - }, - "endPos": { - "offset": 43, - "line": 8, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 42, - "end": 43 - } - ], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 46, - "line": 8, - "column": 3 - }, - "endPos": { - "offset": 47, - "line": 8, - "column": 4 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 46, - "end": 47 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 43, - "end": 46 - }, - "body": { - "id": 6, - "kind": "", - "startPos": { - "offset": 47, - "line": 8, - "column": 4 - }, - "fullStart": 47, - "endPos": { - "offset": 53, - "line": 10, - "column": 1 - }, - "fullEnd": 55, - "start": 47, - "end": 53, - "blockOpenBrace": { - "kind": "", - "startPos": { - "offset": 47, - "line": 8, - "column": 4 - }, - "endPos": { - "offset": 48, - "line": 8, - "column": 5 - }, - "value": "{", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 49, - "line": 8, - "column": 6 - }, - "endPos": { - "offset": 50, - "line": 9, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 49, - "end": 50 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 47, - "end": 48 - }, - "body": [], - "blockCloseBrace": { - "kind": "", - "startPos": { - "offset": 52, - "line": 10, - "column": 0 - }, - "endPos": { - "offset": 53, - "line": 10, - "column": 1 - }, - "value": "}", - "leadingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 51, - "line": 9, - "column": 1 - }, - "endPos": { - "offset": 52, - "line": 10, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 51, - "end": 52 - } - ], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 54, - "line": 10, - "column": 2 - }, - "endPos": { - "offset": 55, - "line": 11, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 54, - "end": 55 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 52, - "end": 53 - } - } - }, - { - "id": 11, - "kind": "", - "startPos": { - "offset": 57, - "line": 12, - "column": 0 - }, - "fullStart": 56, - "endPos": { - "offset": 79, - "line": 12, - "column": 22 - }, - "fullEnd": 81, - "start": 57, - "end": 79, - "type": { - "kind": "", - "startPos": { - "offset": 57, - "line": 12, - "column": 0 - }, - "endPos": { - "offset": 61, - "line": 12, - "column": 4 - }, - "value": "Note", - "leadingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 56, - "line": 11, - "column": 1 - }, - "endPos": { - "offset": 57, - "line": 12, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 56, - "end": 57 - } - ], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 57, - "end": 61 - }, - "bodyColon": { - "kind": "", - "startPos": { - "offset": 61, - "line": 12, - "column": 4 - }, - "endPos": { - "offset": 62, - "line": 12, - "column": 5 - }, - "value": ":", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 62, - "line": 12, - "column": 5 - }, - "endPos": { - "offset": 63, - "line": 12, - "column": 6 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 62, - "end": 63 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 61, - "end": 62 - }, - "body": { - "id": 10, - "kind": "", - "startPos": { - "offset": 63, - "line": 12, - "column": 6 - }, - "fullStart": 63, - "endPos": { - "offset": 79, - "line": 12, - "column": 22 - }, - "fullEnd": 81, - "start": 63, - "end": 79, - "callee": { - "id": 9, - "kind": "", - "startPos": { - "offset": 63, - "line": 12, - "column": 6 - }, - "fullStart": 63, - "endPos": { - "offset": 79, - "line": 12, - "column": 22 - }, - "fullEnd": 81, - "start": 63, - "end": 79, - "expression": { - "id": 8, - "kind": "", - "startPos": { - "offset": 63, - "line": 12, - "column": 6 - }, - "fullStart": 63, - "endPos": { - "offset": 79, - "line": 12, - "column": 22 - }, - "fullEnd": 81, - "start": 63, - "end": 79, - "literal": { - "kind": "", - "startPos": { - "offset": 63, - "line": 12, - "column": 6 - }, - "endPos": { - "offset": 79, - "line": 12, - "column": 22 - }, - "value": "This is a note", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 80, - "line": 12, - "column": 23 - }, - "endPos": { - "offset": 81, - "line": 13, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 80, - "end": 81 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 63, - "end": 79 - } - } - }, - "args": [] - } - }, - { - "id": 15, - "kind": "", - "startPos": { - "offset": 83, - "line": 14, - "column": 0 - }, - "fullStart": 82, - "endPos": { - "offset": 117, - "line": 15, - "column": 15 - }, - "fullEnd": 119, - "start": 83, - "end": 117, - "type": { - "kind": "", - "startPos": { - "offset": 83, - "line": 14, - "column": 0 - }, - "endPos": { - "offset": 87, - "line": 14, - "column": 4 - }, - "value": "Note", - "leadingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 82, - "line": 13, - "column": 1 - }, - "endPos": { - "offset": 83, - "line": 14, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 82, - "end": 83 - } - ], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 83, - "end": 87 - }, - "bodyColon": { - "kind": "", - "startPos": { - "offset": 87, - "line": 14, - "column": 4 - }, - "endPos": { - "offset": 88, - "line": 14, - "column": 5 - }, - "value": ":", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 88, - "line": 14, - "column": 5 - }, - "endPos": { - "offset": 89, - "line": 14, - "column": 6 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 88, - "end": 89 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 87, - "end": 88 - }, - "body": { - "id": 14, - "kind": "", - "startPos": { - "offset": 89, - "line": 14, - "column": 6 - }, - "fullStart": 89, - "endPos": { - "offset": 117, - "line": 15, - "column": 15 - }, - "fullEnd": 119, - "start": 89, - "end": 117, - "callee": { - "id": 13, - "kind": "", - "startPos": { - "offset": 89, - "line": 14, - "column": 6 - }, - "fullStart": 89, - "endPos": { - "offset": 117, - "line": 15, - "column": 15 - }, - "fullEnd": 119, - "start": 89, - "end": 117, - "expression": { - "id": 12, - "kind": "", - "startPos": { - "offset": 89, - "line": 14, - "column": 6 - }, - "fullStart": 89, - "endPos": { - "offset": 117, - "line": 15, - "column": 15 - }, - "fullEnd": 119, - "start": 89, - "end": 117, - "literal": { - "kind": "", - "startPos": { - "offset": 89, - "line": 14, - "column": 6 - }, - "endPos": { - "offset": 117, - "line": 15, - "column": 15 - }, - "value": "This is \r\nanother note", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 118, - "line": 15, - "column": 16 - }, - "endPos": { - "offset": 119, - "line": 16, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 118, - "end": 119 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 89, - "end": 117 - } - } - }, - "args": [] - } - }, - { - "id": 21, - "kind": "", - "startPos": { - "offset": 121, - "line": 17, - "column": 0 - }, - "fullStart": 120, - "endPos": { - "offset": 148, - "line": 19, - "column": 1 - }, - "fullEnd": 148, - "start": 121, - "end": 148, - "type": { - "kind": "", - "startPos": { - "offset": 121, - "line": 17, - "column": 0 - }, - "endPos": { - "offset": 126, - "line": 17, - "column": 5 - }, - "value": "Table", - "leadingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 120, - "line": 16, - "column": 1 - }, - "endPos": { - "offset": 121, - "line": 17, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 120, - "end": 121 - } - ], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 126, - "line": 17, - "column": 5 - }, - "endPos": { - "offset": 127, - "line": 17, - "column": 6 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 126, - "end": 127 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 121, - "end": 126 - }, - "name": { - "id": 17, - "kind": "", - "startPos": { - "offset": 127, - "line": 17, - "column": 6 - }, - "fullStart": 127, - "endPos": { - "offset": 132, - "line": 17, - "column": 11 - }, - "fullEnd": 133, - "start": 127, - "end": 132, - "expression": { - "id": 16, - "kind": "", - "startPos": { - "offset": 127, - "line": 17, - "column": 6 - }, - "fullStart": 127, - "endPos": { - "offset": 132, - "line": 17, - "column": 11 - }, - "fullEnd": 133, - "start": 127, - "end": 132, - "variable": { - "kind": "", - "startPos": { - "offset": 127, - "line": 17, - "column": 6 - }, - "endPos": { - "offset": 132, - "line": 17, - "column": 11 - }, - "value": "Users", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 132, - "line": 17, - "column": 11 - }, - "endPos": { - "offset": 133, - "line": 17, - "column": 12 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 132, - "end": 133 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 127, - "end": 132 - } - } - }, - "as": { - "kind": "", - "startPos": { - "offset": 133, - "line": 17, - "column": 12 - }, - "endPos": { - "offset": 135, - "line": 17, - "column": 14 - }, - "value": "as", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 135, - "line": 17, - "column": 14 - }, - "endPos": { - "offset": 136, - "line": 17, - "column": 15 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 135, - "end": 136 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 133, - "end": 135 - }, - "alias": { - "id": 19, - "kind": "", - "startPos": { - "offset": 136, - "line": 17, - "column": 15 - }, - "fullStart": 136, - "endPos": { - "offset": 137, - "line": 17, - "column": 16 - }, - "fullEnd": 138, - "start": 136, - "end": 137, - "expression": { - "id": 18, - "kind": "", - "startPos": { - "offset": 136, - "line": 17, - "column": 15 - }, - "fullStart": 136, - "endPos": { - "offset": 137, - "line": 17, - "column": 16 - }, - "fullEnd": 138, - "start": 136, - "end": 137, - "variable": { - "kind": "", - "startPos": { - "offset": 136, - "line": 17, - "column": 15 - }, - "endPos": { - "offset": 137, - "line": 17, - "column": 16 - }, - "value": "U", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 137, - "line": 17, - "column": 16 - }, - "endPos": { - "offset": 138, - "line": 17, - "column": 17 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 137, - "end": 138 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 136, - "end": 137 - } - } - }, - "body": { - "id": 20, - "kind": "", - "startPos": { - "offset": 138, - "line": 17, - "column": 17 - }, - "fullStart": 138, - "endPos": { - "offset": 148, - "line": 19, - "column": 1 - }, - "fullEnd": 148, - "start": 138, - "end": 148, - "blockOpenBrace": { - "kind": "", - "startPos": { - "offset": 138, - "line": 17, - "column": 17 - }, - "endPos": { - "offset": 139, - "line": 17, - "column": 18 - }, - "value": "{", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 140, - "line": 17, - "column": 19 - }, - "endPos": { - "offset": 141, - "line": 18, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 140, - "end": 141 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 138, - "end": 139 - }, - "body": [], - "blockCloseBrace": { - "kind": "", - "startPos": { - "offset": 147, - "line": 19, - "column": 0 - }, - "endPos": { - "offset": 148, - "line": 19, - "column": 1 - }, - "value": "}", - "leadingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 141, - "line": 18, - "column": 0 - }, - "endPos": { - "offset": 142, - "line": 18, - "column": 1 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 141, - "end": 142 - }, - { - "kind": "", - "startPos": { - "offset": 142, - "line": 18, - "column": 1 - }, - "endPos": { - "offset": 143, - "line": 18, - "column": 2 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 142, - "end": 143 - }, - { - "kind": "", - "startPos": { - "offset": 143, - "line": 18, - "column": 2 - }, - "endPos": { - "offset": 144, - "line": 18, - "column": 3 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 143, - "end": 144 - }, - { - "kind": "", - "startPos": { - "offset": 144, - "line": 18, - "column": 3 - }, - "endPos": { - "offset": 145, - "line": 18, - "column": 4 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 144, - "end": 145 - }, - { - "kind": "", - "startPos": { - "offset": 146, - "line": 18, - "column": 5 - }, - "endPos": { - "offset": 147, - "line": 19, - "column": 0 - }, - "value": "\n", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 146, - "end": 147 - } - ], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 147, - "end": 148 - } - } - } - ], - "eof": { - "kind": "", - "startPos": { - "offset": 148, - "line": 19, - "column": 1 - }, - "endPos": { - "offset": 148, - "line": 19, - "column": 1 - }, - "value": "", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 148, - "end": 148 - } - }, - "errors": [] +{ + "value": { + "id": 22, + "kind": "", + "startPos": { + "offset": 0, + "line": 0, + "column": 0 + }, + "fullStart": 0, + "endPos": { + "offset": 148, + "line": 19, + "column": 1 + }, + "fullEnd": 148, + "start": 0, + "end": 148, + "body": [ + { + "id": 1, + "kind": "", + "startPos": { + "offset": 0, + "line": 0, + "column": 0 + }, + "fullStart": 0, + "endPos": { + "offset": 12, + "line": 2, + "column": 1 + }, + "fullEnd": 14, + "start": 0, + "end": 12, + "type": { + "kind": "", + "startPos": { + "offset": 0, + "line": 0, + "column": 0 + }, + "endPos": { + "offset": 5, + "line": 0, + "column": 5 + }, + "value": "Table", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 5, + "line": 0, + "column": 5 + }, + "endPos": { + "offset": 6, + "line": 0, + "column": 6 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 5, + "end": 6 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 0, + "end": 5 + }, + "body": { + "id": 0, + "kind": "", + "startPos": { + "offset": 6, + "line": 0, + "column": 6 + }, + "fullStart": 6, + "endPos": { + "offset": 12, + "line": 2, + "column": 1 + }, + "fullEnd": 14, + "start": 6, + "end": 12, + "blockOpenBrace": { + "kind": "", + "startPos": { + "offset": 6, + "line": 0, + "column": 6 + }, + "endPos": { + "offset": 7, + "line": 0, + "column": 7 + }, + "value": "{", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 8, + "line": 0, + "column": 8 + }, + "endPos": { + "offset": 9, + "line": 1, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 8, + "end": 9 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 6, + "end": 7 + }, + "body": [], + "blockCloseBrace": { + "kind": "", + "startPos": { + "offset": 11, + "line": 2, + "column": 0 + }, + "endPos": { + "offset": 12, + "line": 2, + "column": 1 + }, + "value": "}", + "leadingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 10, + "line": 1, + "column": 1 + }, + "endPos": { + "offset": 11, + "line": 2, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 10, + "end": 11 + } + ], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 13, + "line": 2, + "column": 2 + }, + "endPos": { + "offset": 14, + "line": 3, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 13, + "end": 14 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 11, + "end": 12 + } + } + }, + { + "id": 5, + "kind": "", + "startPos": { + "offset": 16, + "line": 4, + "column": 0 + }, + "fullStart": 15, + "endPos": { + "offset": 39, + "line": 6, + "column": 1 + }, + "fullEnd": 41, + "start": 16, + "end": 39, + "type": { + "kind": "", + "startPos": { + "offset": 16, + "line": 4, + "column": 0 + }, + "endPos": { + "offset": 26, + "line": 4, + "column": 10 + }, + "value": "TableGroup", + "leadingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 15, + "line": 3, + "column": 1 + }, + "endPos": { + "offset": 16, + "line": 4, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 15, + "end": 16 + } + ], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 26, + "line": 4, + "column": 10 + }, + "endPos": { + "offset": 27, + "line": 4, + "column": 11 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 26, + "end": 27 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 16, + "end": 26 + }, + "name": { + "id": 3, + "kind": "", + "startPos": { + "offset": 27, + "line": 4, + "column": 11 + }, + "fullStart": 27, + "endPos": { + "offset": 32, + "line": 4, + "column": 16 + }, + "fullEnd": 33, + "start": 27, + "end": 32, + "expression": { + "id": 2, + "kind": "", + "startPos": { + "offset": 27, + "line": 4, + "column": 11 + }, + "fullStart": 27, + "endPos": { + "offset": 32, + "line": 4, + "column": 16 + }, + "fullEnd": 33, + "start": 27, + "end": 32, + "variable": { + "kind": "", + "startPos": { + "offset": 27, + "line": 4, + "column": 11 + }, + "endPos": { + "offset": 32, + "line": 4, + "column": 16 + }, + "value": "group", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 32, + "line": 4, + "column": 16 + }, + "endPos": { + "offset": 33, + "line": 4, + "column": 17 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 32, + "end": 33 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 27, + "end": 32 + } + } + }, + "body": { + "id": 4, + "kind": "", + "startPos": { + "offset": 33, + "line": 4, + "column": 17 + }, + "fullStart": 33, + "endPos": { + "offset": 39, + "line": 6, + "column": 1 + }, + "fullEnd": 41, + "start": 33, + "end": 39, + "blockOpenBrace": { + "kind": "", + "startPos": { + "offset": 33, + "line": 4, + "column": 17 + }, + "endPos": { + "offset": 34, + "line": 4, + "column": 18 + }, + "value": "{", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 35, + "line": 4, + "column": 19 + }, + "endPos": { + "offset": 36, + "line": 5, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 35, + "end": 36 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 33, + "end": 34 + }, + "body": [], + "blockCloseBrace": { + "kind": "", + "startPos": { + "offset": 38, + "line": 6, + "column": 0 + }, + "endPos": { + "offset": 39, + "line": 6, + "column": 1 + }, + "value": "}", + "leadingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 37, + "line": 5, + "column": 1 + }, + "endPos": { + "offset": 38, + "line": 6, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 37, + "end": 38 + } + ], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 40, + "line": 6, + "column": 2 + }, + "endPos": { + "offset": 41, + "line": 7, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 40, + "end": 41 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 38, + "end": 39 + } + } + }, + { + "id": 7, + "kind": "", + "startPos": { + "offset": 43, + "line": 8, + "column": 0 + }, + "fullStart": 42, + "endPos": { + "offset": 53, + "line": 10, + "column": 1 + }, + "fullEnd": 55, + "start": 43, + "end": 53, + "type": { + "kind": "", + "startPos": { + "offset": 43, + "line": 8, + "column": 0 + }, + "endPos": { + "offset": 46, + "line": 8, + "column": 3 + }, + "value": "Ref", + "leadingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 42, + "line": 7, + "column": 1 + }, + "endPos": { + "offset": 43, + "line": 8, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 42, + "end": 43 + } + ], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 46, + "line": 8, + "column": 3 + }, + "endPos": { + "offset": 47, + "line": 8, + "column": 4 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 46, + "end": 47 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 43, + "end": 46 + }, + "body": { + "id": 6, + "kind": "", + "startPos": { + "offset": 47, + "line": 8, + "column": 4 + }, + "fullStart": 47, + "endPos": { + "offset": 53, + "line": 10, + "column": 1 + }, + "fullEnd": 55, + "start": 47, + "end": 53, + "blockOpenBrace": { + "kind": "", + "startPos": { + "offset": 47, + "line": 8, + "column": 4 + }, + "endPos": { + "offset": 48, + "line": 8, + "column": 5 + }, + "value": "{", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 49, + "line": 8, + "column": 6 + }, + "endPos": { + "offset": 50, + "line": 9, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 49, + "end": 50 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 47, + "end": 48 + }, + "body": [], + "blockCloseBrace": { + "kind": "", + "startPos": { + "offset": 52, + "line": 10, + "column": 0 + }, + "endPos": { + "offset": 53, + "line": 10, + "column": 1 + }, + "value": "}", + "leadingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 51, + "line": 9, + "column": 1 + }, + "endPos": { + "offset": 52, + "line": 10, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 51, + "end": 52 + } + ], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 54, + "line": 10, + "column": 2 + }, + "endPos": { + "offset": 55, + "line": 11, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 54, + "end": 55 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 52, + "end": 53 + } + } + }, + { + "id": 11, + "kind": "", + "startPos": { + "offset": 57, + "line": 12, + "column": 0 + }, + "fullStart": 56, + "endPos": { + "offset": 79, + "line": 12, + "column": 22 + }, + "fullEnd": 81, + "start": 57, + "end": 79, + "type": { + "kind": "", + "startPos": { + "offset": 57, + "line": 12, + "column": 0 + }, + "endPos": { + "offset": 61, + "line": 12, + "column": 4 + }, + "value": "Note", + "leadingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 56, + "line": 11, + "column": 1 + }, + "endPos": { + "offset": 57, + "line": 12, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 56, + "end": 57 + } + ], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 57, + "end": 61 + }, + "bodyColon": { + "kind": "", + "startPos": { + "offset": 61, + "line": 12, + "column": 4 + }, + "endPos": { + "offset": 62, + "line": 12, + "column": 5 + }, + "value": ":", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 62, + "line": 12, + "column": 5 + }, + "endPos": { + "offset": 63, + "line": 12, + "column": 6 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 62, + "end": 63 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 61, + "end": 62 + }, + "body": { + "id": 10, + "kind": "", + "startPos": { + "offset": 63, + "line": 12, + "column": 6 + }, + "fullStart": 63, + "endPos": { + "offset": 79, + "line": 12, + "column": 22 + }, + "fullEnd": 81, + "start": 63, + "end": 79, + "callee": { + "id": 9, + "kind": "", + "startPos": { + "offset": 63, + "line": 12, + "column": 6 + }, + "fullStart": 63, + "endPos": { + "offset": 79, + "line": 12, + "column": 22 + }, + "fullEnd": 81, + "start": 63, + "end": 79, + "expression": { + "id": 8, + "kind": "", + "startPos": { + "offset": 63, + "line": 12, + "column": 6 + }, + "fullStart": 63, + "endPos": { + "offset": 79, + "line": 12, + "column": 22 + }, + "fullEnd": 81, + "start": 63, + "end": 79, + "literal": { + "kind": "", + "startPos": { + "offset": 63, + "line": 12, + "column": 6 + }, + "endPos": { + "offset": 79, + "line": 12, + "column": 22 + }, + "value": "This is a note", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 80, + "line": 12, + "column": 23 + }, + "endPos": { + "offset": 81, + "line": 13, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 80, + "end": 81 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 63, + "end": 79 + } + } + }, + "args": [] + } + }, + { + "id": 15, + "kind": "", + "startPos": { + "offset": 83, + "line": 14, + "column": 0 + }, + "fullStart": 82, + "endPos": { + "offset": 117, + "line": 15, + "column": 15 + }, + "fullEnd": 119, + "start": 83, + "end": 117, + "type": { + "kind": "", + "startPos": { + "offset": 83, + "line": 14, + "column": 0 + }, + "endPos": { + "offset": 87, + "line": 14, + "column": 4 + }, + "value": "Note", + "leadingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 82, + "line": 13, + "column": 1 + }, + "endPos": { + "offset": 83, + "line": 14, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 82, + "end": 83 + } + ], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 83, + "end": 87 + }, + "bodyColon": { + "kind": "", + "startPos": { + "offset": 87, + "line": 14, + "column": 4 + }, + "endPos": { + "offset": 88, + "line": 14, + "column": 5 + }, + "value": ":", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 88, + "line": 14, + "column": 5 + }, + "endPos": { + "offset": 89, + "line": 14, + "column": 6 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 88, + "end": 89 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 87, + "end": 88 + }, + "body": { + "id": 14, + "kind": "", + "startPos": { + "offset": 89, + "line": 14, + "column": 6 + }, + "fullStart": 89, + "endPos": { + "offset": 117, + "line": 15, + "column": 15 + }, + "fullEnd": 119, + "start": 89, + "end": 117, + "callee": { + "id": 13, + "kind": "", + "startPos": { + "offset": 89, + "line": 14, + "column": 6 + }, + "fullStart": 89, + "endPos": { + "offset": 117, + "line": 15, + "column": 15 + }, + "fullEnd": 119, + "start": 89, + "end": 117, + "expression": { + "id": 12, + "kind": "", + "startPos": { + "offset": 89, + "line": 14, + "column": 6 + }, + "fullStart": 89, + "endPos": { + "offset": 117, + "line": 15, + "column": 15 + }, + "fullEnd": 119, + "start": 89, + "end": 117, + "literal": { + "kind": "", + "startPos": { + "offset": 89, + "line": 14, + "column": 6 + }, + "endPos": { + "offset": 117, + "line": 15, + "column": 15 + }, + "value": "This is \r\nanother note", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 118, + "line": 15, + "column": 16 + }, + "endPos": { + "offset": 119, + "line": 16, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 118, + "end": 119 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 89, + "end": 117 + } + } + }, + "args": [] + } + }, + { + "id": 21, + "kind": "", + "startPos": { + "offset": 121, + "line": 17, + "column": 0 + }, + "fullStart": 120, + "endPos": { + "offset": 148, + "line": 19, + "column": 1 + }, + "fullEnd": 148, + "start": 121, + "end": 148, + "type": { + "kind": "", + "startPos": { + "offset": 121, + "line": 17, + "column": 0 + }, + "endPos": { + "offset": 126, + "line": 17, + "column": 5 + }, + "value": "Table", + "leadingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 120, + "line": 16, + "column": 1 + }, + "endPos": { + "offset": 121, + "line": 17, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 120, + "end": 121 + } + ], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 126, + "line": 17, + "column": 5 + }, + "endPos": { + "offset": 127, + "line": 17, + "column": 6 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 126, + "end": 127 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 121, + "end": 126 + }, + "name": { + "id": 17, + "kind": "", + "startPos": { + "offset": 127, + "line": 17, + "column": 6 + }, + "fullStart": 127, + "endPos": { + "offset": 132, + "line": 17, + "column": 11 + }, + "fullEnd": 133, + "start": 127, + "end": 132, + "expression": { + "id": 16, + "kind": "", + "startPos": { + "offset": 127, + "line": 17, + "column": 6 + }, + "fullStart": 127, + "endPos": { + "offset": 132, + "line": 17, + "column": 11 + }, + "fullEnd": 133, + "start": 127, + "end": 132, + "variable": { + "kind": "", + "startPos": { + "offset": 127, + "line": 17, + "column": 6 + }, + "endPos": { + "offset": 132, + "line": 17, + "column": 11 + }, + "value": "Users", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 132, + "line": 17, + "column": 11 + }, + "endPos": { + "offset": 133, + "line": 17, + "column": 12 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 132, + "end": 133 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 127, + "end": 132 + } + } + }, + "as": { + "kind": "", + "startPos": { + "offset": 133, + "line": 17, + "column": 12 + }, + "endPos": { + "offset": 135, + "line": 17, + "column": 14 + }, + "value": "as", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 135, + "line": 17, + "column": 14 + }, + "endPos": { + "offset": 136, + "line": 17, + "column": 15 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 135, + "end": 136 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 133, + "end": 135 + }, + "alias": { + "id": 19, + "kind": "", + "startPos": { + "offset": 136, + "line": 17, + "column": 15 + }, + "fullStart": 136, + "endPos": { + "offset": 137, + "line": 17, + "column": 16 + }, + "fullEnd": 138, + "start": 136, + "end": 137, + "expression": { + "id": 18, + "kind": "", + "startPos": { + "offset": 136, + "line": 17, + "column": 15 + }, + "fullStart": 136, + "endPos": { + "offset": 137, + "line": 17, + "column": 16 + }, + "fullEnd": 138, + "start": 136, + "end": 137, + "variable": { + "kind": "", + "startPos": { + "offset": 136, + "line": 17, + "column": 15 + }, + "endPos": { + "offset": 137, + "line": 17, + "column": 16 + }, + "value": "U", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 137, + "line": 17, + "column": 16 + }, + "endPos": { + "offset": 138, + "line": 17, + "column": 17 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 137, + "end": 138 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 136, + "end": 137 + } + } + }, + "body": { + "id": 20, + "kind": "", + "startPos": { + "offset": 138, + "line": 17, + "column": 17 + }, + "fullStart": 138, + "endPos": { + "offset": 148, + "line": 19, + "column": 1 + }, + "fullEnd": 148, + "start": 138, + "end": 148, + "blockOpenBrace": { + "kind": "", + "startPos": { + "offset": 138, + "line": 17, + "column": 17 + }, + "endPos": { + "offset": 139, + "line": 17, + "column": 18 + }, + "value": "{", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 140, + "line": 17, + "column": 19 + }, + "endPos": { + "offset": 141, + "line": 18, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 140, + "end": 141 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 138, + "end": 139 + }, + "body": [], + "blockCloseBrace": { + "kind": "", + "startPos": { + "offset": 147, + "line": 19, + "column": 0 + }, + "endPos": { + "offset": 148, + "line": 19, + "column": 1 + }, + "value": "}", + "leadingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 141, + "line": 18, + "column": 0 + }, + "endPos": { + "offset": 142, + "line": 18, + "column": 1 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 141, + "end": 142 + }, + { + "kind": "", + "startPos": { + "offset": 142, + "line": 18, + "column": 1 + }, + "endPos": { + "offset": 143, + "line": 18, + "column": 2 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 142, + "end": 143 + }, + { + "kind": "", + "startPos": { + "offset": 143, + "line": 18, + "column": 2 + }, + "endPos": { + "offset": 144, + "line": 18, + "column": 3 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 143, + "end": 144 + }, + { + "kind": "", + "startPos": { + "offset": 144, + "line": 18, + "column": 3 + }, + "endPos": { + "offset": 145, + "line": 18, + "column": 4 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 144, + "end": 145 + }, + { + "kind": "", + "startPos": { + "offset": 146, + "line": 18, + "column": 5 + }, + "endPos": { + "offset": 147, + "line": 19, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 146, + "end": 147 + } + ], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 147, + "end": 148 + } + } + } + ], + "eof": { + "kind": "", + "startPos": { + "offset": 148, + "line": 19, + "column": 1 + }, + "endPos": { + "offset": 148, + "line": 19, + "column": 1 + }, + "value": "", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 148, + "end": 148 + } + }, + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/element_in_simple_body.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/element_in_simple_body.out.json index 21b31eba1..26369b25b 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/element_in_simple_body.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/element_in_simple_body.out.json @@ -460,5 +460,6 @@ "end": 15, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/erroneous_setting.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/erroneous_setting.out.json index b105432df..720d67f5f 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/erroneous_setting.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/erroneous_setting.out.json @@ -5782,5 +5782,6 @@ "end": 227, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/expression.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/expression.out.json index 2c8509163..eb53b2552 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/expression.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/expression.out.json @@ -9,13 +9,13 @@ }, "fullStart": 0, "endPos": { - "offset": 461, - "line": 49, - "column": 1 + "offset": 462, + "line": 50, + "column": 0 }, - "fullEnd": 461, + "fullEnd": 462, "start": 0, - "end": 461, + "end": 462, "body": [ { "id": 216, @@ -27,13 +27,13 @@ }, "fullStart": 0, "endPos": { - "offset": 461, + "offset": 460, "line": 49, "column": 1 }, - "fullEnd": 461, + "fullEnd": 462, "start": 0, - "end": 461, + "end": 460, "type": { "kind": "", "startPos": { @@ -166,13 +166,13 @@ }, "fullStart": 16, "endPos": { - "offset": 461, + "offset": 460, "line": 49, "column": 1 }, - "fullEnd": 461, + "fullEnd": 462, "start": 16, - "end": 461, + "end": 460, "blockOpenBrace": { "kind": "", "startPos": { @@ -10621,13 +10621,13 @@ }, "fullStart": 405, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, + "fullEnd": 459, "start": 410, - "end": 458, + "end": 457, "callee": { "id": 213, "kind": "", @@ -10638,13 +10638,13 @@ }, "fullStart": 405, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, + "fullEnd": 459, "start": 410, - "end": 458, + "end": 457, "op": { "kind": "", "startPos": { @@ -10883,22 +10883,22 @@ }, "fullStart": 415, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, + "fullEnd": 459, "start": 415, - "end": 458, + "end": 457, "op": { "kind": "", "startPos": { - "offset": 448, + "offset": 447, "line": 48, "column": 4 }, "endPos": { - "offset": 449, + "offset": 448, "line": 48, "column": 5 }, @@ -10907,12 +10907,12 @@ { "kind": "", "startPos": { - "offset": 444, + "offset": 443, "line": 48, "column": 0 }, "endPos": { - "offset": 445, + "offset": 444, "line": 48, "column": 1 }, @@ -10922,18 +10922,18 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 444, - "end": 445 + "start": 443, + "end": 444 }, { "kind": "", "startPos": { - "offset": 445, + "offset": 444, "line": 48, "column": 1 }, "endPos": { - "offset": 446, + "offset": 445, "line": 48, "column": 2 }, @@ -10943,18 +10943,18 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 445, - "end": 446 + "start": 444, + "end": 445 }, { "kind": "", "startPos": { - "offset": 446, + "offset": 445, "line": 48, "column": 2 }, "endPos": { - "offset": 447, + "offset": 446, "line": 48, "column": 3 }, @@ -10964,18 +10964,18 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 446, - "end": 447 + "start": 445, + "end": 446 }, { "kind": "", "startPos": { - "offset": 447, + "offset": 446, "line": 48, "column": 3 }, "endPos": { - "offset": 448, + "offset": 447, "line": 48, "column": 4 }, @@ -10985,16 +10985,16 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 447, - "end": 448 + "start": 446, + "end": 447 } ], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 448, - "end": 449 + "start": 447, + "end": 448 }, "leftExpression": { "id": 201, @@ -11006,22 +11006,22 @@ }, "fullStart": 415, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, + "fullEnd": 443, "start": 415, - "end": 442, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 431, + "offset": 430, "line": 47, "column": 4 }, "endPos": { - "offset": 432, + "offset": 431, "line": 47, "column": 5 }, @@ -11030,12 +11030,12 @@ { "kind": "", "startPos": { - "offset": 426, + "offset": 425, "line": 46, "column": 1 }, "endPos": { - "offset": 427, + "offset": 426, "line": 47, "column": 0 }, @@ -11045,18 +11045,18 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 426, - "end": 427 + "start": 425, + "end": 426 }, { "kind": "", "startPos": { - "offset": 427, + "offset": 426, "line": 47, "column": 0 }, "endPos": { - "offset": 428, + "offset": 427, "line": 47, "column": 1 }, @@ -11066,18 +11066,18 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 427, - "end": 428 + "start": 426, + "end": 427 }, { "kind": "", "startPos": { - "offset": 428, + "offset": 427, "line": 47, "column": 1 }, "endPos": { - "offset": 429, + "offset": 428, "line": 47, "column": 2 }, @@ -11087,18 +11087,18 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 428, - "end": 429 + "start": 427, + "end": 428 }, { "kind": "", "startPos": { - "offset": 429, + "offset": 428, "line": 47, "column": 2 }, "endPos": { - "offset": 430, + "offset": 429, "line": 47, "column": 3 }, @@ -11108,18 +11108,18 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 429, - "end": 430 + "start": 428, + "end": 429 }, { "kind": "", "startPos": { - "offset": 430, + "offset": 429, "line": 47, "column": 3 }, "endPos": { - "offset": 431, + "offset": 430, "line": 47, "column": 4 }, @@ -11129,16 +11129,16 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 430, - "end": 431 + "start": 429, + "end": 430 } ], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 431, - "end": 432 + "start": 430, + "end": 431 }, "leftExpression": { "id": 189, @@ -11150,13 +11150,13 @@ }, "fullStart": 415, "endPos": { - "offset": 423, + "offset": 422, "line": 45, - "column": 17 + "column": 16 }, - "fullEnd": 425, + "fullEnd": 424, "start": 415, - "end": 423, + "end": 422, "op": { "kind": "", "startPos": { @@ -11289,13 +11289,13 @@ }, "fullStart": 419, "endPos": { - "offset": 423, + "offset": 422, "line": 45, - "column": 17 + "column": 16 }, - "fullEnd": 425, + "fullEnd": 424, "start": 419, - "end": 423, + "end": 422, "callee": { "id": 186, "kind": "", @@ -11310,7 +11310,7 @@ "line": 45, "column": 14 }, - "fullEnd": 421, + "fullEnd": 420, "start": 419, "end": 420, "expression": { @@ -11327,7 +11327,7 @@ "line": 45, "column": 14 }, - "fullEnd": 421, + "fullEnd": 420, "start": 419, "end": 420, "variable": { @@ -11344,29 +11344,7 @@ }, "value": "c", "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 420, - "line": 45, - "column": 14 - }, - "endPos": { - "offset": 421, - "line": 45, - "column": 15 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 420, - "end": 421 - } - ], + "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, @@ -11379,30 +11357,30 @@ "id": 187, "kind": "", "startPos": { - "offset": 421, + "offset": 420, "line": 45, - "column": 15 + "column": 14 }, - "fullStart": 421, + "fullStart": 420, "endPos": { - "offset": 423, + "offset": 422, "line": 45, - "column": 17 + "column": 16 }, - "fullEnd": 425, - "start": 421, - "end": 423, + "fullEnd": 424, + "start": 420, + "end": 422, "tupleOpenParen": { "kind": "", "startPos": { - "offset": 421, + "offset": 420, "line": 45, - "column": 15 + "column": 14 }, "endPos": { - "offset": 422, + "offset": 421, "line": 45, - "column": 16 + "column": 15 }, "value": "(", "leadingTrivia": [], @@ -11410,22 +11388,22 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 421, - "end": 422 + "start": 420, + "end": 421 }, "elementList": [], "commaList": [], "tupleCloseParen": { "kind": "", "startPos": { - "offset": 422, + "offset": 421, "line": 45, - "column": 16 + "column": 15 }, "endPos": { - "offset": 423, + "offset": 422, "line": 45, - "column": 17 + "column": 16 }, "value": ")", "leadingTrivia": [], @@ -11433,12 +11411,12 @@ { "kind": "", "startPos": { - "offset": 424, + "offset": 423, "line": 45, - "column": 18 + "column": 17 }, "endPos": { - "offset": 425, + "offset": 424, "line": 46, "column": 0 }, @@ -11448,15 +11426,15 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 424, - "end": 425 + "start": 423, + "end": 424 } ], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 422, - "end": 423 + "start": 421, + "end": 422 } } } @@ -11465,28 +11443,28 @@ "id": 200, "kind": "", "startPos": { - "offset": 432, + "offset": 431, "line": 47, "column": 5 }, - "fullStart": 432, + "fullStart": 431, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 432, - "end": 442, + "fullEnd": 443, + "start": 431, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 432, + "offset": 431, "line": 47, "column": 5 }, "endPos": { - "offset": 433, + "offset": 432, "line": 47, "column": 6 }, @@ -11496,35 +11474,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 432, - "end": 433 + "start": 431, + "end": 432 }, "expression": { "id": 199, "kind": "", "startPos": { - "offset": 433, + "offset": 432, "line": 47, "column": 6 }, - "fullStart": 433, + "fullStart": 432, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 433, - "end": 442, + "fullEnd": 443, + "start": 432, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 433, + "offset": 432, "line": 47, "column": 6 }, "endPos": { - "offset": 434, + "offset": 433, "line": 47, "column": 7 }, @@ -11534,35 +11512,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 433, - "end": 434 + "start": 432, + "end": 433 }, "expression": { "id": 198, "kind": "", "startPos": { - "offset": 434, + "offset": 433, "line": 47, "column": 7 }, - "fullStart": 434, + "fullStart": 433, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 434, - "end": 442, + "fullEnd": 443, + "start": 433, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 434, + "offset": 433, "line": 47, "column": 7 }, "endPos": { - "offset": 435, + "offset": 434, "line": 47, "column": 8 }, @@ -11572,35 +11550,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 434, - "end": 435 + "start": 433, + "end": 434 }, "expression": { "id": 197, "kind": "", "startPos": { - "offset": 435, + "offset": 434, "line": 47, "column": 8 }, - "fullStart": 435, + "fullStart": 434, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 435, - "end": 442, + "fullEnd": 443, + "start": 434, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 435, + "offset": 434, "line": 47, "column": 8 }, "endPos": { - "offset": 436, + "offset": 435, "line": 47, "column": 9 }, @@ -11610,35 +11588,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 435, - "end": 436 + "start": 434, + "end": 435 }, "expression": { "id": 196, "kind": "", "startPos": { - "offset": 436, + "offset": 435, "line": 47, "column": 9 }, - "fullStart": 436, + "fullStart": 435, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 436, - "end": 442, + "fullEnd": 443, + "start": 435, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 436, + "offset": 435, "line": 47, "column": 9 }, "endPos": { - "offset": 437, + "offset": 436, "line": 47, "column": 10 }, @@ -11648,35 +11626,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 436, - "end": 437 + "start": 435, + "end": 436 }, "expression": { "id": 195, "kind": "", "startPos": { - "offset": 437, + "offset": 436, "line": 47, "column": 10 }, - "fullStart": 437, + "fullStart": 436, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 437, - "end": 442, + "fullEnd": 443, + "start": 436, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 437, + "offset": 436, "line": 47, "column": 10 }, "endPos": { - "offset": 438, + "offset": 437, "line": 47, "column": 11 }, @@ -11686,35 +11664,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 437, - "end": 438 + "start": 436, + "end": 437 }, "expression": { "id": 194, "kind": "", "startPos": { - "offset": 438, + "offset": 437, "line": 47, "column": 11 }, - "fullStart": 438, + "fullStart": 437, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 438, - "end": 442, + "fullEnd": 443, + "start": 437, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 438, + "offset": 437, "line": 47, "column": 11 }, "endPos": { - "offset": 439, + "offset": 438, "line": 47, "column": 12 }, @@ -11724,35 +11702,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 438, - "end": 439 + "start": 437, + "end": 438 }, "expression": { "id": 193, "kind": "", "startPos": { - "offset": 439, + "offset": 438, "line": 47, "column": 12 }, - "fullStart": 439, + "fullStart": 438, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 439, - "end": 442, + "fullEnd": 443, + "start": 438, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 439, + "offset": 438, "line": 47, "column": 12 }, "endPos": { - "offset": 440, + "offset": 439, "line": 47, "column": 13 }, @@ -11762,35 +11740,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 439, - "end": 440 + "start": 438, + "end": 439 }, "expression": { "id": 192, "kind": "", "startPos": { - "offset": 440, + "offset": 439, "line": 47, "column": 13 }, - "fullStart": 440, + "fullStart": 439, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 440, - "end": 442, + "fullEnd": 443, + "start": 439, + "end": 441, "op": { "kind": "", "startPos": { - "offset": 440, + "offset": 439, "line": 47, "column": 13 }, "endPos": { - "offset": 441, + "offset": 440, "line": 47, "column": 14 }, @@ -11800,52 +11778,52 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 440, - "end": 441 + "start": 439, + "end": 440 }, "expression": { "id": 191, "kind": "", "startPos": { - "offset": 441, + "offset": 440, "line": 47, "column": 14 }, - "fullStart": 441, + "fullStart": 440, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 441, - "end": 442, + "fullEnd": 443, + "start": 440, + "end": 441, "expression": { "id": 190, "kind": "", "startPos": { - "offset": 441, + "offset": 440, "line": 47, "column": 14 }, - "fullStart": 441, + "fullStart": 440, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, - "fullEnd": 444, - "start": 441, - "end": 442, + "fullEnd": 443, + "start": 440, + "end": 441, "literal": { "kind": "", "startPos": { - "offset": 441, + "offset": 440, "line": 47, "column": 14 }, "endPos": { - "offset": 442, + "offset": 441, "line": 47, "column": 15 }, @@ -11855,12 +11833,12 @@ { "kind": "", "startPos": { - "offset": 443, + "offset": 442, "line": 47, "column": 16 }, "endPos": { - "offset": 444, + "offset": 443, "line": 48, "column": 0 }, @@ -11870,15 +11848,15 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 443, - "end": 444 + "start": 442, + "end": 443 } ], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 441, - "end": 442 + "start": 440, + "end": 441 } } } @@ -11896,28 +11874,28 @@ "id": 211, "kind": "", "startPos": { - "offset": 449, + "offset": 448, "line": 48, "column": 5 }, - "fullStart": 449, + "fullStart": 448, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 449, - "end": 458, + "fullEnd": 459, + "start": 448, + "end": 457, "op": { "kind": "", "startPos": { - "offset": 449, + "offset": 448, "line": 48, "column": 5 }, "endPos": { - "offset": 450, + "offset": 449, "line": 48, "column": 6 }, @@ -11927,35 +11905,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 449, - "end": 450 + "start": 448, + "end": 449 }, "expression": { "id": 210, "kind": "", "startPos": { - "offset": 450, + "offset": 449, "line": 48, "column": 6 }, - "fullStart": 450, + "fullStart": 449, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 450, - "end": 458, + "fullEnd": 459, + "start": 449, + "end": 457, "op": { "kind": "", "startPos": { - "offset": 450, + "offset": 449, "line": 48, "column": 6 }, "endPos": { - "offset": 451, + "offset": 450, "line": 48, "column": 7 }, @@ -11965,35 +11943,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 450, - "end": 451 + "start": 449, + "end": 450 }, "expression": { "id": 209, "kind": "", "startPos": { - "offset": 451, + "offset": 450, "line": 48, "column": 7 }, - "fullStart": 451, + "fullStart": 450, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 451, - "end": 458, + "fullEnd": 459, + "start": 450, + "end": 457, "op": { "kind": "", "startPos": { - "offset": 451, + "offset": 450, "line": 48, "column": 7 }, "endPos": { - "offset": 452, + "offset": 451, "line": 48, "column": 8 }, @@ -12003,35 +11981,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 451, - "end": 452 + "start": 450, + "end": 451 }, "expression": { "id": 208, "kind": "", "startPos": { - "offset": 452, + "offset": 451, "line": 48, "column": 8 }, - "fullStart": 452, + "fullStart": 451, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 452, - "end": 458, + "fullEnd": 459, + "start": 451, + "end": 457, "op": { "kind": "", "startPos": { - "offset": 452, + "offset": 451, "line": 48, "column": 8 }, "endPos": { - "offset": 453, + "offset": 452, "line": 48, "column": 9 }, @@ -12041,35 +12019,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 452, - "end": 453 + "start": 451, + "end": 452 }, "expression": { "id": 207, "kind": "", "startPos": { - "offset": 453, + "offset": 452, "line": 48, "column": 9 }, - "fullStart": 453, + "fullStart": 452, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 453, - "end": 458, + "fullEnd": 459, + "start": 452, + "end": 457, "op": { "kind": "", "startPos": { - "offset": 453, + "offset": 452, "line": 48, "column": 9 }, "endPos": { - "offset": 454, + "offset": 453, "line": 48, "column": 10 }, @@ -12079,35 +12057,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 453, - "end": 454 + "start": 452, + "end": 453 }, "expression": { "id": 206, "kind": "", "startPos": { - "offset": 454, + "offset": 453, "line": 48, "column": 10 }, - "fullStart": 454, + "fullStart": 453, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 454, - "end": 458, + "fullEnd": 459, + "start": 453, + "end": 457, "op": { "kind": "", "startPos": { - "offset": 454, + "offset": 453, "line": 48, "column": 10 }, "endPos": { - "offset": 455, + "offset": 454, "line": 48, "column": 11 }, @@ -12117,35 +12095,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 454, - "end": 455 + "start": 453, + "end": 454 }, "expression": { "id": 205, "kind": "", "startPos": { - "offset": 455, + "offset": 454, "line": 48, "column": 11 }, - "fullStart": 455, + "fullStart": 454, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 455, - "end": 458, + "fullEnd": 459, + "start": 454, + "end": 457, "op": { "kind": "", "startPos": { - "offset": 455, + "offset": 454, "line": 48, "column": 11 }, "endPos": { - "offset": 456, + "offset": 455, "line": 48, "column": 12 }, @@ -12155,35 +12133,35 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 455, - "end": 456 + "start": 454, + "end": 455 }, "expression": { "id": 204, "kind": "", "startPos": { - "offset": 456, + "offset": 455, "line": 48, "column": 12 }, - "fullStart": 456, + "fullStart": 455, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 456, - "end": 458, + "fullEnd": 459, + "start": 455, + "end": 457, "op": { "kind": "", "startPos": { - "offset": 456, + "offset": 455, "line": 48, "column": 12 }, "endPos": { - "offset": 457, + "offset": 456, "line": 48, "column": 13 }, @@ -12193,52 +12171,52 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 456, - "end": 457 + "start": 455, + "end": 456 }, "expression": { "id": 203, "kind": "", "startPos": { - "offset": 457, + "offset": 456, "line": 48, "column": 13 }, - "fullStart": 457, + "fullStart": 456, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 457, - "end": 458, + "fullEnd": 459, + "start": 456, + "end": 457, "expression": { "id": 202, "kind": "", "startPos": { - "offset": 457, + "offset": 456, "line": 48, "column": 13 }, - "fullStart": 457, + "fullStart": 456, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, - "fullEnd": 460, - "start": 457, - "end": 458, + "fullEnd": 459, + "start": 456, + "end": 457, "literal": { "kind": "", "startPos": { - "offset": 457, + "offset": 456, "line": 48, "column": 13 }, "endPos": { - "offset": 458, + "offset": 457, "line": 48, "column": 14 }, @@ -12248,12 +12226,12 @@ { "kind": "", "startPos": { - "offset": 459, + "offset": 458, "line": 48, "column": 15 }, "endPos": { - "offset": 460, + "offset": 459, "line": 49, "column": 0 }, @@ -12263,15 +12241,15 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 459, - "end": 460 + "start": 458, + "end": 459 } ], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 457, - "end": 458 + "start": 456, + "end": 457 } } } @@ -12291,23 +12269,45 @@ "blockCloseBrace": { "kind": "", "startPos": { - "offset": 460, + "offset": 459, "line": 49, "column": 0 }, "endPos": { - "offset": 461, + "offset": 460, "line": 49, "column": 1 }, "value": "}", "leadingTrivia": [], - "trailingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 461, + "line": 49, + "column": 2 + }, + "endPos": { + "offset": 462, + "line": 50, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 461, + "end": 462 + } + ], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 460, - "end": 461 + "start": 459, + "end": 460 } } } @@ -12315,14 +12315,14 @@ "eof": { "kind": "", "startPos": { - "offset": 461, - "line": 49, - "column": 1 + "offset": 462, + "line": 50, + "column": 0 }, "endPos": { - "offset": 461, - "line": 49, - "column": 1 + "offset": 462, + "line": 50, + "column": 0 }, "value": "", "leadingTrivia": [], @@ -12330,8 +12330,8 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 461, - "end": 461 + "start": 462, + "end": 462 } }, "errors": [ @@ -12448,5 +12448,6 @@ "end": 24, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/function_application.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/function_application.out.json index 4b30bf30b..145d9deb8 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/function_application.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/function_application.out.json @@ -9,13 +9,13 @@ }, "fullStart": 0, "endPos": { - "offset": 91, + "offset": 90, "line": 4, "column": 0 }, - "fullEnd": 91, + "fullEnd": 90, "start": 0, - "end": 91, + "end": 90, "body": [ { "id": 23, @@ -27,13 +27,13 @@ }, "fullStart": 0, "endPos": { - "offset": 89, + "offset": 88, "line": 3, "column": 1 }, - "fullEnd": 91, + "fullEnd": 90, "start": 0, - "end": 89, + "end": 88, "type": { "kind": "", "startPos": { @@ -166,13 +166,13 @@ }, "fullStart": 25, "endPos": { - "offset": 89, + "offset": 88, "line": 3, "column": 1 }, - "fullEnd": 91, + "fullEnd": 90, "start": 25, - "end": 89, + "end": 88, "blockOpenBrace": { "kind": "", "startPos": { @@ -677,13 +677,13 @@ }, "fullStart": 58, "endPos": { - "offset": 86, + "offset": 85, "line": 2, - "column": 28 + "column": 27 }, - "fullEnd": 88, + "fullEnd": 87, "start": 62, - "end": 86, + "end": 85, "callee": { "id": 11, "kind": "", @@ -859,13 +859,13 @@ }, "fullStart": 67, "endPos": { - "offset": 77, + "offset": 76, "line": 2, - "column": 19 + "column": 18 }, - "fullEnd": 78, + "fullEnd": 77, "start": 67, - "end": 77, + "end": 76, "callee": { "id": 13, "kind": "", @@ -880,7 +880,7 @@ "line": 2, "column": 13 }, - "fullEnd": 72, + "fullEnd": 71, "start": 67, "end": 71, "expression": { @@ -897,7 +897,7 @@ "line": 2, "column": 13 }, - "fullEnd": 72, + "fullEnd": 71, "start": 67, "end": 71, "variable": { @@ -914,29 +914,7 @@ }, "value": "char", "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 71, - "line": 2, - "column": 13 - }, - "endPos": { - "offset": 72, - "line": 2, - "column": 14 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 71, - "end": 72 - } - ], + "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, @@ -949,30 +927,30 @@ "id": 16, "kind": "", "startPos": { - "offset": 72, + "offset": 71, "line": 2, - "column": 14 + "column": 13 }, - "fullStart": 72, + "fullStart": 71, "endPos": { - "offset": 77, + "offset": 76, "line": 2, - "column": 19 + "column": 18 }, - "fullEnd": 78, - "start": 72, - "end": 77, + "fullEnd": 77, + "start": 71, + "end": 76, "tupleOpenParen": { "kind": "", "startPos": { - "offset": 72, + "offset": 71, "line": 2, - "column": 14 + "column": 13 }, "endPos": { - "offset": 73, + "offset": 72, "line": 2, - "column": 15 + "column": 14 }, "value": "(", "leadingTrivia": [], @@ -980,55 +958,55 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 72, - "end": 73 + "start": 71, + "end": 72 }, "elementList": [ { "id": 15, "kind": "", "startPos": { - "offset": 73, + "offset": 72, "line": 2, - "column": 15 + "column": 14 }, - "fullStart": 73, + "fullStart": 72, "endPos": { - "offset": 76, + "offset": 75, "line": 2, - "column": 18 + "column": 17 }, - "fullEnd": 76, - "start": 73, - "end": 76, + "fullEnd": 75, + "start": 72, + "end": 75, "expression": { "id": 14, "kind": "", "startPos": { - "offset": 73, + "offset": 72, "line": 2, - "column": 15 + "column": 14 }, - "fullStart": 73, + "fullStart": 72, "endPos": { - "offset": 76, + "offset": 75, "line": 2, - "column": 18 + "column": 17 }, - "fullEnd": 76, - "start": 73, - "end": 76, + "fullEnd": 75, + "start": 72, + "end": 75, "literal": { "kind": "", "startPos": { - "offset": 73, + "offset": 72, "line": 2, - "column": 15 + "column": 14 }, "endPos": { - "offset": 76, + "offset": 75, "line": 2, - "column": 18 + "column": 17 }, "value": "255", "leadingTrivia": [], @@ -1036,8 +1014,8 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 73, - "end": 76 + "start": 72, + "end": 75 } } } @@ -1046,14 +1024,14 @@ "tupleCloseParen": { "kind": "", "startPos": { - "offset": 76, + "offset": 75, "line": 2, - "column": 18 + "column": 17 }, "endPos": { - "offset": 77, + "offset": 76, "line": 2, - "column": 19 + "column": 18 }, "value": ")", "leadingTrivia": [], @@ -1061,14 +1039,14 @@ { "kind": "", "startPos": { - "offset": 77, + "offset": 76, "line": 2, - "column": 19 + "column": 18 }, "endPos": { - "offset": 78, + "offset": 77, "line": 2, - "column": 20 + "column": 19 }, "value": " ", "leadingTrivia": [], @@ -1076,15 +1054,15 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 77, - "end": 78 + "start": 76, + "end": 77 } ], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 76, - "end": 77 + "start": 75, + "end": 76 } } }, @@ -1092,30 +1070,30 @@ "id": 20, "kind": "", "startPos": { - "offset": 78, + "offset": 77, "line": 2, - "column": 20 + "column": 19 }, - "fullStart": 78, + "fullStart": 77, "endPos": { - "offset": 86, + "offset": 85, "line": 2, - "column": 28 + "column": 27 }, - "fullEnd": 88, - "start": 78, - "end": 86, + "fullEnd": 87, + "start": 77, + "end": 85, "listOpenBracket": { "kind": "", "startPos": { - "offset": 78, + "offset": 77, "line": 2, - "column": 20 + "column": 19 }, "endPos": { - "offset": 79, + "offset": 78, "line": 2, - "column": 21 + "column": 20 }, "value": "[", "leadingTrivia": [], @@ -1123,56 +1101,56 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 78, - "end": 79 + "start": 77, + "end": 78 }, "elementList": [ { "id": 19, "kind": "", "startPos": { - "offset": 79, + "offset": 78, "line": 2, - "column": 21 + "column": 20 }, - "fullStart": 79, + "fullStart": 78, "endPos": { - "offset": 85, + "offset": 84, "line": 2, - "column": 27 + "column": 26 }, - "fullEnd": 85, - "start": 79, - "end": 85, + "fullEnd": 84, + "start": 78, + "end": 84, "name": { "id": 18, "kind": "", "startPos": { - "offset": 79, + "offset": 78, "line": 2, - "column": 21 + "column": 20 }, - "fullStart": 79, + "fullStart": 78, "endPos": { - "offset": 85, + "offset": 84, "line": 2, - "column": 27 + "column": 26 }, - "fullEnd": 85, - "start": 79, - "end": 85, + "fullEnd": 84, + "start": 78, + "end": 84, "identifiers": [ { "kind": "", "startPos": { - "offset": 79, + "offset": 78, "line": 2, - "column": 21 + "column": 20 }, "endPos": { - "offset": 85, + "offset": 84, "line": 2, - "column": 27 + "column": 26 }, "value": "unique", "leadingTrivia": [], @@ -1180,8 +1158,8 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 79, - "end": 85 + "start": 78, + "end": 84 } ] } @@ -1191,14 +1169,14 @@ "listCloseBracket": { "kind": "", "startPos": { - "offset": 85, + "offset": 84, "line": 2, - "column": 27 + "column": 26 }, "endPos": { - "offset": 86, + "offset": 85, "line": 2, - "column": 28 + "column": 27 }, "value": "]", "leadingTrivia": [], @@ -1206,12 +1184,12 @@ { "kind": "", "startPos": { - "offset": 87, + "offset": 86, "line": 2, - "column": 29 + "column": 28 }, "endPos": { - "offset": 88, + "offset": 87, "line": 3, "column": 0 }, @@ -1221,15 +1199,15 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 87, - "end": 88 + "start": 86, + "end": 87 } ], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 85, - "end": 86 + "start": 84, + "end": 85 } } ] @@ -1238,12 +1216,12 @@ "blockCloseBrace": { "kind": "", "startPos": { - "offset": 88, + "offset": 87, "line": 3, "column": 0 }, "endPos": { - "offset": 89, + "offset": 88, "line": 3, "column": 1 }, @@ -1253,12 +1231,12 @@ { "kind": "", "startPos": { - "offset": 90, + "offset": 89, "line": 3, "column": 2 }, "endPos": { - "offset": 91, + "offset": 90, "line": 4, "column": 0 }, @@ -1268,15 +1246,15 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 90, - "end": 91 + "start": 89, + "end": 90 } ], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 88, - "end": 89 + "start": 87, + "end": 88 } } } @@ -1284,12 +1262,12 @@ "eof": { "kind": "", "startPos": { - "offset": 91, + "offset": 90, "line": 4, "column": 0 }, "endPos": { - "offset": 91, + "offset": 90, "line": 4, "column": 0 }, @@ -1299,9 +1277,10 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 91, - "end": 91 + "start": 90, + "end": 90 } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/last_invalid_number.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/last_invalid_number.out.json index edfd77ac1..ac698676c 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/last_invalid_number.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/last_invalid_number.out.json @@ -797,5 +797,6 @@ "end": 37, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/list_expression.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/list_expression.out.json index e89bdcf75..a936ed4bf 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/list_expression.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/list_expression.out.json @@ -2805,5 +2805,6 @@ "end": 186, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/literal_element_expression.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/literal_element_expression.out.json index eb443eb60..e8240f900 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/literal_element_expression.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/literal_element_expression.out.json @@ -2483,5 +2483,6 @@ "end": 227 } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/nested_element.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/nested_element.out.json index 1202a310c..961bf874f 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/nested_element.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/nested_element.out.json @@ -2288,5 +2288,6 @@ "end": 199 } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/old_undocumented_syntax.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/old_undocumented_syntax.out.json index d9070822e..a1075d6ba 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/old_undocumented_syntax.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/old_undocumented_syntax.out.json @@ -8490,5 +8490,6 @@ "end": 632 } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/partial_injection.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/partial_injection.out.json index 0878681d7..bade05be7 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/partial_injection.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/partial_injection.out.json @@ -955,5 +955,6 @@ "end": 84 } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/ref_setting.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/ref_setting.out.json index bf1f3c9d6..d78c16df4 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/ref_setting.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/ref_setting.out.json @@ -2822,5 +2822,6 @@ "end": 188 } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/trailing_comments.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/trailing_comments.out.json index d03ccbbf4..560e7e571 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/trailing_comments.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/trailing_comments.out.json @@ -5236,5 +5236,6 @@ "end": 396 } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/parser/output/tuple_expression.out.json b/packages/dbml-parse/__tests__/snapshots/parser/output/tuple_expression.out.json index 7cd477ed2..747e283c4 100644 --- a/packages/dbml-parse/__tests__/snapshots/parser/output/tuple_expression.out.json +++ b/packages/dbml-parse/__tests__/snapshots/parser/output/tuple_expression.out.json @@ -3295,5 +3295,6 @@ "end": 75, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/alias_of_duplicated_names.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/alias_of_duplicated_names.out.json index cdb2d41ff..e54d2f56e 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/alias_of_duplicated_names.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/alias_of_duplicated_names.out.json @@ -1522,5 +1522,6 @@ "end": 70, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/checks.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/checks.out.json index b55bb7193..da1fb5710 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/checks.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/checks.out.json @@ -5595,5 +5595,6 @@ "end": 364, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/column_caller_type.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/column_caller_type.out.json index 611526668..4325690a3 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/column_caller_type.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/column_caller_type.out.json @@ -2176,5 +2176,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/complex_indexes.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/complex_indexes.out.json index 5a63b348b..4e736db4b 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/complex_indexes.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/complex_indexes.out.json @@ -9171,5 +9171,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/complex_names.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/complex_names.out.json index 1ef823b4e..6edc53c1b 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/complex_names.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/complex_names.out.json @@ -6961,5 +6961,6 @@ "end": 200, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_alias_name.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_alias_name.out.json index 029c624c5..c359cb683 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_alias_name.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_alias_name.out.json @@ -3422,5 +3422,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_columns.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_columns.out.json index 3b073b7bd..25fcbe730 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_columns.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_columns.out.json @@ -4341,5 +4341,6 @@ "end": 106, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_enum_field.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_enum_field.out.json index 9f7056ea3..960b49d87 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_enum_field.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_enum_field.out.json @@ -1601,5 +1601,6 @@ "end": 24, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_names.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_names.out.json index e23e4d4f2..7e61a148c 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_names.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_names.out.json @@ -3863,5 +3863,6 @@ "end": 215, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_table_partial_injections.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_table_partial_injections.out.json index 6e03d5e2d..d9419340e 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_table_partial_injections.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/duplicate_table_partial_injections.out.json @@ -2327,5 +2327,6 @@ "end": 75, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/enum.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/enum.out.json index f8fb1c675..20e6474ba 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/enum.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/enum.out.json @@ -3532,5 +3532,6 @@ "end": 52, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/enum_as_default_column_value.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/enum_as_default_column_value.out.json index 5476496a0..4008e0ada 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/enum_as_default_column_value.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/enum_as_default_column_value.out.json @@ -7574,5 +7574,6 @@ "end": 542, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/erroneous.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/erroneous.out.json index 255a77c53..6ec23d0bb 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/erroneous.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/erroneous.out.json @@ -6011,5 +6011,6 @@ "end": 215, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/invalid_args.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/invalid_args.out.json index 4bbb619ed..3946ea630 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/invalid_args.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/invalid_args.out.json @@ -8543,5 +8543,6 @@ "end": 289, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/multiple_notes_in_table_group.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/multiple_notes_in_table_group.out.json index a48745ce8..cb1ee5337 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/multiple_notes_in_table_group.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/multiple_notes_in_table_group.out.json @@ -2930,5 +2930,6 @@ "end": 259, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/negative_number.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/negative_number.out.json index 1bbd9204e..b4cec8865 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/negative_number.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/negative_number.out.json @@ -1,6 +1,6 @@ { "value": { - "id": 143, + "id": 141, "kind": "", "startPos": { "offset": 0, @@ -18,7 +18,7 @@ "end": 292, "body": [ { - "id": 62, + "id": 61, "kind": "", "startPos": { "offset": 0, @@ -157,7 +157,7 @@ } }, "body": { - "id": 61, + "id": 60, "kind": "", "startPos": { "offset": 8, @@ -1660,7 +1660,7 @@ "symbol": 3 }, { - "id": 60, + "id": 59, "kind": "", "startPos": { "offset": 69, @@ -1800,8 +1800,8 @@ }, "args": [ { - "id": 50, - "kind": "", + "id": 39, + "kind": "", "startPos": { "offset": 73, "line": 3, @@ -1809,16 +1809,16 @@ }, "fullStart": 73, "endPos": { - "offset": 89, + "offset": 76, "line": 3, - "column": 22 + "column": 9 }, - "fullEnd": 90, + "fullEnd": 77, "start": 73, - "end": 89, - "callee": { - "id": 39, - "kind": "", + "end": 76, + "expression": { + "id": 38, + "kind": "", "startPos": { "offset": 73, "line": 3, @@ -1833,249 +1833,270 @@ "fullEnd": 77, "start": 73, "end": 76, - "expression": { - "id": 38, - "kind": "", + "variable": { + "kind": "", "startPos": { "offset": 73, "line": 3, "column": 6 }, - "fullStart": 73, "endPos": { "offset": 76, "line": 3, "column": 9 }, - "fullEnd": 77, + "value": "int", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 76, + "line": 3, + "column": 9 + }, + "endPos": { + "offset": 77, + "line": 3, + "column": 10 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 76, + "end": 77 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, "start": 73, - "end": 76, - "variable": { - "kind": "", - "startPos": { - "offset": 73, - "line": 3, - "column": 6 - }, - "endPos": { - "offset": 76, - "line": 3, - "column": 9 - }, - "value": "int", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 76, - "line": 3, - "column": 9 - }, - "endPos": { - "offset": 77, - "line": 3, - "column": 10 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 76, - "end": 77 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 73, - "end": 76 - } + "end": 76 } + } + }, + { + "id": 49, + "kind": "", + "startPos": { + "offset": 77, + "line": 3, + "column": 10 }, - "argumentList": { - "id": 49, - "kind": "", + "fullStart": 77, + "endPos": { + "offset": 89, + "line": 3, + "column": 22 + }, + "fullEnd": 90, + "start": 77, + "end": 89, + "tupleOpenParen": { + "kind": "", "startPos": { "offset": 77, "line": 3, "column": 10 }, - "fullStart": 77, "endPos": { - "offset": 89, + "offset": 78, "line": 3, - "column": 22 + "column": 11 }, - "fullEnd": 90, + "value": "(", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, "start": 77, - "end": 89, - "tupleOpenParen": { - "kind": "", + "end": 78 + }, + "elementList": [ + { + "id": 48, + "kind": "", "startPos": { - "offset": 77, + "offset": 78, "line": 3, - "column": 10 + "column": 11 }, + "fullStart": 78, "endPos": { - "offset": 78, + "offset": 88, "line": 3, - "column": 11 + "column": 21 }, - "value": "(", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 77, - "end": 78 - }, - "elementList": [ - { - "id": 48, - "kind": "", + "fullEnd": 88, + "start": 78, + "end": 88, + "op": { + "kind": "", "startPos": { "offset": 78, "line": 3, "column": 11 }, - "fullStart": 78, + "endPos": { + "offset": 79, + "line": 3, + "column": 12 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 78, + "end": 79 + }, + "expression": { + "id": 47, + "kind": "", + "startPos": { + "offset": 79, + "line": 3, + "column": 12 + }, + "fullStart": 79, "endPos": { "offset": 88, "line": 3, "column": 21 }, "fullEnd": 88, - "start": 78, + "start": 79, "end": 88, "op": { "kind": "", "startPos": { - "offset": 78, + "offset": 79, "line": 3, - "column": 11 + "column": 12 }, "endPos": { - "offset": 79, + "offset": 80, "line": 3, - "column": 12 + "column": 13 }, - "value": "+", + "value": "-", "leadingTrivia": [], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 78, - "end": 79 + "start": 79, + "end": 80 }, "expression": { - "id": 47, + "id": 46, "kind": "", "startPos": { - "offset": 79, + "offset": 80, "line": 3, - "column": 12 + "column": 13 }, - "fullStart": 79, + "fullStart": 80, "endPos": { "offset": 88, "line": 3, "column": 21 }, "fullEnd": 88, - "start": 79, + "start": 80, "end": 88, "op": { "kind": "", "startPos": { - "offset": 79, + "offset": 80, "line": 3, - "column": 12 + "column": 13 }, "endPos": { - "offset": 80, + "offset": 81, "line": 3, - "column": 13 + "column": 14 }, - "value": "-", + "value": "+", "leadingTrivia": [], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 79, - "end": 80 + "start": 80, + "end": 81 }, "expression": { - "id": 46, + "id": 45, "kind": "", "startPos": { - "offset": 80, + "offset": 81, "line": 3, - "column": 13 + "column": 14 }, - "fullStart": 80, + "fullStart": 81, "endPos": { "offset": 88, "line": 3, "column": 21 }, "fullEnd": 88, - "start": 80, + "start": 81, "end": 88, "op": { "kind": "", "startPos": { - "offset": 80, + "offset": 81, "line": 3, - "column": 13 + "column": 14 }, "endPos": { - "offset": 81, + "offset": 82, "line": 3, - "column": 14 + "column": 15 }, - "value": "+", + "value": "-", "leadingTrivia": [], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 80, - "end": 81 + "start": 81, + "end": 82 }, "expression": { - "id": 45, + "id": 44, "kind": "", "startPos": { - "offset": 81, + "offset": 82, "line": 3, - "column": 14 + "column": 15 }, - "fullStart": 81, + "fullStart": 82, "endPos": { "offset": 88, "line": 3, "column": 21 }, "fullEnd": 88, - "start": 81, + "start": 82, "end": 88, "op": { "kind": "", "startPos": { - "offset": 81, + "offset": 82, "line": 3, - "column": 14 + "column": 15 }, "endPos": { - "offset": 82, + "offset": 83, "line": 3, - "column": 15 + "column": 16 }, "value": "-", "leadingTrivia": [], @@ -2083,37 +2104,37 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 81, - "end": 82 + "start": 82, + "end": 83 }, "expression": { - "id": 44, + "id": 43, "kind": "", "startPos": { - "offset": 82, + "offset": 83, "line": 3, - "column": 15 + "column": 16 }, - "fullStart": 82, + "fullStart": 83, "endPos": { "offset": 88, "line": 3, "column": 21 }, "fullEnd": 88, - "start": 82, + "start": 83, "end": 88, "op": { "kind": "", "startPos": { - "offset": 82, + "offset": 83, "line": 3, - "column": 15 + "column": 16 }, "endPos": { - "offset": 83, + "offset": 84, "line": 3, - "column": 16 + "column": 17 }, "value": "-", "leadingTrivia": [], @@ -2121,88 +2142,67 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 82, - "end": 83 + "start": 83, + "end": 84 }, "expression": { - "id": 43, + "id": 42, "kind": "", "startPos": { - "offset": 83, + "offset": 84, "line": 3, - "column": 16 + "column": 17 }, - "fullStart": 83, + "fullStart": 84, "endPos": { "offset": 88, "line": 3, "column": 21 }, "fullEnd": 88, - "start": 83, + "start": 84, "end": 88, "op": { "kind": "", "startPos": { - "offset": 83, + "offset": 84, "line": 3, - "column": 16 + "column": 17 }, "endPos": { - "offset": 84, + "offset": 85, "line": 3, - "column": 17 + "column": 18 }, - "value": "-", + "value": "+", "leadingTrivia": [], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 83, - "end": 84 + "start": 84, + "end": 85 }, "expression": { - "id": 42, - "kind": "", + "id": 41, + "kind": "", "startPos": { - "offset": 84, + "offset": 85, "line": 3, - "column": 17 + "column": 18 }, - "fullStart": 84, + "fullStart": 85, "endPos": { "offset": 88, "line": 3, "column": 21 }, "fullEnd": 88, - "start": 84, + "start": 85, "end": 88, - "op": { - "kind": "", - "startPos": { - "offset": 84, - "line": 3, - "column": 17 - }, - "endPos": { - "offset": 85, - "line": 3, - "column": 18 - }, - "value": "+", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 84, - "end": 85 - }, "expression": { - "id": 41, - "kind": "", + "id": 40, + "kind": "", "startPos": { "offset": 85, "line": 3, @@ -2217,44 +2217,26 @@ "fullEnd": 88, "start": 85, "end": 88, - "expression": { - "id": 40, - "kind": "", + "literal": { + "kind": "", "startPos": { "offset": 85, "line": 3, "column": 18 }, - "fullStart": 85, "endPos": { "offset": 88, "line": 3, "column": 21 }, - "fullEnd": 88, + "value": "0.1", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, "start": 85, - "end": 88, - "literal": { - "kind": "", - "startPos": { - "offset": 85, - "line": 3, - "column": 18 - }, - "endPos": { - "offset": 88, - "line": 3, - "column": 21 - }, - "value": "0.1", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 85, - "end": 88 - } + "end": 88 } } } @@ -2264,55 +2246,55 @@ } } } - ], - "commaList": [], - "tupleCloseParen": { - "kind": "", - "startPos": { - "offset": 88, - "line": 3, - "column": 21 - }, - "endPos": { - "offset": 89, - "line": 3, - "column": 22 - }, - "value": ")", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 89, - "line": 3, - "column": 22 - }, - "endPos": { - "offset": 90, - "line": 3, - "column": 23 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 89, - "end": 90 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 88, - "end": 89 } - } - }, + ], + "commaList": [], + "tupleCloseParen": { + "kind": "", + "startPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "endPos": { + "offset": 89, + "line": 3, + "column": 22 + }, + "value": ")", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 89, + "line": 3, + "column": 22 + }, + "endPos": { + "offset": 90, + "line": 3, + "column": 23 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 89, + "end": 90 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 88, + "end": 89 + } + }, { - "id": 59, + "id": 58, "kind": "", "startPos": { "offset": 90, @@ -2351,7 +2333,7 @@ }, "elementList": [ { - "id": 58, + "id": 57, "kind": "", "startPos": { "offset": 91, @@ -2368,7 +2350,7 @@ "start": 91, "end": 110, "name": { - "id": 51, + "id": 50, "kind": "", "startPos": { "offset": 91, @@ -2409,7 +2391,7 @@ ] }, "value": { - "id": 57, + "id": 56, "kind": "", "startPos": { "offset": 100, @@ -2447,7 +2429,7 @@ "end": 101 }, "expression": { - "id": 56, + "id": 55, "kind": "", "startPos": { "offset": 101, @@ -2485,7 +2467,7 @@ "end": 102 }, "expression": { - "id": 55, + "id": 54, "kind": "", "startPos": { "offset": 102, @@ -2523,7 +2505,7 @@ "end": 103 }, "expression": { - "id": 54, + "id": 53, "kind": "", "startPos": { "offset": 103, @@ -2561,7 +2543,7 @@ "end": 104 }, "expression": { - "id": 53, + "id": 52, "kind": "", "startPos": { "offset": 104, @@ -2578,7 +2560,7 @@ "start": 104, "end": 110, "expression": { - "id": 52, + "id": 51, "kind": "", "startPos": { "offset": 104, @@ -2801,11 +2783,11 @@ "end": 125 } }, - "parent": 143, + "parent": 141, "symbol": 1 }, { - "id": 129, + "id": 127, "kind": "", "startPos": { "offset": 127, @@ -2887,7 +2869,7 @@ "end": 139 }, "name": { - "id": 64, + "id": 63, "kind": "", "startPos": { "offset": 140, @@ -2904,7 +2886,7 @@ "start": 140, "end": 142, "expression": { - "id": 63, + "id": 62, "kind": "", "startPos": { "offset": 140, @@ -2966,7 +2948,7 @@ } }, "body": { - "id": 128, + "id": 126, "kind": "", "startPos": { "offset": 143, @@ -3027,7 +3009,7 @@ }, "body": [ { - "id": 80, + "id": 79, "kind": "", "startPos": { "offset": 147, @@ -3044,7 +3026,7 @@ "start": 147, "end": 171, "callee": { - "id": 66, + "id": 65, "kind": "", "startPos": { "offset": 147, @@ -3061,7 +3043,7 @@ "start": 147, "end": 149, "expression": { - "id": 65, + "id": 64, "kind": "", "startPos": { "offset": 147, @@ -3167,7 +3149,7 @@ }, "args": [ { - "id": 73, + "id": 72, "kind": "", "startPos": { "offset": 150, @@ -3184,7 +3166,7 @@ "start": 150, "end": 157, "callee": { - "id": 68, + "id": 67, "kind": "", "startPos": { "offset": 150, @@ -3201,7 +3183,7 @@ "start": 150, "end": 153, "expression": { - "id": 67, + "id": 66, "kind": "", "startPos": { "offset": 150, @@ -3241,7 +3223,7 @@ } }, "argumentList": { - "id": 72, + "id": 71, "kind": "", "startPos": { "offset": 153, @@ -3280,7 +3262,7 @@ }, "elementList": [ { - "id": 71, + "id": 70, "kind": "", "startPos": { "offset": 154, @@ -3318,7 +3300,7 @@ "end": 155 }, "expression": { - "id": 70, + "id": 69, "kind": "", "startPos": { "offset": 155, @@ -3335,7 +3317,7 @@ "start": 155, "end": 156, "expression": { - "id": 69, + "id": 68, "kind": "", "startPos": { "offset": 155, @@ -3423,7 +3405,7 @@ } }, { - "id": 79, + "id": 78, "kind": "", "startPos": { "offset": 158, @@ -3462,7 +3444,7 @@ }, "elementList": [ { - "id": 78, + "id": 77, "kind": "", "startPos": { "offset": 159, @@ -3479,7 +3461,7 @@ "start": 159, "end": 170, "name": { - "id": 74, + "id": 73, "kind": "", "startPos": { "offset": 159, @@ -3520,7 +3502,7 @@ ] }, "value": { - "id": 77, + "id": 76, "kind": "", "startPos": { "offset": 168, @@ -3558,7 +3540,7 @@ "end": 169 }, "expression": { - "id": 76, + "id": 75, "kind": "", "startPos": { "offset": 169, @@ -3575,7 +3557,7 @@ "start": 169, "end": 170, "expression": { - "id": 75, + "id": 74, "kind": "", "startPos": { "offset": 169, @@ -3709,7 +3691,7 @@ "symbol": 6 }, { - "id": 98, + "id": 97, "kind": "", "startPos": { "offset": 174, @@ -3726,7 +3708,7 @@ "start": 174, "end": 201, "callee": { - "id": 82, + "id": 81, "kind": "", "startPos": { "offset": 174, @@ -3743,7 +3725,7 @@ "start": 174, "end": 177, "expression": { - "id": 81, + "id": 80, "kind": "", "startPos": { "offset": 174, @@ -3849,7 +3831,7 @@ }, "args": [ { - "id": 90, + "id": 89, "kind": "", "startPos": { "offset": 178, @@ -3866,7 +3848,7 @@ "start": 178, "end": 186, "callee": { - "id": 84, + "id": 83, "kind": "", "startPos": { "offset": 178, @@ -3883,7 +3865,7 @@ "start": 178, "end": 181, "expression": { - "id": 83, + "id": 82, "kind": "", "startPos": { "offset": 178, @@ -3923,7 +3905,7 @@ } }, "argumentList": { - "id": 89, + "id": 88, "kind": "", "startPos": { "offset": 181, @@ -3962,7 +3944,7 @@ }, "elementList": [ { - "id": 88, + "id": 87, "kind": "", "startPos": { "offset": 182, @@ -4000,7 +3982,7 @@ "end": 183 }, "expression": { - "id": 87, + "id": 86, "kind": "", "startPos": { "offset": 183, @@ -4038,7 +4020,7 @@ "end": 184 }, "expression": { - "id": 86, + "id": 85, "kind": "", "startPos": { "offset": 184, @@ -4055,7 +4037,7 @@ "start": 184, "end": 185, "expression": { - "id": 85, + "id": 84, "kind": "", "startPos": { "offset": 184, @@ -4144,7 +4126,7 @@ } }, { - "id": 97, + "id": 96, "kind": "", "startPos": { "offset": 187, @@ -4183,7 +4165,7 @@ }, "elementList": [ { - "id": 96, + "id": 95, "kind": "", "startPos": { "offset": 188, @@ -4200,7 +4182,7 @@ "start": 188, "end": 200, "name": { - "id": 91, + "id": 90, "kind": "", "startPos": { "offset": 188, @@ -4241,7 +4223,7 @@ ] }, "value": { - "id": 95, + "id": 94, "kind": "", "startPos": { "offset": 197, @@ -4279,7 +4261,7 @@ "end": 198 }, "expression": { - "id": 94, + "id": 93, "kind": "", "startPos": { "offset": 198, @@ -4317,7 +4299,7 @@ "end": 199 }, "expression": { - "id": 93, + "id": 92, "kind": "", "startPos": { "offset": 199, @@ -4334,7 +4316,7 @@ "start": 199, "end": 200, "expression": { - "id": 92, + "id": 91, "kind": "", "startPos": { "offset": 199, @@ -4469,7 +4451,7 @@ "symbol": 7 }, { - "id": 127, + "id": 125, "kind": "", "startPos": { "offset": 204, @@ -4486,7 +4468,7 @@ "start": 204, "end": 250, "callee": { - "id": 100, + "id": 99, "kind": "", "startPos": { "offset": 204, @@ -4503,7 +4485,7 @@ "start": 204, "end": 207, "expression": { - "id": 99, + "id": 98, "kind": "", "startPos": { "offset": 204, @@ -4609,8 +4591,8 @@ }, "args": [ { - "id": 113, - "kind": "", + "id": 101, + "kind": "", "startPos": { "offset": 208, "line": 9, @@ -4618,16 +4600,16 @@ }, "fullStart": 208, "endPos": { - "offset": 224, + "offset": 211, "line": 9, - "column": 22 + "column": 9 }, - "fullEnd": 225, + "fullEnd": 212, "start": 208, - "end": 224, - "callee": { - "id": 102, - "kind": "", + "end": 211, + "expression": { + "id": 100, + "kind": "", "startPos": { "offset": 208, "line": 9, @@ -4642,249 +4624,270 @@ "fullEnd": 212, "start": 208, "end": 211, - "expression": { - "id": 101, - "kind": "", + "variable": { + "kind": "", "startPos": { "offset": 208, "line": 9, "column": 6 }, - "fullStart": 208, "endPos": { "offset": 211, "line": 9, "column": 9 }, - "fullEnd": 212, + "value": "int", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 211, + "line": 9, + "column": 9 + }, + "endPos": { + "offset": 212, + "line": 9, + "column": 10 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 211, + "end": 212 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, "start": 208, - "end": 211, - "variable": { - "kind": "", - "startPos": { - "offset": 208, - "line": 9, - "column": 6 - }, - "endPos": { - "offset": 211, - "line": 9, - "column": 9 - }, - "value": "int", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 211, - "line": 9, - "column": 9 - }, - "endPos": { - "offset": 212, - "line": 9, - "column": 10 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 211, - "end": 212 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 208, - "end": 211 - } + "end": 211 } + } + }, + { + "id": 111, + "kind": "", + "startPos": { + "offset": 212, + "line": 9, + "column": 10 }, - "argumentList": { - "id": 112, - "kind": "", + "fullStart": 212, + "endPos": { + "offset": 224, + "line": 9, + "column": 22 + }, + "fullEnd": 225, + "start": 212, + "end": 224, + "tupleOpenParen": { + "kind": "", "startPos": { "offset": 212, "line": 9, "column": 10 }, - "fullStart": 212, "endPos": { - "offset": 224, + "offset": 213, "line": 9, - "column": 22 + "column": 11 }, - "fullEnd": 225, + "value": "(", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, "start": 212, - "end": 224, - "tupleOpenParen": { - "kind": "", + "end": 213 + }, + "elementList": [ + { + "id": 110, + "kind": "", "startPos": { - "offset": 212, + "offset": 213, "line": 9, - "column": 10 + "column": 11 }, + "fullStart": 213, "endPos": { - "offset": 213, + "offset": 223, "line": 9, - "column": 11 + "column": 21 }, - "value": "(", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 212, - "end": 213 - }, - "elementList": [ - { - "id": 111, - "kind": "", + "fullEnd": 223, + "start": 213, + "end": 223, + "op": { + "kind": "", "startPos": { "offset": 213, "line": 9, "column": 11 }, - "fullStart": 213, + "endPos": { + "offset": 214, + "line": 9, + "column": 12 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 213, + "end": 214 + }, + "expression": { + "id": 109, + "kind": "", + "startPos": { + "offset": 214, + "line": 9, + "column": 12 + }, + "fullStart": 214, "endPos": { "offset": 223, "line": 9, "column": 21 }, "fullEnd": 223, - "start": 213, + "start": 214, "end": 223, "op": { "kind": "", "startPos": { - "offset": 213, + "offset": 214, "line": 9, - "column": 11 + "column": 12 }, "endPos": { - "offset": 214, + "offset": 215, "line": 9, - "column": 12 + "column": 13 }, - "value": "+", + "value": "-", "leadingTrivia": [], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 213, - "end": 214 + "start": 214, + "end": 215 }, "expression": { - "id": 110, + "id": 108, "kind": "", "startPos": { - "offset": 214, + "offset": 215, "line": 9, - "column": 12 + "column": 13 }, - "fullStart": 214, + "fullStart": 215, "endPos": { "offset": 223, "line": 9, "column": 21 }, "fullEnd": 223, - "start": 214, + "start": 215, "end": 223, "op": { "kind": "", "startPos": { - "offset": 214, + "offset": 215, "line": 9, - "column": 12 + "column": 13 }, "endPos": { - "offset": 215, + "offset": 216, "line": 9, - "column": 13 + "column": 14 }, - "value": "-", + "value": "+", "leadingTrivia": [], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 214, - "end": 215 + "start": 215, + "end": 216 }, "expression": { - "id": 109, + "id": 107, "kind": "", "startPos": { - "offset": 215, + "offset": 216, "line": 9, - "column": 13 + "column": 14 }, - "fullStart": 215, + "fullStart": 216, "endPos": { "offset": 223, "line": 9, "column": 21 }, "fullEnd": 223, - "start": 215, + "start": 216, "end": 223, "op": { "kind": "", "startPos": { - "offset": 215, + "offset": 216, "line": 9, - "column": 13 + "column": 14 }, "endPos": { - "offset": 216, + "offset": 217, "line": 9, - "column": 14 + "column": 15 }, - "value": "+", + "value": "-", "leadingTrivia": [], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 215, - "end": 216 + "start": 216, + "end": 217 }, "expression": { - "id": 108, + "id": 106, "kind": "", "startPos": { - "offset": 216, + "offset": 217, "line": 9, - "column": 14 + "column": 15 }, - "fullStart": 216, + "fullStart": 217, "endPos": { "offset": 223, "line": 9, "column": 21 }, "fullEnd": 223, - "start": 216, + "start": 217, "end": 223, "op": { "kind": "", "startPos": { - "offset": 216, + "offset": 217, "line": 9, - "column": 14 + "column": 15 }, "endPos": { - "offset": 217, + "offset": 218, "line": 9, - "column": 15 + "column": 16 }, "value": "-", "leadingTrivia": [], @@ -4892,37 +4895,37 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 216, - "end": 217 + "start": 217, + "end": 218 }, "expression": { - "id": 107, + "id": 105, "kind": "", "startPos": { - "offset": 217, + "offset": 218, "line": 9, - "column": 15 + "column": 16 }, - "fullStart": 217, + "fullStart": 218, "endPos": { "offset": 223, "line": 9, "column": 21 }, "fullEnd": 223, - "start": 217, + "start": 218, "end": 223, "op": { "kind": "", "startPos": { - "offset": 217, + "offset": 218, "line": 9, - "column": 15 + "column": 16 }, "endPos": { - "offset": 218, + "offset": 219, "line": 9, - "column": 16 + "column": 17 }, "value": "-", "leadingTrivia": [], @@ -4930,88 +4933,67 @@ "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 217, - "end": 218 + "start": 218, + "end": 219 }, "expression": { - "id": 106, + "id": 104, "kind": "", "startPos": { - "offset": 218, + "offset": 219, "line": 9, - "column": 16 + "column": 17 }, - "fullStart": 218, + "fullStart": 219, "endPos": { "offset": 223, "line": 9, "column": 21 }, "fullEnd": 223, - "start": 218, + "start": 219, "end": 223, "op": { "kind": "", "startPos": { - "offset": 218, + "offset": 219, "line": 9, - "column": 16 + "column": 17 }, "endPos": { - "offset": 219, + "offset": 220, "line": 9, - "column": 17 + "column": 18 }, - "value": "-", + "value": "+", "leadingTrivia": [], "trailingTrivia": [], "leadingInvalid": [], "trailingInvalid": [], "isInvalid": false, - "start": 218, - "end": 219 + "start": 219, + "end": 220 }, "expression": { - "id": 105, - "kind": "", + "id": 103, + "kind": "", "startPos": { - "offset": 219, + "offset": 220, "line": 9, - "column": 17 + "column": 18 }, - "fullStart": 219, + "fullStart": 220, "endPos": { "offset": 223, "line": 9, "column": 21 }, "fullEnd": 223, - "start": 219, + "start": 220, "end": 223, - "op": { - "kind": "", - "startPos": { - "offset": 219, - "line": 9, - "column": 17 - }, - "endPos": { - "offset": 220, - "line": 9, - "column": 18 - }, - "value": "+", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 219, - "end": 220 - }, "expression": { - "id": 104, - "kind": "", + "id": 102, + "kind": "", "startPos": { "offset": 220, "line": 9, @@ -5026,44 +5008,26 @@ "fullEnd": 223, "start": 220, "end": 223, - "expression": { - "id": 103, - "kind": "", + "literal": { + "kind": "", "startPos": { "offset": 220, "line": 9, "column": 18 }, - "fullStart": 220, "endPos": { "offset": 223, "line": 9, "column": 21 }, - "fullEnd": 223, + "value": "0.1", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, "start": 220, - "end": 223, - "literal": { - "kind": "", - "startPos": { - "offset": 220, - "line": 9, - "column": 18 - }, - "endPos": { - "offset": 223, - "line": 9, - "column": 21 - }, - "value": "0.1", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 220, - "end": 223 - } + "end": 223 } } } @@ -5073,55 +5037,55 @@ } } } - ], - "commaList": [], - "tupleCloseParen": { - "kind": "", - "startPos": { - "offset": 223, - "line": 9, - "column": 21 - }, - "endPos": { - "offset": 224, - "line": 9, - "column": 22 - }, - "value": ")", - "leadingTrivia": [], - "trailingTrivia": [ - { - "kind": "", - "startPos": { - "offset": 224, - "line": 9, - "column": 22 - }, - "endPos": { - "offset": 225, - "line": 9, - "column": 23 - }, - "value": " ", - "leadingTrivia": [], - "trailingTrivia": [], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 224, - "end": 225 - } - ], - "leadingInvalid": [], - "trailingInvalid": [], - "isInvalid": false, - "start": 223, - "end": 224 } + ], + "commaList": [], + "tupleCloseParen": { + "kind": "", + "startPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "endPos": { + "offset": 224, + "line": 9, + "column": 22 + }, + "value": ")", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 224, + "line": 9, + "column": 22 + }, + "endPos": { + "offset": 225, + "line": 9, + "column": 23 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 224, + "end": 225 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 223, + "end": 224 } }, { - "id": 126, + "id": 124, "kind": "", "startPos": { "offset": 225, @@ -5160,7 +5124,7 @@ }, "elementList": [ { - "id": 125, + "id": 123, "kind": "", "startPos": { "offset": 226, @@ -5177,7 +5141,7 @@ "start": 226, "end": 249, "name": { - "id": 114, + "id": 112, "kind": "", "startPos": { "offset": 226, @@ -5218,7 +5182,7 @@ ] }, "value": { - "id": 124, + "id": 122, "kind": "", "startPos": { "offset": 235, @@ -5256,7 +5220,7 @@ "end": 236 }, "expression": { - "id": 123, + "id": 121, "kind": "", "startPos": { "offset": 236, @@ -5294,7 +5258,7 @@ "end": 237 }, "expression": { - "id": 122, + "id": 120, "kind": "", "startPos": { "offset": 237, @@ -5332,7 +5296,7 @@ "end": 238 }, "expression": { - "id": 121, + "id": 119, "kind": "", "startPos": { "offset": 238, @@ -5370,7 +5334,7 @@ "end": 239 }, "expression": { - "id": 120, + "id": 118, "kind": "", "startPos": { "offset": 239, @@ -5408,7 +5372,7 @@ "end": 240 }, "expression": { - "id": 119, + "id": 117, "kind": "", "startPos": { "offset": 240, @@ -5446,7 +5410,7 @@ "end": 241 }, "expression": { - "id": 118, + "id": 116, "kind": "", "startPos": { "offset": 241, @@ -5484,7 +5448,7 @@ "end": 242 }, "expression": { - "id": 117, + "id": 115, "kind": "", "startPos": { "offset": 242, @@ -5522,7 +5486,7 @@ "end": 243 }, "expression": { - "id": 116, + "id": 114, "kind": "", "startPos": { "offset": 243, @@ -5539,7 +5503,7 @@ "start": 243, "end": 249, "expression": { - "id": 115, + "id": 113, "kind": "", "startPos": { "offset": 243, @@ -5766,11 +5730,11 @@ "end": 264 } }, - "parent": 143, + "parent": 141, "symbol": 5 }, { - "id": 142, + "id": 140, "kind": "", "startPos": { "offset": 266, @@ -5852,7 +5816,7 @@ "end": 271 }, "name": { - "id": 131, + "id": 129, "kind": "", "startPos": { "offset": 272, @@ -5869,7 +5833,7 @@ "start": 272, "end": 273, "expression": { - "id": 130, + "id": 128, "kind": "", "startPos": { "offset": 272, @@ -5931,7 +5895,7 @@ } }, "body": { - "id": 141, + "id": 139, "kind": "", "startPos": { "offset": 274, @@ -5992,7 +5956,7 @@ }, "body": [ { - "id": 136, + "id": 134, "kind": "", "startPos": { "offset": 278, @@ -6009,7 +5973,7 @@ "start": 278, "end": 284, "callee": { - "id": 133, + "id": 131, "kind": "", "startPos": { "offset": 278, @@ -6026,7 +5990,7 @@ "start": 278, "end": 280, "expression": { - "id": 132, + "id": 130, "kind": "", "startPos": { "offset": 278, @@ -6132,7 +6096,7 @@ }, "args": [ { - "id": 135, + "id": 133, "kind": "", "startPos": { "offset": 281, @@ -6149,7 +6113,7 @@ "start": 281, "end": 284, "expression": { - "id": 134, + "id": 132, "kind": "", "startPos": { "offset": 281, @@ -6214,7 +6178,7 @@ "symbol": 10 }, { - "id": 140, + "id": 138, "kind": "", "startPos": { "offset": 287, @@ -6231,7 +6195,7 @@ "start": 287, "end": 290, "callee": { - "id": 139, + "id": 137, "kind": "", "startPos": { "offset": 287, @@ -6312,7 +6276,7 @@ "end": 288 }, "expression": { - "id": 138, + "id": 136, "kind": "", "startPos": { "offset": 288, @@ -6329,7 +6293,7 @@ "start": 288, "end": 290, "expression": { - "id": 137, + "id": 135, "kind": "", "startPos": { "offset": 288, @@ -6416,7 +6380,7 @@ "end": 292 } }, - "parent": 143, + "parent": 141, "symbol": 9 } ], @@ -6460,10 +6424,10 @@ "Column:id3": { "references": [], "id": 4, - "declaration": 60 + "declaration": 59 } }, - "declaration": 62 + "declaration": 61 }, "TablePartial:P1": { "references": [], @@ -6472,20 +6436,20 @@ "Column:id": { "references": [], "id": 6, - "declaration": 80 + "declaration": 79 }, "Column:id2": { "references": [], "id": 7, - "declaration": 98 + "declaration": 97 }, "Column:id3": { "references": [], "id": 8, - "declaration": 127 + "declaration": 125 } }, - "declaration": 129 + "declaration": 127 }, "Table:b": { "references": [], @@ -6494,21 +6458,1921 @@ "Column:id": { "references": [], "id": 10, - "declaration": 136 + "declaration": 134 }, "PartialInjection:P1": { "references": [], "id": 11, "symbolTable": {}, - "declaration": 140 + "declaration": 138 } }, - "declaration": 142 + "declaration": 140 } }, "id": 0, "references": [] } }, - "errors": [] + "errors": [ + { + "code": 3019, + "diagnostic": "These fields must be some inline settings optionally ended with a setting list", + "nodeOrToken": { + "id": 49, + "kind": "", + "startPos": { + "offset": 77, + "line": 3, + "column": 10 + }, + "fullStart": 77, + "endPos": { + "offset": 89, + "line": 3, + "column": 22 + }, + "fullEnd": 90, + "start": 77, + "end": 89, + "tupleOpenParen": { + "kind": "", + "startPos": { + "offset": 77, + "line": 3, + "column": 10 + }, + "endPos": { + "offset": 78, + "line": 3, + "column": 11 + }, + "value": "(", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 77, + "end": 78 + }, + "elementList": [ + { + "id": 48, + "kind": "", + "startPos": { + "offset": 78, + "line": 3, + "column": 11 + }, + "fullStart": 78, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 78, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 78, + "line": 3, + "column": 11 + }, + "endPos": { + "offset": 79, + "line": 3, + "column": 12 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 78, + "end": 79 + }, + "expression": { + "id": 47, + "kind": "", + "startPos": { + "offset": 79, + "line": 3, + "column": 12 + }, + "fullStart": 79, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 79, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 79, + "line": 3, + "column": 12 + }, + "endPos": { + "offset": 80, + "line": 3, + "column": 13 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 79, + "end": 80 + }, + "expression": { + "id": 46, + "kind": "", + "startPos": { + "offset": 80, + "line": 3, + "column": 13 + }, + "fullStart": 80, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 80, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 80, + "line": 3, + "column": 13 + }, + "endPos": { + "offset": 81, + "line": 3, + "column": 14 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 80, + "end": 81 + }, + "expression": { + "id": 45, + "kind": "", + "startPos": { + "offset": 81, + "line": 3, + "column": 14 + }, + "fullStart": 81, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 81, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 81, + "line": 3, + "column": 14 + }, + "endPos": { + "offset": 82, + "line": 3, + "column": 15 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 81, + "end": 82 + }, + "expression": { + "id": 44, + "kind": "", + "startPos": { + "offset": 82, + "line": 3, + "column": 15 + }, + "fullStart": 82, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 82, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 82, + "line": 3, + "column": 15 + }, + "endPos": { + "offset": 83, + "line": 3, + "column": 16 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 82, + "end": 83 + }, + "expression": { + "id": 43, + "kind": "", + "startPos": { + "offset": 83, + "line": 3, + "column": 16 + }, + "fullStart": 83, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 83, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 83, + "line": 3, + "column": 16 + }, + "endPos": { + "offset": 84, + "line": 3, + "column": 17 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 83, + "end": 84 + }, + "expression": { + "id": 42, + "kind": "", + "startPos": { + "offset": 84, + "line": 3, + "column": 17 + }, + "fullStart": 84, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 84, + "end": 88, + "op": { + "kind": "", + "startPos": { + "offset": 84, + "line": 3, + "column": 17 + }, + "endPos": { + "offset": 85, + "line": 3, + "column": 18 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 84, + "end": 85 + }, + "expression": { + "id": 41, + "kind": "", + "startPos": { + "offset": 85, + "line": 3, + "column": 18 + }, + "fullStart": 85, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 85, + "end": 88, + "expression": { + "id": 40, + "kind": "", + "startPos": { + "offset": 85, + "line": 3, + "column": 18 + }, + "fullStart": 85, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "fullEnd": 88, + "start": 85, + "end": 88, + "literal": { + "kind": "", + "startPos": { + "offset": 85, + "line": 3, + "column": 18 + }, + "endPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "value": "0.1", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 85, + "end": 88 + } + } + } + } + } + } + } + } + } + } + ], + "commaList": [], + "tupleCloseParen": { + "kind": "", + "startPos": { + "offset": 88, + "line": 3, + "column": 21 + }, + "endPos": { + "offset": 89, + "line": 3, + "column": 22 + }, + "value": ")", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 89, + "line": 3, + "column": 22 + }, + "endPos": { + "offset": 90, + "line": 3, + "column": 23 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 89, + "end": 90 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 88, + "end": 89 + } + }, + "start": 77, + "end": 89, + "name": "CompileError" + }, + { + "code": 3019, + "diagnostic": "These fields must be some inline settings optionally ended with a setting list", + "nodeOrToken": { + "id": 58, + "kind": "", + "startPos": { + "offset": 90, + "line": 3, + "column": 23 + }, + "fullStart": 90, + "endPos": { + "offset": 111, + "line": 3, + "column": 44 + }, + "fullEnd": 124, + "start": 90, + "end": 111, + "listOpenBracket": { + "kind": "", + "startPos": { + "offset": 90, + "line": 3, + "column": 23 + }, + "endPos": { + "offset": 91, + "line": 3, + "column": 24 + }, + "value": "[", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 90, + "end": 91 + }, + "elementList": [ + { + "id": 57, + "kind": "", + "startPos": { + "offset": 91, + "line": 3, + "column": 24 + }, + "fullStart": 91, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 91, + "end": 110, + "name": { + "id": 50, + "kind": "", + "startPos": { + "offset": 91, + "line": 3, + "column": 24 + }, + "fullStart": 91, + "endPos": { + "offset": 98, + "line": 3, + "column": 31 + }, + "fullEnd": 98, + "start": 91, + "end": 98, + "identifiers": [ + { + "kind": "", + "startPos": { + "offset": 91, + "line": 3, + "column": 24 + }, + "endPos": { + "offset": 98, + "line": 3, + "column": 31 + }, + "value": "default", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 91, + "end": 98 + } + ] + }, + "value": { + "id": 56, + "kind": "", + "startPos": { + "offset": 100, + "line": 3, + "column": 33 + }, + "fullStart": 100, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 100, + "end": 110, + "op": { + "kind": "", + "startPos": { + "offset": 100, + "line": 3, + "column": 33 + }, + "endPos": { + "offset": 101, + "line": 3, + "column": 34 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 100, + "end": 101 + }, + "expression": { + "id": 55, + "kind": "", + "startPos": { + "offset": 101, + "line": 3, + "column": 34 + }, + "fullStart": 101, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 101, + "end": 110, + "op": { + "kind": "", + "startPos": { + "offset": 101, + "line": 3, + "column": 34 + }, + "endPos": { + "offset": 102, + "line": 3, + "column": 35 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 101, + "end": 102 + }, + "expression": { + "id": 54, + "kind": "", + "startPos": { + "offset": 102, + "line": 3, + "column": 35 + }, + "fullStart": 102, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 102, + "end": 110, + "op": { + "kind": "", + "startPos": { + "offset": 102, + "line": 3, + "column": 35 + }, + "endPos": { + "offset": 103, + "line": 3, + "column": 36 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 102, + "end": 103 + }, + "expression": { + "id": 53, + "kind": "", + "startPos": { + "offset": 103, + "line": 3, + "column": 36 + }, + "fullStart": 103, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 103, + "end": 110, + "op": { + "kind": "", + "startPos": { + "offset": 103, + "line": 3, + "column": 36 + }, + "endPos": { + "offset": 104, + "line": 3, + "column": 37 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 103, + "end": 104 + }, + "expression": { + "id": 52, + "kind": "", + "startPos": { + "offset": 104, + "line": 3, + "column": 37 + }, + "fullStart": 104, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 104, + "end": 110, + "expression": { + "id": 51, + "kind": "", + "startPos": { + "offset": 104, + "line": 3, + "column": 37 + }, + "fullStart": 104, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "fullEnd": 110, + "start": 104, + "end": 110, + "literal": { + "kind": "", + "startPos": { + "offset": 104, + "line": 3, + "column": 37 + }, + "endPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "value": "7.2225", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 104, + "end": 110 + } + } + } + } + } + } + }, + "colon": { + "kind": "", + "startPos": { + "offset": 98, + "line": 3, + "column": 31 + }, + "endPos": { + "offset": 99, + "line": 3, + "column": 32 + }, + "value": ":", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 99, + "line": 3, + "column": 32 + }, + "endPos": { + "offset": 100, + "line": 3, + "column": 33 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 99, + "end": 100 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 98, + "end": 99 + } + } + ], + "commaList": [], + "listCloseBracket": { + "kind": "", + "startPos": { + "offset": 110, + "line": 3, + "column": 43 + }, + "endPos": { + "offset": 111, + "line": 3, + "column": 44 + }, + "value": "]", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 111, + "line": 3, + "column": 44 + }, + "endPos": { + "offset": 112, + "line": 3, + "column": 45 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 111, + "end": 112 + }, + { + "kind": "", + "startPos": { + "offset": 112, + "line": 3, + "column": 45 + }, + "endPos": { + "offset": 123, + "line": 3, + "column": 56 + }, + "value": " positive", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 112, + "end": 123 + }, + { + "kind": "", + "startPos": { + "offset": 123, + "line": 3, + "column": 56 + }, + "endPos": { + "offset": 124, + "line": 4, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 123, + "end": 124 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 110, + "end": 111 + } + }, + "start": 90, + "end": 111, + "name": "CompileError" + }, + { + "code": 3019, + "diagnostic": "These fields must be some inline settings optionally ended with a setting list", + "nodeOrToken": { + "id": 111, + "kind": "", + "startPos": { + "offset": 212, + "line": 9, + "column": 10 + }, + "fullStart": 212, + "endPos": { + "offset": 224, + "line": 9, + "column": 22 + }, + "fullEnd": 225, + "start": 212, + "end": 224, + "tupleOpenParen": { + "kind": "", + "startPos": { + "offset": 212, + "line": 9, + "column": 10 + }, + "endPos": { + "offset": 213, + "line": 9, + "column": 11 + }, + "value": "(", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 212, + "end": 213 + }, + "elementList": [ + { + "id": 110, + "kind": "", + "startPos": { + "offset": 213, + "line": 9, + "column": 11 + }, + "fullStart": 213, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 213, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 213, + "line": 9, + "column": 11 + }, + "endPos": { + "offset": 214, + "line": 9, + "column": 12 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 213, + "end": 214 + }, + "expression": { + "id": 109, + "kind": "", + "startPos": { + "offset": 214, + "line": 9, + "column": 12 + }, + "fullStart": 214, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 214, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 214, + "line": 9, + "column": 12 + }, + "endPos": { + "offset": 215, + "line": 9, + "column": 13 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 214, + "end": 215 + }, + "expression": { + "id": 108, + "kind": "", + "startPos": { + "offset": 215, + "line": 9, + "column": 13 + }, + "fullStart": 215, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 215, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 215, + "line": 9, + "column": 13 + }, + "endPos": { + "offset": 216, + "line": 9, + "column": 14 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 215, + "end": 216 + }, + "expression": { + "id": 107, + "kind": "", + "startPos": { + "offset": 216, + "line": 9, + "column": 14 + }, + "fullStart": 216, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 216, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 216, + "line": 9, + "column": 14 + }, + "endPos": { + "offset": 217, + "line": 9, + "column": 15 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 216, + "end": 217 + }, + "expression": { + "id": 106, + "kind": "", + "startPos": { + "offset": 217, + "line": 9, + "column": 15 + }, + "fullStart": 217, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 217, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 217, + "line": 9, + "column": 15 + }, + "endPos": { + "offset": 218, + "line": 9, + "column": 16 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 217, + "end": 218 + }, + "expression": { + "id": 105, + "kind": "", + "startPos": { + "offset": 218, + "line": 9, + "column": 16 + }, + "fullStart": 218, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 218, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 218, + "line": 9, + "column": 16 + }, + "endPos": { + "offset": 219, + "line": 9, + "column": 17 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 218, + "end": 219 + }, + "expression": { + "id": 104, + "kind": "", + "startPos": { + "offset": 219, + "line": 9, + "column": 17 + }, + "fullStart": 219, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 219, + "end": 223, + "op": { + "kind": "", + "startPos": { + "offset": 219, + "line": 9, + "column": 17 + }, + "endPos": { + "offset": 220, + "line": 9, + "column": 18 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 219, + "end": 220 + }, + "expression": { + "id": 103, + "kind": "", + "startPos": { + "offset": 220, + "line": 9, + "column": 18 + }, + "fullStart": 220, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 220, + "end": 223, + "expression": { + "id": 102, + "kind": "", + "startPos": { + "offset": 220, + "line": 9, + "column": 18 + }, + "fullStart": 220, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "fullEnd": 223, + "start": 220, + "end": 223, + "literal": { + "kind": "", + "startPos": { + "offset": 220, + "line": 9, + "column": 18 + }, + "endPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "value": "0.1", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 220, + "end": 223 + } + } + } + } + } + } + } + } + } + } + ], + "commaList": [], + "tupleCloseParen": { + "kind": "", + "startPos": { + "offset": 223, + "line": 9, + "column": 21 + }, + "endPos": { + "offset": 224, + "line": 9, + "column": 22 + }, + "value": ")", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 224, + "line": 9, + "column": 22 + }, + "endPos": { + "offset": 225, + "line": 9, + "column": 23 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 224, + "end": 225 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 223, + "end": 224 + } + }, + "start": 212, + "end": 224, + "name": "CompileError" + }, + { + "code": 3019, + "diagnostic": "These fields must be some inline settings optionally ended with a setting list", + "nodeOrToken": { + "id": 124, + "kind": "", + "startPos": { + "offset": 225, + "line": 9, + "column": 23 + }, + "fullStart": 225, + "endPos": { + "offset": 250, + "line": 9, + "column": 48 + }, + "fullEnd": 263, + "start": 225, + "end": 250, + "listOpenBracket": { + "kind": "", + "startPos": { + "offset": 225, + "line": 9, + "column": 23 + }, + "endPos": { + "offset": 226, + "line": 9, + "column": 24 + }, + "value": "[", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 225, + "end": 226 + }, + "elementList": [ + { + "id": 123, + "kind": "", + "startPos": { + "offset": 226, + "line": 9, + "column": 24 + }, + "fullStart": 226, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 226, + "end": 249, + "name": { + "id": 112, + "kind": "", + "startPos": { + "offset": 226, + "line": 9, + "column": 24 + }, + "fullStart": 226, + "endPos": { + "offset": 233, + "line": 9, + "column": 31 + }, + "fullEnd": 233, + "start": 226, + "end": 233, + "identifiers": [ + { + "kind": "", + "startPos": { + "offset": 226, + "line": 9, + "column": 24 + }, + "endPos": { + "offset": 233, + "line": 9, + "column": 31 + }, + "value": "default", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 226, + "end": 233 + } + ] + }, + "value": { + "id": 122, + "kind": "", + "startPos": { + "offset": 235, + "line": 9, + "column": 33 + }, + "fullStart": 235, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 235, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 235, + "line": 9, + "column": 33 + }, + "endPos": { + "offset": 236, + "line": 9, + "column": 34 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 235, + "end": 236 + }, + "expression": { + "id": 121, + "kind": "", + "startPos": { + "offset": 236, + "line": 9, + "column": 34 + }, + "fullStart": 236, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 236, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 236, + "line": 9, + "column": 34 + }, + "endPos": { + "offset": 237, + "line": 9, + "column": 35 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 236, + "end": 237 + }, + "expression": { + "id": 120, + "kind": "", + "startPos": { + "offset": 237, + "line": 9, + "column": 35 + }, + "fullStart": 237, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 237, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 237, + "line": 9, + "column": 35 + }, + "endPos": { + "offset": 238, + "line": 9, + "column": 36 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 237, + "end": 238 + }, + "expression": { + "id": 119, + "kind": "", + "startPos": { + "offset": 238, + "line": 9, + "column": 36 + }, + "fullStart": 238, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 238, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 238, + "line": 9, + "column": 36 + }, + "endPos": { + "offset": 239, + "line": 9, + "column": 37 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 238, + "end": 239 + }, + "expression": { + "id": 118, + "kind": "", + "startPos": { + "offset": 239, + "line": 9, + "column": 37 + }, + "fullStart": 239, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 239, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 239, + "line": 9, + "column": 37 + }, + "endPos": { + "offset": 240, + "line": 9, + "column": 38 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 239, + "end": 240 + }, + "expression": { + "id": 117, + "kind": "", + "startPos": { + "offset": 240, + "line": 9, + "column": 38 + }, + "fullStart": 240, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 240, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 240, + "line": 9, + "column": 38 + }, + "endPos": { + "offset": 241, + "line": 9, + "column": 39 + }, + "value": "+", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 240, + "end": 241 + }, + "expression": { + "id": 116, + "kind": "", + "startPos": { + "offset": 241, + "line": 9, + "column": 39 + }, + "fullStart": 241, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 241, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 241, + "line": 9, + "column": 39 + }, + "endPos": { + "offset": 242, + "line": 9, + "column": 40 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 241, + "end": 242 + }, + "expression": { + "id": 115, + "kind": "", + "startPos": { + "offset": 242, + "line": 9, + "column": 40 + }, + "fullStart": 242, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 242, + "end": 249, + "op": { + "kind": "", + "startPos": { + "offset": 242, + "line": 9, + "column": 40 + }, + "endPos": { + "offset": 243, + "line": 9, + "column": 41 + }, + "value": "-", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 242, + "end": 243 + }, + "expression": { + "id": 114, + "kind": "", + "startPos": { + "offset": 243, + "line": 9, + "column": 41 + }, + "fullStart": 243, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 243, + "end": 249, + "expression": { + "id": 113, + "kind": "", + "startPos": { + "offset": 243, + "line": 9, + "column": 41 + }, + "fullStart": 243, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "fullEnd": 249, + "start": 243, + "end": 249, + "literal": { + "kind": "", + "startPos": { + "offset": 243, + "line": 9, + "column": 41 + }, + "endPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "value": "7.2225", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 243, + "end": 249 + } + } + } + } + } + } + } + } + } + } + }, + "colon": { + "kind": "", + "startPos": { + "offset": 233, + "line": 9, + "column": 31 + }, + "endPos": { + "offset": 234, + "line": 9, + "column": 32 + }, + "value": ":", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 234, + "line": 9, + "column": 32 + }, + "endPos": { + "offset": 235, + "line": 9, + "column": 33 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 234, + "end": 235 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 233, + "end": 234 + } + } + ], + "commaList": [], + "listCloseBracket": { + "kind": "", + "startPos": { + "offset": 249, + "line": 9, + "column": 47 + }, + "endPos": { + "offset": 250, + "line": 9, + "column": 48 + }, + "value": "]", + "leadingTrivia": [], + "trailingTrivia": [ + { + "kind": "", + "startPos": { + "offset": 250, + "line": 9, + "column": 48 + }, + "endPos": { + "offset": 251, + "line": 9, + "column": 49 + }, + "value": " ", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 250, + "end": 251 + }, + { + "kind": "", + "startPos": { + "offset": 251, + "line": 9, + "column": 49 + }, + "endPos": { + "offset": 262, + "line": 9, + "column": 60 + }, + "value": " negative", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 251, + "end": 262 + }, + { + "kind": "", + "startPos": { + "offset": 262, + "line": 9, + "column": 60 + }, + "endPos": { + "offset": 263, + "line": 10, + "column": 0 + }, + "value": "\n", + "leadingTrivia": [], + "trailingTrivia": [], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 262, + "end": 263 + } + ], + "leadingInvalid": [], + "trailingInvalid": [], + "isInvalid": false, + "start": 249, + "end": 250 + } + }, + "start": 225, + "end": 250, + "name": "CompileError" + } + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/nested_duplicate_names.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/nested_duplicate_names.out.json index b985b2dbf..869a2fc6e 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/nested_duplicate_names.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/nested_duplicate_names.out.json @@ -3439,5 +3439,6 @@ "end": 95, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/old_undocumented_syntax.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/old_undocumented_syntax.out.json index 9b5254503..28e8c5105 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/old_undocumented_syntax.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/old_undocumented_syntax.out.json @@ -8630,5 +8630,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/public_schema.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/public_schema.out.json index 2b2fc7e5d..ba778de61 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/public_schema.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/public_schema.out.json @@ -1685,5 +1685,6 @@ "end": 51, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/redefined_note.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/redefined_note.out.json index add433979..a0daae50a 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/redefined_note.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/redefined_note.out.json @@ -3179,5 +3179,6 @@ "end": 202, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/ref.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/ref.out.json index 299e9ef1d..18c2724d1 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/ref.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/ref.out.json @@ -887,5 +887,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/ref_error_setting.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/ref_error_setting.out.json index 819086f72..ba3028bd4 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/ref_error_setting.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/ref_error_setting.out.json @@ -13454,5 +13454,6 @@ "end": 690, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/ref_in_table.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/ref_in_table.out.json index db4a7a21d..4f55bdb4e 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/ref_in_table.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/ref_in_table.out.json @@ -4470,5 +4470,6 @@ "end": 170, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/schema_nested_tablegroup.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/schema_nested_tablegroup.out.json index 7dd27d636..da1ef7c90 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/schema_nested_tablegroup.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/schema_nested_tablegroup.out.json @@ -1008,5 +1008,6 @@ "end": 39, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/sticky_notes.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/sticky_notes.out.json index 540c8d0be..cf5d6825b 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/sticky_notes.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/sticky_notes.out.json @@ -5995,5 +5995,6 @@ "end": 407, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/table_group_settings.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/table_group_settings.out.json index 4123db1b0..aaf2474ea 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/table_group_settings.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/table_group_settings.out.json @@ -4536,5 +4536,6 @@ "end": 220, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/table_partial_check.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/table_partial_check.out.json index 56450957a..44dd2d169 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/table_partial_check.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/table_partial_check.out.json @@ -5412,5 +5412,6 @@ "end": 358, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/table_partial_settings_general.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/table_partial_settings_general.out.json index 6ba1ef362..e53063c53 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/table_partial_settings_general.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/table_partial_settings_general.out.json @@ -7368,5 +7368,6 @@ "end": 294, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/table_settings_check.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/table_settings_check.out.json index a91ddd015..48bfaf206 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/table_settings_check.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/table_settings_check.out.json @@ -5412,5 +5412,6 @@ "end": 351, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/table_settings_general.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/table_settings_general.out.json index 1d4e2ab5a..2979232f9 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/table_settings_general.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/table_settings_general.out.json @@ -6800,5 +6800,6 @@ "end": 281, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/table_with_no_columns.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/table_with_no_columns.out.json index f85578cca..bd0df283c 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/table_with_no_columns.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/table_with_no_columns.out.json @@ -299,5 +299,6 @@ "references": [] } }, - "errors": [] + "errors": [], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/wrong_sub_element_declarations.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/wrong_sub_element_declarations.out.json index 2e5380b9c..85eed2328 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/wrong_sub_element_declarations.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/wrong_sub_element_declarations.out.json @@ -5162,5 +5162,6 @@ "end": 42, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/snapshots/validator/output/wrong_table_partial_injection_syntax.out.json b/packages/dbml-parse/__tests__/snapshots/validator/output/wrong_table_partial_injection_syntax.out.json index 1419e505b..0f08e8c4a 100644 --- a/packages/dbml-parse/__tests__/snapshots/validator/output/wrong_table_partial_injection_syntax.out.json +++ b/packages/dbml-parse/__tests__/snapshots/validator/output/wrong_table_partial_injection_syntax.out.json @@ -2784,5 +2784,6 @@ "end": 74, "name": "CompileError" } - ] + ], + "warnings": [] } \ No newline at end of file diff --git a/packages/dbml-parse/__tests__/utils/compiler.ts b/packages/dbml-parse/__tests__/utils/compiler.ts index 010163208..d56ec1eec 100644 --- a/packages/dbml-parse/__tests__/utils/compiler.ts +++ b/packages/dbml-parse/__tests__/utils/compiler.ts @@ -17,6 +17,7 @@ import { BlockExpressionNode, ListExpressionNode, TupleExpressionNode, + CommaExpressionNode, CallExpressionNode, LiteralNode, VariableNode, @@ -25,22 +26,22 @@ import { } from '@/core/parser/nodes'; import { NodeSymbolIdGenerator } from '@/core/analyzer/symbol/symbols'; import Report from '@/core/report'; -import { CompileError, Compiler, SyntaxToken } from '@/index'; +import { Compiler, SyntaxToken } from '@/index'; import { Database } from '@/core/interpreter/types'; -export function lex (source: string): Report { +export function lex (source: string): Report { return new Lexer(source).lex(); } -export function parse (source: string): Report<{ ast: ProgramNode; tokens: SyntaxToken[] }, CompileError> { +export function parse (source: string): Report<{ ast: ProgramNode; tokens: SyntaxToken[] }> { return new Lexer(source).lex().chain((tokens) => new Parser(tokens, new SyntaxNodeIdGenerator()).parse()); } -export function analyze (source: string): Report { +export function analyze (source: string): Report { return parse(source).chain(({ ast }) => new Analyzer(ast, new NodeSymbolIdGenerator()).analyze()); } -export function interpret (source: string): Report { +export function interpret (source: string): Report { const compiler = new Compiler(); compiler.setSource(source); return compiler.parse._().map(({ rawDb }) => rawDb); @@ -173,6 +174,13 @@ export function print (source: string, ast: SyntaxNode): string { break; } + case SyntaxNodeKind.COMMA_EXPRESSION: { + const comma = node as CommaExpressionNode; + comma.elementList.forEach(collectTokens); + comma.commaList.forEach(collectTokens); + break; + } + case SyntaxNodeKind.CALL_EXPRESSION: { const call = node as CallExpressionNode; if (call.callee) collectTokens(call.callee); @@ -205,8 +213,8 @@ export function print (source: string, ast: SyntaxNode): string { break; } - case SyntaxNodeKind.DUMMY: - // Dummy nodes don't contribute to output + case SyntaxNodeKind.EMPTY: + // Empty nodes don't contribute to output break; default: { diff --git a/packages/dbml-parse/__tests__/utils/mocks.ts b/packages/dbml-parse/__tests__/utils/mocks.ts index a4845197b..7250e6a64 100644 --- a/packages/dbml-parse/__tests__/utils/mocks.ts +++ b/packages/dbml-parse/__tests__/utils/mocks.ts @@ -49,6 +49,11 @@ export class MockTextModel { getValue (): string { return this.content; } + + getLineContent (lineNumber: number): string { + const lines = this.content.split(/\r\n|\r|\n/); + return lines[lineNumber - 1] || ''; + } } export function createMockTextModel (content: string, uri: string = ''): TextModel { diff --git a/packages/dbml-parse/__tests__/utils/testHelpers.ts b/packages/dbml-parse/__tests__/utils/testHelpers.ts index 09ba4bfbb..707f3d0dd 100644 --- a/packages/dbml-parse/__tests__/utils/testHelpers.ts +++ b/packages/dbml-parse/__tests__/utils/testHelpers.ts @@ -1,6 +1,6 @@ import { NodeSymbol } from '@/core/analyzer/symbol/symbols'; import Report from '@/core/report'; -import { CompileError, ProgramNode, SyntaxNode } from '@/index'; +import { ProgramNode, SyntaxNode } from '@/index'; import fs from 'fs'; export function scanTestNames (_path: any) { @@ -22,7 +22,7 @@ export function scanTestNames (_path: any) { * - 'symbolTable': Converts Map to Object for JSON compatibility */ export function serialize ( - report: Readonly>, + report: Readonly>, pretty: boolean = false, ): string { return JSON.stringify( diff --git a/packages/dbml-parse/package.json b/packages/dbml-parse/package.json index aeff10a40..497d2f469 100644 --- a/packages/dbml-parse/package.json +++ b/packages/dbml-parse/package.json @@ -38,6 +38,7 @@ "devDependencies": { "@stylistic/eslint-plugin": "^5.5.0", "@types/lodash-es": "^4.17.12", + "@types/luxon": "^3.7.1", "@types/node": "^20.8.8", "@typescript-eslint/eslint-plugin": "^8.46.3", "@typescript-eslint/parser": "^8.46.3", @@ -49,7 +50,8 @@ "vite-plugin-dts": "^4.5.4" }, "dependencies": { - "lodash-es": "^4.17.21" + "lodash-es": "^4.17.21", + "luxon": "^3.7.2" }, "engines": { "node": ">=18" diff --git a/packages/dbml-parse/src/compiler/index.ts b/packages/dbml-parse/src/compiler/index.ts index 02b75d1f6..9cf60ef15 100644 --- a/packages/dbml-parse/src/compiler/index.ts +++ b/packages/dbml-parse/src/compiler/index.ts @@ -1,22 +1,37 @@ import { SyntaxNodeIdGenerator, ProgramNode } from '@/core/parser/nodes'; import { NodeSymbolIdGenerator } from '@/core/analyzer/symbol/symbols'; import { SyntaxToken } from '@/core/lexer/tokens'; -import { CompileError } from '@/core/errors'; import { Database } from '@/core/interpreter/types'; import Report from '@/core/report'; import Lexer from '@/core/lexer/lexer'; import Parser from '@/core/parser/parser'; import Analyzer from '@/core/analyzer/analyzer'; import Interpreter from '@/core/interpreter/interpreter'; -import { DBMLCompletionItemProvider, DBMLDefinitionProvider, DBMLReferencesProvider } from '@/services/index'; -import { ast, errors, tokens, rawDb, publicSymbolTable } from './queries/parse'; +import { DBMLCompletionItemProvider, DBMLDefinitionProvider, DBMLReferencesProvider, DBMLDiagnosticsProvider } from '@/services/index'; +import { ast, errors, warnings, tokens, rawDb, publicSymbolTable } from './queries/parse'; import { invalidStream, flatStream } from './queries/token'; import { symbolOfName, symbolOfNameToKey, symbolMembers } from './queries/symbol'; import { containerStack, containerToken, containerElement, containerScope, containerScopeKind } from './queries/container'; -import { renameTable, applyTextEdits, type TextEdit, type TableNameInput } from './queries/transform'; +import { + renameTable, + applyTextEdits, + appendRecords, + updateRecordField, + deleteRecordRow, + deleteRecordValue, + removeAllRecords, + type TextEdit, + type TableNameInput, + type RecordValue, +} from './queries/transform'; +import { splitQualifiedIdentifier, unescapeString, escapeString, formatRecordValue, isValidIdentifier, addDoubleQuoteIfNeeded } from './queries/utils'; // Re-export types export { ScopeKind } from './types'; +export type { TextEdit, TableNameInput, RecordValue }; + +// Re-export utilities +export { splitQualifiedIdentifier, unescapeString, escapeString, formatRecordValue, isValidIdentifier, addDoubleQuoteIfNeeded }; export default class Compiler { private source = ''; @@ -58,18 +73,18 @@ export default class Compiler { }) as (...args: Args) => Return; } - private interpret (): Report<{ ast: ProgramNode; tokens: SyntaxToken[]; rawDb?: Database }, CompileError> { - const parseRes: Report<{ ast: ProgramNode; tokens: SyntaxToken[] }, CompileError> = new Lexer(this.source) + private interpret (): Report<{ ast: ProgramNode; tokens: SyntaxToken[]; rawDb?: Database }> { + const parseRes: Report<{ ast: ProgramNode; tokens: SyntaxToken[] }> = new Lexer(this.source) .lex() .chain((lexedTokens) => new Parser(lexedTokens as SyntaxToken[], this.nodeIdGenerator).parse()) .chain(({ ast, tokens }) => new Analyzer(ast, this.symbolIdGenerator).analyze().map(() => ({ ast, tokens }))); if (parseRes.getErrors().length > 0) { - return parseRes as Report<{ ast: ProgramNode; tokens: SyntaxToken[]; rawDb?: Database }, CompileError>; + return parseRes as Report<{ ast: ProgramNode; tokens: SyntaxToken[]; rawDb?: Database }>; } return parseRes.chain(({ ast, tokens }) => - new Interpreter(ast).interpret().map((rawDb) => ({ ast, tokens, rawDb })), + new Interpreter(ast, this.source).interpret().map((rawDb) => ({ ast, tokens, rawDb })), ); } @@ -84,6 +99,44 @@ export default class Compiler { return applyTextEdits(this.parse.source(), edits); } + appendRecords ( + tableName: TableNameInput, + columns: string[], + values: RecordValue[][], + ): string { + return appendRecords.call(this, tableName, columns, values); + } + + updateRecordField ( + tableName: TableNameInput, + rowIndex: number, + fieldName: string, + newValue: RecordValue, + ): string { + return updateRecordField.call(this, tableName, rowIndex, fieldName, newValue); + } + + deleteRecordRow ( + tableName: TableNameInput, + rowIndex: number, + ): string { + return deleteRecordRow.call(this, tableName, rowIndex); + } + + deleteRecordValue ( + tableName: TableNameInput, + rowIndex: number, + columnName: string, + ): string { + return deleteRecordValue.call(this, tableName, rowIndex, columnName); + } + + removeAllRecords ( + tableName: TableNameInput, + ): string { + return removeAllRecords.call(this, tableName); + } + readonly token = { invalidStream: this.query(invalidStream), flatStream: this.query(flatStream), @@ -94,6 +147,7 @@ export default class Compiler { _: this.query(this.interpret), ast: this.query(ast), errors: this.query(errors), + warnings: this.query(warnings), tokens: this.query(tokens), rawDb: this.query(rawDb), publicSymbolTable: this.query(publicSymbolTable), @@ -117,6 +171,7 @@ export default class Compiler { definitionProvider: new DBMLDefinitionProvider(this), referenceProvider: new DBMLReferencesProvider(this), autocompletionProvider: new DBMLCompletionItemProvider(this), + diagnosticsProvider: new DBMLDiagnosticsProvider(this), }; } } diff --git a/packages/dbml-parse/src/compiler/queries/container/scopeKind.ts b/packages/dbml-parse/src/compiler/queries/container/scopeKind.ts index 8d97c8160..9c4358873 100644 --- a/packages/dbml-parse/src/compiler/queries/container/scopeKind.ts +++ b/packages/dbml-parse/src/compiler/queries/container/scopeKind.ts @@ -28,6 +28,8 @@ export function containerScopeKind (this: Compiler, offset: number): ScopeKind { return ScopeKind.TABLEPARTIAL; case 'checks': return ScopeKind.CHECKS; + case 'records': + return ScopeKind.RECORDS; default: return ScopeKind.CUSTOM; } diff --git a/packages/dbml-parse/src/compiler/queries/container/stack.ts b/packages/dbml-parse/src/compiler/queries/container/stack.ts index fb03262d8..0486d2710 100644 --- a/packages/dbml-parse/src/compiler/queries/container/stack.ts +++ b/packages/dbml-parse/src/compiler/queries/container/stack.ts @@ -8,6 +8,7 @@ import { InfixExpressionNode, ListExpressionNode, TupleExpressionNode, + CommaExpressionNode, BlockExpressionNode, IdentiferStreamNode, } from '@/core/parser/nodes'; @@ -75,6 +76,12 @@ export function containerStack (this: Compiler, offset: number): readonly Readon res.pop(); popOnce = true; } + } else if (lastContainer instanceof CommaExpressionNode) { + // CommaExpressionNode has no closing delimiter, so pop when offset is past its end + if (lastContainer.end <= offset) { + res.pop(); + popOnce = true; + } } else if (lastContainer instanceof BlockExpressionNode) { if (lastContainer.blockCloseBrace && lastContainer.end <= offset) { res.pop(); diff --git a/packages/dbml-parse/src/compiler/queries/parse.ts b/packages/dbml-parse/src/compiler/queries/parse.ts index bb2191a19..14936d8e2 100644 --- a/packages/dbml-parse/src/compiler/queries/parse.ts +++ b/packages/dbml-parse/src/compiler/queries/parse.ts @@ -1,7 +1,7 @@ import type Compiler from '../index'; import type { ProgramNode } from '@/core/parser/nodes'; import type { SyntaxToken } from '@/core/lexer/tokens'; -import type { CompileError } from '@/core/errors'; +import type { CompileError, CompileWarning } from '@/core/errors'; import type { Database } from '@/core/interpreter/types'; import type SymbolTable from '@/core/analyzer/symbol/symbolTable'; @@ -13,6 +13,10 @@ export function errors (this: Compiler): readonly Readonly[] { return this.parse._().getErrors(); } +export function warnings (this: Compiler): readonly Readonly[] { + return this.parse._().getWarnings(); +} + export function tokens (this: Compiler): Readonly[] { return this.parse._().getValue().tokens; } diff --git a/packages/dbml-parse/src/compiler/queries/transform/index.ts b/packages/dbml-parse/src/compiler/queries/transform/index.ts index 7947a39be..2324636db 100644 --- a/packages/dbml-parse/src/compiler/queries/transform/index.ts +++ b/packages/dbml-parse/src/compiler/queries/transform/index.ts @@ -1,2 +1,11 @@ -export { renameTable, type TableNameInput } from './renameTable'; +export { renameTable } from './renameTable'; export { applyTextEdits, type TextEdit } from './applyTextEdits'; +export { type TableNameInput } from './utils'; +export { + appendRecords, + updateRecordField, + deleteRecordRow, + deleteRecordValue, + removeAllRecords, + type RecordValue, +} from './records'; diff --git a/packages/dbml-parse/src/compiler/queries/transform/records/appendRecords.ts b/packages/dbml-parse/src/compiler/queries/transform/records/appendRecords.ts new file mode 100644 index 000000000..ca859bf80 --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/transform/records/appendRecords.ts @@ -0,0 +1,127 @@ +import { DEFAULT_SCHEMA_NAME } from '@/constants'; +import type Compiler from '../../../index'; +import { formatRecordValue, addDoubleQuoteIfNeeded } from '../../utils'; +import { normalizeTableName, type TableNameInput } from '../utils'; +import type { RecordValue } from './types'; +import { findRecordsForTable } from './utils'; +import { ElementDeclarationNode } from '@/core/parser/nodes'; + +/** + * Checks if a Records block's columns are a superset of the target columns. + */ +function doesRecordMatchColumns (recordsColumns: string[], targetColumns: string[]): boolean { + const recordsSet = new Set(recordsColumns); + return targetColumns.every((col) => recordsSet.has(col)); +} + +/** + * Inserts rows into an existing Records block by reordering values to match. + */ +function insertIntoExistingRecords ( + source: string, + element: ElementDeclarationNode, + recordsColumns: string[], + targetColumns: string[], + values: RecordValue[][], +): string { + const body = element.body; + if (!body) { + return source; + } + + // Build the new rows + const newRows: string[] = []; + for (const row of values) { + const reorderedValues: string[] = []; + for (const col of recordsColumns) { + const targetIndex = targetColumns.indexOf(col); + if (targetIndex >= 0 && targetIndex < row.length) { + reorderedValues.push(formatRecordValue(row[targetIndex])); + } else { + reorderedValues.push('null'); + } + } + newRows.push(' ' + reorderedValues.join(', ')); + } + + // Find the position to insert (before the closing brace) + const closingBracePos = body.end - 1; + const beforeBrace = source.slice(0, closingBracePos); + const afterBrace = source.slice(closingBracePos); + + // Add newline if the body is not empty + const bodyText = source.slice(body.start + 1, body.end - 1).trim(); + const separator = bodyText.length > 0 ? '\n' : ''; + + return beforeBrace + separator + newRows.join('\n') + '\n' + afterBrace; +} + +/** + * Appends a new Records block to the end of the source. + */ +function appendNewRecordsBlock ( + source: string, + schemaName: string, + tableName: string, + columns: string[], + values: RecordValue[][], +): string { + const tableQualifier = schemaName === DEFAULT_SCHEMA_NAME + ? addDoubleQuoteIfNeeded(tableName) + : `${addDoubleQuoteIfNeeded(schemaName)}.${addDoubleQuoteIfNeeded(tableName)}`; + + const columnList = columns.map(addDoubleQuoteIfNeeded).join(', '); + + const rows: string[] = []; + for (const row of values) { + const formattedValues = row.map(formatRecordValue); + rows.push(' ' + formattedValues.join(', ')); + } + + const recordsBlock = `\nrecords ${tableQualifier}(${columnList}) {\n${rows.join('\n')}\n}\n`; + + return source + recordsBlock; +} + +/** + * Appends records to a table, merging into the last matching Records block if possible. + */ +export function appendRecords ( + this: Compiler, + tableName: TableNameInput, + columns: string[], + values: RecordValue[][], +): string { + // Validation + if (columns.length === 0) { + throw new Error('Columns must not be empty'); + } + + if (values.length === 0) { + return this.parse.source(); + } + + // Validate all rows have correct number of values + for (const row of values) { + if (row.length !== columns.length) { + throw new Error('Data record entry does not have the same columns'); + } + } + + const source = this.parse.source(); + const { schema: schemaName, table: tableNameStr } = normalizeTableName(tableName); + + // Find existing Records blocks + const existingRecords = findRecordsForTable(this, schemaName, tableNameStr); + + // Check if last Records block can be merged into + if (existingRecords.length > 0) { + const lastRecord = existingRecords[existingRecords.length - 1]; + if (doesRecordMatchColumns(lastRecord.columns, columns)) { + return insertIntoExistingRecords(source, lastRecord.element, lastRecord.columns, columns, values); + } + } + + // Append new Records block + return appendNewRecordsBlock(source, schemaName, tableNameStr, columns, values); +} diff --git a/packages/dbml-parse/src/compiler/queries/transform/records/deleteRecordRow.ts b/packages/dbml-parse/src/compiler/queries/transform/records/deleteRecordRow.ts new file mode 100644 index 000000000..aebefb11a --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/transform/records/deleteRecordRow.ts @@ -0,0 +1,77 @@ +import type Compiler from '../../../index'; +import { ElementDeclarationNode, BlockExpressionNode, FunctionApplicationNode } from '@/core/parser/nodes'; +import { normalizeTableName, type TableNameInput } from '../utils'; +import { applyTextEdits, type TextEdit } from '../applyTextEdits'; +import { findRecordsForTable } from './utils'; + +/** + * Deletes a specific row from records by index. + */ +export function deleteRecordRow ( + this: Compiler, + targetName: TableNameInput, + rowIndex: number, +): string { + const source = this.parse.source(); + const { schema: schemaName, table: tableName } = normalizeTableName(targetName); + + const existingRecords = findRecordsForTable(this, schemaName, tableName).map((r) => r.element); + + if (existingRecords.length === 0) { + return source; + } + + let targetBlock: ElementDeclarationNode | null = null; + let localIndex = rowIndex; + + // Find which Records block contains the target row + for (const element of existingRecords) { + const body = element.body; + if (!(body instanceof BlockExpressionNode)) { + continue; + } + + const rowCount = body.body.filter((node) => node instanceof FunctionApplicationNode).length; + + if (localIndex < rowCount) { + targetBlock = element; + break; + } + + localIndex -= rowCount; + } + + if (!targetBlock) { + return source; // Index out of range + } + + const body = targetBlock.body; + if (!(body instanceof BlockExpressionNode)) { + return source; + } + + // Get data rows from AST + const dataRows = body.body.filter((node): node is FunctionApplicationNode => node instanceof FunctionApplicationNode); + + // Check if we're deleting the last row + if (dataRows.length === 1) { + // Remove the entire Records element + const edits: TextEdit[] = [{ + start: targetBlock.fullStart, + end: targetBlock.fullEnd, + newText: '', + }]; + + return applyTextEdits(source, edits); + } + + // Delete the specific row + const targetRow = dataRows[localIndex]; + const edits: TextEdit[] = [{ + start: targetRow.fullStart, + end: targetRow.fullEnd, + newText: '', + }]; + + return applyTextEdits(source, edits); +} diff --git a/packages/dbml-parse/src/compiler/queries/transform/records/deleteRecordValue.ts b/packages/dbml-parse/src/compiler/queries/transform/records/deleteRecordValue.ts new file mode 100644 index 000000000..32eead08d --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/transform/records/deleteRecordValue.ts @@ -0,0 +1,82 @@ +import type Compiler from '../../../index'; +import { ElementDeclarationNode, BlockExpressionNode, FunctionApplicationNode } from '@/core/parser/nodes'; +import { normalizeTableName, type TableNameInput } from '../utils'; +import { applyTextEdits, type TextEdit } from '../applyTextEdits'; +import { findRecordsForTable, extractRowValues } from './utils'; + +/** + * Deletes a specific value (sets to null) at row and column index. + */ +export function deleteRecordValue ( + this: Compiler, + targetName: TableNameInput, + rowIndex: number, + columnName: string, +): string { + const source = this.parse.source(); + const { schema: schemaName, table: tableName } = normalizeTableName(targetName); + + const existingRecords = findRecordsForTable(this, schemaName, tableName); + + if (existingRecords.length === 0) { + return source; + } + + // Find the target block and local row index + let localIndex = rowIndex; + let targetBlock: { element: ElementDeclarationNode; columns: string[] } | null = null; + + for (const record of existingRecords) { + const body = record.element.body; + if (!(body instanceof BlockExpressionNode)) { + continue; + } + + const rowCount = body.body.filter((node) => node instanceof FunctionApplicationNode).length; + + if (localIndex < rowCount) { + targetBlock = record; + break; + } + + localIndex -= rowCount; + } + + if (!targetBlock) { + return source; // Index out of range + } + + const columnIndex = targetBlock.columns.indexOf(columnName); + if (columnIndex < 0) { + return source; // Column not found + } + + const body = targetBlock.element.body; + if (!(body instanceof BlockExpressionNode)) { + return source; + } + + // Get data rows from AST + const dataRows = body.body.filter((node): node is FunctionApplicationNode => node instanceof FunctionApplicationNode); + const targetRow = dataRows[localIndex]; + + if (!targetRow) { + return source; + } + + // Get value nodes from the row + const values = extractRowValues(targetRow); + const targetValue = values[columnIndex]; + + if (!targetValue) { + return source; + } + + const edits: TextEdit[] = [{ + start: targetValue.start, + end: targetValue.end, + newText: 'null', + }]; + + return applyTextEdits(source, edits); +} diff --git a/packages/dbml-parse/src/compiler/queries/transform/records/index.ts b/packages/dbml-parse/src/compiler/queries/transform/records/index.ts new file mode 100644 index 000000000..dd407c839 --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/transform/records/index.ts @@ -0,0 +1,6 @@ +export { appendRecords } from './appendRecords'; +export { updateRecordField } from './updateRecordField'; +export { deleteRecordRow } from './deleteRecordRow'; +export { deleteRecordValue } from './deleteRecordValue'; +export { removeAllRecords } from './removeAllRecords'; +export type { RecordValue } from './types'; diff --git a/packages/dbml-parse/src/compiler/queries/transform/records/removeAllRecords.ts b/packages/dbml-parse/src/compiler/queries/transform/records/removeAllRecords.ts new file mode 100644 index 000000000..b30d3dc5e --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/transform/records/removeAllRecords.ts @@ -0,0 +1,32 @@ +import type Compiler from '../../../index'; +import { normalizeTableName, type TableNameInput } from '../utils'; +import { applyTextEdits, type TextEdit } from '../applyTextEdits'; +import { findRecordsForTable } from './utils'; + +/** + * Removes all Records blocks for a table. + */ +export function removeAllRecords ( + this: Compiler, + targetName: TableNameInput, +): string { + const source = this.parse.source(); + const { schema: schemaName, table: tableName } = normalizeTableName(targetName); + + const existingRecords = findRecordsForTable(this, schemaName, tableName).map((r) => r.element); + + if (existingRecords.length === 0) { + return source; + } + + // Create text edits for each Records element + const edits: TextEdit[] = existingRecords.map((element) => { + return { + start: element.fullStart, + end: element.fullEnd, + newText: '', + }; + }); + + return applyTextEdits(source, edits); +} diff --git a/packages/dbml-parse/src/compiler/queries/transform/records/types.ts b/packages/dbml-parse/src/compiler/queries/transform/records/types.ts new file mode 100644 index 000000000..8d4163285 --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/transform/records/types.ts @@ -0,0 +1,4 @@ +export interface RecordValue { + value: any; + type: string; +} diff --git a/packages/dbml-parse/src/compiler/queries/transform/records/updateRecordField.ts b/packages/dbml-parse/src/compiler/queries/transform/records/updateRecordField.ts new file mode 100644 index 000000000..b359d182c --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/transform/records/updateRecordField.ts @@ -0,0 +1,90 @@ +import type Compiler from '../../../index'; +import { formatRecordValue } from '../../utils'; +import { ElementDeclarationNode, BlockExpressionNode, FunctionApplicationNode } from '@/core/parser/nodes'; +import { normalizeTableName, type TableNameInput } from '../utils'; +import { applyTextEdits, type TextEdit } from '../applyTextEdits'; +import type { RecordValue } from './types'; +import { findRecordsForTable, extractRowValues } from './utils'; + +/** + * Updates a specific field value in one row for a table. + */ +export function updateRecordField ( + this: Compiler, + targetName: TableNameInput, + rowIndex: number, + fieldName: string, + newValue: RecordValue, +): string { + const source = this.parse.source(); + + const { schema: schemaName, table: tableName } = normalizeTableName(targetName); + + // Find existing Records elements for this table + const existingRecords = findRecordsForTable(this, schemaName, tableName); + + if (existingRecords.length === 0) { + return source; + } + + // Find which Records block contains the target row + let localIndex = rowIndex; + let targetBlock: { element: ElementDeclarationNode; columns: string[] } | null = null; + + for (const record of existingRecords) { + const body = record.element.body; + if (!(body instanceof BlockExpressionNode)) { + continue; + } + + const rowCount = body.body.filter((node) => node instanceof FunctionApplicationNode).length; + + if (localIndex < rowCount) { + targetBlock = record; + break; + } + + localIndex -= rowCount; + } + + if (!targetBlock) { + return source; // Index out of range + } + + const { element, columns } = targetBlock; + const fieldIndex = columns.indexOf(fieldName); + + if (fieldIndex < 0) { + return source; // Column not found + } + + const body = element.body; + if (!(body instanceof BlockExpressionNode)) { + return source; + } + + // Get data rows from AST + const dataRows = body.body.filter((node): node is FunctionApplicationNode => node instanceof FunctionApplicationNode); + const targetRow = dataRows[localIndex]; + + if (!targetRow) { + return source; + } + + // Get value nodes from the row + const values = extractRowValues(targetRow); + const targetValue = values[fieldIndex]; + + if (!targetValue) { + return source; + } + + // Replace the value + const edits: TextEdit[] = [{ + start: targetValue.start, + end: targetValue.end, + newText: formatRecordValue(newValue), + }]; + + return applyTextEdits(source, edits); +} diff --git a/packages/dbml-parse/src/compiler/queries/transform/records/utils.ts b/packages/dbml-parse/src/compiler/queries/transform/records/utils.ts new file mode 100644 index 000000000..f4d221821 --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/transform/records/utils.ts @@ -0,0 +1,104 @@ +import { DEFAULT_SCHEMA_NAME } from '@/constants'; +import type Compiler from '../../../index'; +import { ElementDeclarationNode, FunctionApplicationNode, CommaExpressionNode, SyntaxNode } from '@/core/parser/nodes'; +import { getElementKind, extractVarNameFromPrimaryVariable, destructureCallExpression } from '@/core/analyzer/utils'; +import { ElementKind } from '@/core/analyzer/types'; +import { createTableSymbolIndex, createSchemaSymbolIndex } from '@/core/analyzer/symbol/symbolIndex'; + +/** + * Extracts value nodes from a row (FunctionApplicationNode). + */ +export function extractRowValues (row: FunctionApplicationNode): SyntaxNode[] { + if (row.args.length > 0) { + return []; + } + + if (row.callee instanceof CommaExpressionNode) { + return row.callee.elementList; + } + + if (row.callee) { + return [row.callee]; + } + + return []; +} + +/** + * Extracts column names from a Records element declaration. + */ +export function extractColumnsFromRecords (recordsDecl: ElementDeclarationNode): string[] { + if (!recordsDecl.name) { + return []; + } + + const fragments = destructureCallExpression(recordsDecl.name).unwrap_or(undefined); + if (!fragments || !fragments.args) { + return []; + } + + const names = fragments.args + .map((arg) => extractVarNameFromPrimaryVariable(arg).unwrap_or(null)); + if (names.some((name) => name === null)) { + return []; + } + return names as string[]; +} + +/** + * Finds existing Records elements that reference the given table. + */ +export function findRecordsForTable ( + compiler: Compiler, + schemaName: string, + tableName: string, +): Array<{ element: ElementDeclarationNode; columns: string[] }> { + const symbolTable = compiler.parse.publicSymbolTable(); + const ast = compiler.parse.ast(); + + // Get table symbol + const schemaIndex = createSchemaSymbolIndex(schemaName); + const tableIndex = createTableSymbolIndex(tableName); + + let tableSymbol; + if (schemaName === DEFAULT_SCHEMA_NAME) { + tableSymbol = symbolTable.get(tableIndex); + } else { + const schemaSymbol = symbolTable.get(schemaIndex); + tableSymbol = schemaSymbol?.symbolTable?.get(tableIndex); + } + + if (!tableSymbol) { + return []; + } + + // Scan AST for top-level Records elements + const recordsElements: Array<{ element: ElementDeclarationNode; columns: string[] }> = []; + + for (const element of ast.body) { + const kind = getElementKind(element).unwrap_or(undefined); + if (kind !== ElementKind.Records || !element.body) { + continue; + } + + // Check if this Records element references our table + if (!element.name) { + continue; + } + + // Get the table reference from the Records name + const fragments = destructureCallExpression(element.name).unwrap_or(undefined); + if (!fragments || fragments.variables.length === 0) { + continue; + } + + // The last variable in the fragments is the table reference + const tableRef = fragments.variables[fragments.variables.length - 1]; + if (tableRef.referee !== tableSymbol) continue; + const columns = extractColumnsFromRecords(element); + if (columns.length === 0) continue; + recordsElements.push({ element, columns }); + } + + return recordsElements; +} diff --git a/packages/dbml-parse/src/compiler/queries/transform/renameTable.ts b/packages/dbml-parse/src/compiler/queries/transform/renameTable.ts index b985f85db..a84704dc4 100644 --- a/packages/dbml-parse/src/compiler/queries/transform/renameTable.ts +++ b/packages/dbml-parse/src/compiler/queries/transform/renameTable.ts @@ -3,14 +3,10 @@ import type Compiler from '../../index'; import { SyntaxNode } from '@/core/parser/nodes'; import SymbolTable from '@/core/analyzer/symbol/symbolTable'; import { TableSymbol } from '@/core/analyzer/symbol/symbols'; -import { - createSchemaSymbolIndex, - createTableSymbolIndex, -} from '@/core/analyzer/symbol/symbolIndex'; +import { createSchemaSymbolIndex, createTableSymbolIndex } from '@/core/analyzer/symbol/symbolIndex'; import { applyTextEdits, TextEdit } from './applyTextEdits'; import { isAlphaOrUnderscore, isDigit } from '@/core/utils'; - -export type TableNameInput = string | { schema?: string; table: string }; +import { normalizeTableName, lookupTableSymbol, stripQuotes, type TableNameInput } from './utils'; interface FormattedTableName { schema: string; @@ -21,53 +17,6 @@ interface FormattedTableName { shouldQuoteTable: boolean; } -/** - * Removes surrounding double quotes from a string if present. - */ -function stripQuotes (str: string): string { - if (str.startsWith('"') && str.endsWith('"') && str.length >= 2) { - return str.slice(1, -1); - } - return str; -} - -/** - * Normalizes a table name input to { schema, table } format. - * FIXME: String parsing uses simple split('.') which doesn't handle quoted identifiers with dots - */ -function normalizeTableName (input: TableNameInput): { schema: string; table: string } { - if (typeof input !== 'string') { - return { - schema: input.schema ?? DEFAULT_SCHEMA_NAME, - table: input.table, - }; - } - - // FIXME: This simple split doesn't handle quoted identifiers containing dots - const parts = input.split('.'); - - if (parts.length === 1) { - return { - schema: DEFAULT_SCHEMA_NAME, - table: stripQuotes(parts[0]), - }; - } - - if (parts.length === 2) { - return { - schema: stripQuotes(parts[0]), - table: stripQuotes(parts[1]), - }; - } - - // More than 2 parts - treat the last as table, rest as schema - const tablePart = parts.pop()!; - return { - schema: stripQuotes(parts.join('.')), - table: stripQuotes(tablePart), - }; -} - /** * Checks if an identifier is valid (can be used without quotes). */ @@ -124,32 +73,6 @@ function formatTableName ( }; } -/** - * Looks up a table symbol from the symbol table. - */ -function lookupTableSymbol ( - symbolTable: Readonly, - schema: string, - table: string, -): TableSymbol | null { - const tableSymbolIndex = createTableSymbolIndex(table); - - if (schema === DEFAULT_SCHEMA_NAME) { - const symbol = symbolTable.get(tableSymbolIndex); - return symbol instanceof TableSymbol ? symbol : null; - } - - const schemaSymbolIndex = createSchemaSymbolIndex(schema); - const schemaSymbol = symbolTable.get(schemaSymbolIndex); - - if (!schemaSymbol || !schemaSymbol.symbolTable) { - return null; - } - - const symbol = schemaSymbol.symbolTable.get(tableSymbolIndex); - return symbol instanceof TableSymbol ? symbol : null; -} - /** * Checks if renaming would cause a name collision. */ diff --git a/packages/dbml-parse/src/compiler/queries/transform/utils.ts b/packages/dbml-parse/src/compiler/queries/transform/utils.ts new file mode 100644 index 000000000..e1fd6dcf0 --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/transform/utils.ts @@ -0,0 +1,87 @@ +import { DEFAULT_SCHEMA_NAME } from '@/constants'; +import { splitQualifiedIdentifier } from '../utils'; +import { createTableSymbolIndex, createSchemaSymbolIndex } from '@/core/analyzer/symbol/symbolIndex'; +import type SymbolTable from '@/core/analyzer/symbol/symbolTable'; +import { TableSymbol } from '@/core/analyzer/symbol/symbols'; + +export type TableNameInput = string | { schema?: string; table: string }; + +/** + * Normalizes a table name input to { schema, table } format. + * Properly handles quoted identifiers with dots inside. + */ +export function normalizeTableName (input: TableNameInput): { schema: string; table: string } { + if (typeof input !== 'string') { + return { + schema: input.schema ?? DEFAULT_SCHEMA_NAME, + table: input.table, + }; + } + + const parts = splitQualifiedIdentifier(input); + + if (parts.length === 0) { + return { + schema: DEFAULT_SCHEMA_NAME, + table: '', + }; + } + + if (parts.length === 1) { + return { + schema: DEFAULT_SCHEMA_NAME, + table: parts[0], + }; + } + + if (parts.length === 2) { + return { + schema: parts[0], + table: parts[1], + }; + } + + // More than 2 parts - treat the last as table, rest as schema + const tablePart = parts[parts.length - 1]; + const schemaPart = parts.slice(0, -1).join('.'); + return { + schema: schemaPart, + table: tablePart, + }; +} + +/** + * Looks up a table symbol from the symbol table. + */ +export function lookupTableSymbol ( + symbolTable: Readonly, + schema: string, + table: string, +): TableSymbol | null { + const tableSymbolIndex = createTableSymbolIndex(table); + + if (schema === DEFAULT_SCHEMA_NAME) { + const symbol = symbolTable.get(tableSymbolIndex); + return symbol instanceof TableSymbol ? symbol : null; + } + + const schemaSymbolIndex = createSchemaSymbolIndex(schema); + const schemaSymbol = symbolTable.get(schemaSymbolIndex); + + if (!schemaSymbol || !schemaSymbol.symbolTable) { + return null; + } + + const symbol = schemaSymbol.symbolTable.get(tableSymbolIndex); + return symbol instanceof TableSymbol ? symbol : null; +} + +/** + * Removes surrounding double quotes from a string if present. + */ +export function stripQuotes (str: string): string { + if (str.startsWith('"') && str.endsWith('"') && str.length >= 2) { + return str.slice(1, -1); + } + return str; +} diff --git a/packages/dbml-parse/src/compiler/queries/utils.ts b/packages/dbml-parse/src/compiler/queries/utils.ts new file mode 100644 index 000000000..a9f209410 --- /dev/null +++ b/packages/dbml-parse/src/compiler/queries/utils.ts @@ -0,0 +1,271 @@ +import { + isBooleanType, + isNumericType, + isDateTimeType, + tryExtractBoolean, + tryExtractNumeric, + tryExtractString, + tryExtractDateTime, +} from '@/core/interpreter/records/utils'; +import { isAlphaOrUnderscore, isDigit } from '@/core/utils'; + +/** + * Checks if an identifier is valid (can be used without quotes in DBML). + * Valid identifiers must: + * - Contain only alphanumeric characters and underscores + * - Not start with a digit + * + * @param name - The identifier to check + * @returns True if the identifier is valid and doesn't need quotes + * + * @example + * isValidIdentifier('users') => true + * isValidIdentifier('user_name') => true + * isValidIdentifier('user name') => false (contains space) + * isValidIdentifier('123users') => false (starts with digit) + */ +export function isValidIdentifier (name: string): boolean { + if (!name) return false; + return name.split('').every((char) => isAlphaOrUnderscore(char) || isDigit(char)) && !isDigit(name[0]); +} + +/** + * Adds double quotes around an identifier if needed. + * Identifiers need quotes if they: + * - Contain non-alphanumeric characters (except underscore) + * - Start with a digit + * - Are empty strings + * + * @param identifier - The identifier to potentially quote + * @returns The identifier with double quotes if needed, otherwise unchanged + * + * @example + * addDoubleQuoteIfNeeded('users') => 'users' + * addDoubleQuoteIfNeeded('user name') => '"user name"' + * addDoubleQuoteIfNeeded('123users') => '"123users"' + * addDoubleQuoteIfNeeded('user-name') => '"user-name"' + */ +export function addDoubleQuoteIfNeeded (identifier: string): string { + if (isValidIdentifier(identifier)) { + return identifier; + } + return `"${identifier}"`; +} + +/** + * Unescapes a string by processing escape sequences. + * Handles escaped quotes (\"), common escape sequences, unicode (\uHHHH), and arbitrary escapes. + * + * @param str - The string to unescape + * @returns The unescaped string + * + * @example + * unescapeString('table\\"name') => 'table"name' + * unescapeString('line1\\nline2') => 'line1\nline2' + * unescapeString('\\u0041BC') => 'ABC' + * unescapeString('\\x') => 'x' + */ +export function unescapeString (str: string): string { + let result = ''; + let i = 0; + + while (i < str.length) { + if (str[i] === '\\' && i + 1 < str.length) { + const nextChar = str[i + 1]; + + // Handle unicode escape sequences \uHHHH + if (nextChar === 'u' && i + 5 < str.length) { + const hex = str.slice(i + 2, i + 6); + if (/^[0-9a-fA-F]{4}$/.test(hex)) { + result += String.fromCharCode(parseInt(hex, 16)); + i += 6; + continue; + } + } + + // Handle common escape sequences + const escapeMap: Record = { + 'n': '\n', + 't': '\t', + 'r': '\r', + 'b': '\b', + 'f': '\f', + 'v': '\v', + '0': '\0', + '\\': '\\', + '"': '"', + '\'': '\'', + '`': '`', + }; + + if (nextChar in escapeMap) { + result += escapeMap[nextChar]; + i += 2; + } else { + // Unknown escape sequence - just use the character after backslash + result += nextChar; + i += 2; + } + } else { + result += str[i]; + i++; + } + } + + return result; +} + +/** + * Escapes a string by adding backslashes before special characters. + * Handles quotes and other characters that need escaping. + * + * @param str - The string to escape + * @returns The escaped string + * + * @example + * escapeString('table"name') => 'table\\"name' + * escapeString('line1\nline2') => 'line1\\nline2' + */ +export function escapeString (str: string): string { + let result = ''; + + for (let i = 0; i < str.length; i++) { + const char = str[i]; + + switch (char) { + case '\\': + result += '\\\\'; + break; + case '"': + result += '\\"'; + break; + case '\'': + result += "\\'"; + break; + case '\n': + result += '\\n'; + break; + case '\t': + result += '\\t'; + break; + case '\r': + result += '\\r'; + break; + case '\b': + result += '\\b'; + break; + case '\f': + result += '\\f'; + break; + case '\v': + result += '\\v'; + break; + case '\0': + result += '\\0'; + break; + default: + result += char; + } + } + + return result; +} + +/** + * Formats a record value for DBML output. + * Handles different data types and converts them to appropriate DBML syntax. + * + * @param recordValue - The record value with type information + * @returns The formatted string representation for DBML + * + * @example + * formatRecordValue({ value: 1, type: 'integer' }) => '1' + * formatRecordValue({ value: 'Alice', type: 'string' }) => "'Alice'" + * formatRecordValue({ value: true, type: 'bool' }) => 'true' + * formatRecordValue({ value: null, type: 'string' }) => 'null' + */ +export function formatRecordValue (recordValue: { value: any; type: string }): string { + const { value, type } = recordValue; + + // Handle null/undefined values + if (value === null || value === undefined) { + return 'null'; + } + + // Handle expressions (backtick strings) + if (type === 'expression') { + return `\`${value}\``; + } + + // Try to extract typed values using tryExtract functions + // If extraction fails, fall back to function expression + + if (isBooleanType(type)) { + const extracted = tryExtractBoolean(value); + if (extracted !== null) { + return extracted ? 'true' : 'false'; + } + // If extraction failed, wrap in function expression + return `\`${value}\``; + } + + if (isNumericType(type)) { + const extracted = tryExtractNumeric(value); + if (extracted !== null) { + return String(extracted); + } + // If extraction failed, wrap in function expression + return `\`${value}\``; + } + + if (isDateTimeType(type)) { + const extracted = tryExtractDateTime(value); + if (extracted !== null) { + const quote = extracted.includes('\n') ? '\'\'\'' : '\''; + return `${quote}${extracted.replaceAll('\\', '\\\\').replaceAll("'", "\\'")}${quote}`; + } + // If extraction failed, wrap in function expression + return `\`${value}\``; + } + + // Default: string types and others + const extracted = tryExtractString(value); + if (extracted !== null) { + const quote = extracted.includes('\n') ? '\'\'\'' : '\''; + return `${quote}${extracted.replaceAll('\\', '\\\\').replaceAll("'", "\\'")}${quote}`; + } + + // If all extractions failed, wrap in function expression + return `\`${value}\``; +} + +/** + * Splits a qualified identifier string into its components, handling quoted segments. + * + * Examples: + * - "schema.table" => ["schema", "table"] + * - '"schema name".table' => ["schema name", "table"] + * - '"schema.with.dots"."table.with.dots".column' => ["schema.with.dots", "table.with.dots", "column"] + * - 'schema."table name"."column name"' => ["schema", "table name", "column name"] + * - 'schema . table' => ["schema", "table"] + * + * @param identifier - The qualified identifier string to split + * @returns Array of unquoted identifier components + */ +export function splitQualifiedIdentifier (identifier: string): string[] { + // Match quoted strings (with escaped quotes) or unquoted identifiers + const pattern = /"(?:[^"\\]|\\.)*"|[^."]+/g; + const matches = identifier.match(pattern) || []; + + return matches + .map((match) => { + // If quoted, remove quotes and unescape + if (match.startsWith('"') && match.endsWith('"')) { + const content = match.slice(1, -1); + return unescapeString(content); + } + // Otherwise trim whitespace from unquoted component + return match.trim(); + }) + .filter((component) => component.length > 0); +} diff --git a/packages/dbml-parse/src/compiler/types.ts b/packages/dbml-parse/src/compiler/types.ts index 6bb512015..24bb8bbea 100644 --- a/packages/dbml-parse/src/compiler/types.ts +++ b/packages/dbml-parse/src/compiler/types.ts @@ -10,4 +10,5 @@ export const enum ScopeKind { TOPLEVEL, TABLEPARTIAL, CHECKS, + RECORDS, } diff --git a/packages/dbml-parse/src/constants.ts b/packages/dbml-parse/src/constants.ts index ab1dda4c1..22e54600f 100644 --- a/packages/dbml-parse/src/constants.ts +++ b/packages/dbml-parse/src/constants.ts @@ -1,3 +1,11 @@ export const KEYWORDS_OF_DEFAULT_SETTING = ['null', 'true', 'false'] as readonly string[]; export const NUMERIC_LITERAL_PREFIX = ['-', '+'] as readonly string[]; export const DEFAULT_SCHEMA_NAME = 'public'; + +// Ref relation operators +export enum RefRelation { + ManyToOne = '>', + OneToMany = '<', + OneToOne = '-', + ManyToMany = '<>', +} diff --git a/packages/dbml-parse/src/core/analyzer/analyzer.ts b/packages/dbml-parse/src/core/analyzer/analyzer.ts index ab352dc1b..c14db9d9e 100644 --- a/packages/dbml-parse/src/core/analyzer/analyzer.ts +++ b/packages/dbml-parse/src/core/analyzer/analyzer.ts @@ -2,7 +2,6 @@ import Validator from '@/core/analyzer/validator/validator'; import Binder from '@/core/analyzer/binder/binder'; import { ProgramNode } from '@/core/parser/nodes'; import Report from '@/core/report'; -import { CompileError } from '@/core/errors'; import { NodeSymbolIdGenerator } from '@/core/analyzer/symbol/symbols'; import SymbolFactory from '@/core/analyzer/symbol/factory'; @@ -15,8 +14,8 @@ export default class Analyzer { this.symbolFactory = new SymbolFactory(symbolIdGenerator); } - // Analyzing: Invoking both the validator and binder - analyze (): Report { + // Analyzing: Invoking the validator + analyze (): Report { const validator = new Validator(this.ast, this.symbolFactory); return validator.validate().chain((program) => { @@ -26,8 +25,7 @@ export default class Analyzer { }); } - // For invoking the validator only - validate (): Report { + validate (): Report { const validator = new Validator(this.ast, this.symbolFactory); return validator.validate().chain((program) => new Report(program, [])); diff --git a/packages/dbml-parse/src/core/analyzer/binder/binder.ts b/packages/dbml-parse/src/core/analyzer/binder/binder.ts index ffa42651e..0d415877a 100644 --- a/packages/dbml-parse/src/core/analyzer/binder/binder.ts +++ b/packages/dbml-parse/src/core/analyzer/binder/binder.ts @@ -25,7 +25,7 @@ export default class Binder { }); } - resolve (): Report { + resolve (): Report { const errors: CompileError[] = []; // Must call this before binding errors.push(...this.resolvePartialInjections()); diff --git a/packages/dbml-parse/src/core/analyzer/binder/elementBinder/records.ts b/packages/dbml-parse/src/core/analyzer/binder/elementBinder/records.ts new file mode 100644 index 000000000..26a09fbf0 --- /dev/null +++ b/packages/dbml-parse/src/core/analyzer/binder/elementBinder/records.ts @@ -0,0 +1,241 @@ +import { SyntaxToken } from '../../../lexer/tokens'; +import { ElementBinder } from '../types'; +import { + BlockExpressionNode, CommaExpressionNode, ElementDeclarationNode, FunctionApplicationNode, ProgramNode, SyntaxNode, +} from '../../../parser/nodes'; +import { CompileError, CompileErrorCode } from '../../../errors'; +import { lookupAndBindInScope, pickBinder, scanNonListNodeForBinding } from '../utils'; +import SymbolFactory from '../../symbol/factory'; +import { + destructureCallExpression, + extractVarNameFromPrimaryVariable, + getElementKind, +} from '../../utils'; +import { createColumnSymbolIndex, SymbolKind } from '../../symbol/symbolIndex'; +import { ElementKind } from '../../types'; +import { isTupleOfVariables } from '../../validator/utils'; +import { NodeSymbol } from '../../symbol/symbols'; +import { getElementNameString } from '@/core/parser/utils'; + +export default class RecordsBinder implements ElementBinder { + private symbolFactory: SymbolFactory; + private declarationNode: ElementDeclarationNode & { type: SyntaxToken }; + private ast: ProgramNode; + // A mapping from bound column symbols to the referencing primary expressions nodes of column + // Example: Records (col1, col2) -> Map symbol of `col1` to the `col1` in `Records (col1, col2)`` + private boundColumns: Map; + + constructor (declarationNode: ElementDeclarationNode & { type: SyntaxToken }, ast: ProgramNode, symbolFactory: SymbolFactory) { + this.declarationNode = declarationNode; + this.ast = ast; + this.symbolFactory = symbolFactory; + this.boundColumns = new Map(); + } + + bind (): CompileError[] { + const errors: CompileError[] = []; + + if (this.declarationNode.name) { + errors.push(...this.bindRecordsName(this.declarationNode.name)); + } + + if (this.declarationNode.body instanceof BlockExpressionNode) { + errors.push(...this.bindBody(this.declarationNode.body)); + } + + return errors; + } + + private bindRecordsName (nameNode: SyntaxNode): CompileError[] { + const parent = this.declarationNode.parent; + const isTopLevel = parent instanceof ProgramNode; + + return isTopLevel + ? this.bindTopLevelName(nameNode) + : this.bindInsideTableName(nameNode); + } + + // At top-level - bind table and column references: + // records users(id, name) { } // binds: Table[users], Column[id], Column[name] + // records myschema.users(id, name) { } // binds: Schema[myschema], Table[users], Column[id], Column[name] + private bindTopLevelName (nameNode: SyntaxNode): CompileError[] { + const fragments = destructureCallExpression(nameNode).unwrap_or(undefined); + if (!fragments) { + return []; + } + + const tableBindee = fragments.variables.pop(); + const schemaBindees = fragments.variables; + + if (!tableBindee) { + return []; + } + + const tableErrors = lookupAndBindInScope(this.ast, [ + ...schemaBindees.map((b) => ({ node: b, kind: SymbolKind.Schema })), + { node: tableBindee, kind: SymbolKind.Table }, + ]); + + if (tableErrors.length > 0) { + return tableErrors; + } + + const tableSymbol = tableBindee.referee; + if (!tableSymbol?.symbolTable) { + return []; + } + + const tableName = getElementNameString(tableBindee.referee?.declaration).unwrap_or(''); + + const errors: CompileError[] = []; + for (const columnBindee of fragments.args) { + const columnName = extractVarNameFromPrimaryVariable(columnBindee).unwrap_or(''); + const columnIndex = createColumnSymbolIndex(columnName); + const columnSymbol = tableSymbol.symbolTable.get(columnIndex); + + if (!columnSymbol) { + errors.push(new CompileError( + CompileErrorCode.BINDING_ERROR, + `Column '${columnName}' does not exist in Table '${tableName}'`, + columnBindee, + )); + continue; + } + columnBindee.referee = columnSymbol; + columnSymbol.references.push(columnBindee); + + const originalBindee = this.boundColumns.get(columnSymbol); + if (originalBindee) { + errors.push(new CompileError( + CompileErrorCode.DUPLICATE_COLUMN_REFERENCES_IN_RECORDS, + `Column '${columnName}' is referenced more than once in a Records for Table '${tableName}'`, + originalBindee, + )); + errors.push(new CompileError( + CompileErrorCode.DUPLICATE_COLUMN_REFERENCES_IN_RECORDS, + `Column '${columnName}' is referenced more than once in a Records for Table '${tableName}'`, + columnBindee, + )); + } + this.boundColumns.set(columnSymbol, columnBindee); + } + + return errors; + } + + // Inside a table - bind column references to parent table: + // table users { records (id, name) { } } // binds: Column[id], Column[name] from parent table + // table users { records { } } // no columns to bind + private bindInsideTableName (nameNode: SyntaxNode): CompileError[] { + const parent = this.declarationNode.parent; + if (!(parent instanceof ElementDeclarationNode)) { + return []; + } + + const elementKind = getElementKind(parent).unwrap_or(undefined); + if (elementKind !== ElementKind.Table) { + return []; + } + + const tableSymbolTable = parent.symbol?.symbolTable; + if (!tableSymbolTable) { + return []; + } + + if (!isTupleOfVariables(nameNode)) { + return []; + } + + const tableName = getElementNameString(parent).unwrap_or(''); + + const errors: CompileError[] = []; + for (const columnBindee of nameNode.elementList) { + const columnName = extractVarNameFromPrimaryVariable(columnBindee).unwrap_or(''); + const columnIndex = createColumnSymbolIndex(columnName); + const columnSymbol = tableSymbolTable.get(columnIndex); + + if (!columnSymbol) { + errors.push(new CompileError( + CompileErrorCode.BINDING_ERROR, + `Column '${columnName}' does not exist in Table '${tableName}'`, + columnBindee, + )); + continue; + } + + columnBindee.referee = columnSymbol; + columnSymbol.references.push(columnBindee); + } + + return errors; + } + + // Bind enum field references in data rows. + // Example data rows with enum references: + // 1, status.active, 'hello' // binds: Enum[status], EnumField[active] + // myschema.status.pending, 42 // binds: Schema[myschema], Enum[status], EnumField[pending] + private bindBody (body?: FunctionApplicationNode | BlockExpressionNode): CompileError[] { + if (!body) { + return []; + } + if (body instanceof FunctionApplicationNode) { + return this.bindDataRow(body); + } + + const functions = body.body.filter((e) => e instanceof FunctionApplicationNode); + const subs = body.body.filter((e) => e instanceof ElementDeclarationNode); + + return [ + ...this.bindDataRows(functions as FunctionApplicationNode[]), + ...this.bindSubElements(subs as ElementDeclarationNode[]), + ]; + } + + private bindDataRows (rows: FunctionApplicationNode[]): CompileError[] { + return rows.flatMap((row) => this.bindDataRow(row)); + } + + // Bind a single data row. Structure: + // row.callee = CommaExpressionNode (e.g., 1, status.active, 'hello') or single value + // row.args = [] (empty) + private bindDataRow (row: FunctionApplicationNode): CompileError[] { + if (!row.callee) { + return []; + } + + const values = row.callee instanceof CommaExpressionNode + ? row.callee.elementList + : [row.callee]; + + const bindees = values.flatMap(scanNonListNodeForBinding); + + return bindees.flatMap((bindee) => { + const enumFieldBindee = bindee.variables.pop(); + const enumBindee = bindee.variables.pop(); + + if (!enumFieldBindee || !enumBindee) { + return []; + } + + const schemaBindees = bindee.variables; + + return lookupAndBindInScope(this.ast, [ + ...schemaBindees.map((b) => ({ node: b, kind: SymbolKind.Schema })), + { node: enumBindee, kind: SymbolKind.Enum }, + { node: enumFieldBindee, kind: SymbolKind.EnumField }, + ]); + }); + } + + private bindSubElements (subs: ElementDeclarationNode[]): CompileError[] { + return subs.flatMap((sub) => { + if (!sub.type) { + return []; + } + const _Binder = pickBinder(sub as ElementDeclarationNode & { type: SyntaxToken }); + const binder = new _Binder(sub as ElementDeclarationNode & { type: SyntaxToken }, this.ast, this.symbolFactory); + + return binder.bind(); + }); + } +} diff --git a/packages/dbml-parse/src/core/analyzer/binder/elementBinder/table.ts b/packages/dbml-parse/src/core/analyzer/binder/elementBinder/table.ts index 0ea929524..cd476d9ad 100644 --- a/packages/dbml-parse/src/core/analyzer/binder/elementBinder/table.ts +++ b/packages/dbml-parse/src/core/analyzer/binder/elementBinder/table.ts @@ -4,7 +4,7 @@ import { } from '../../../parser/nodes'; import { ElementBinder } from '../types'; import { SyntaxToken } from '../../../lexer/tokens'; -import { CompileError, CompileErrorCode } from '../../../errors'; +import { CompileError } from '../../../errors'; import { lookupAndBindInScope, pickBinder, scanNonListNodeForBinding } from '../utils'; import { aggregateSettingList, isValidPartialInjection } from '../../validator/utils'; import { SymbolKind, createColumnSymbolIndex } from '../../symbol/symbolIndex'; diff --git a/packages/dbml-parse/src/core/analyzer/binder/utils.ts b/packages/dbml-parse/src/core/analyzer/binder/utils.ts index 92c86122e..7157c3ed3 100644 --- a/packages/dbml-parse/src/core/analyzer/binder/utils.ts +++ b/packages/dbml-parse/src/core/analyzer/binder/utils.ts @@ -14,9 +14,10 @@ import TablePartialBinder from './elementBinder/tablePartial'; import { destructureComplexVariableTuple, extractVarNameFromPrimaryVariable } from '@/core/analyzer/utils'; import { SymbolKind, createNodeSymbolIndex } from '@/core/analyzer/symbol/symbolIndex'; import { getSymbolKind } from '@/core/analyzer/symbol/utils'; -import { getElementName, isExpressionAVariableNode } from '@/core/parser/utils'; +import { getElementNameString, isExpressionAVariableNode } from '@/core/parser/utils'; import { CompileError, CompileErrorCode } from '@/core/errors'; import { DEFAULT_SCHEMA_NAME } from '@/constants'; +import RecordsBinder from './elementBinder/records'; export function pickBinder (element: ElementDeclarationNode & { type: SyntaxToken }) { switch (element.type.value.toLowerCase() as ElementKind) { @@ -38,6 +39,8 @@ export function pickBinder (element: ElementDeclarationNode & { type: SyntaxToke return TablePartialBinder; case ElementKind.Check: return ChecksBinder; + case ElementKind.Records: + return RecordsBinder; default: return CustomBinder; } @@ -95,7 +98,7 @@ export function lookupAndBindInScope ( let curSymbolTable = initialScope.symbol.symbolTable; let curKind = getSymbolKind(initialScope.symbol); - let curName = initialScope instanceof ElementDeclarationNode ? getElementName(initialScope).unwrap_or('') : DEFAULT_SCHEMA_NAME; + let curName = initialScope instanceof ElementDeclarationNode ? getElementNameString(initialScope).unwrap_or('') : DEFAULT_SCHEMA_NAME; if (initialScope instanceof ProgramNode && symbolInfos.length) { const { node, kind } = symbolInfos[0]; diff --git a/packages/dbml-parse/src/core/analyzer/types.ts b/packages/dbml-parse/src/core/analyzer/types.ts index 1c082ff97..587dbbdcc 100644 --- a/packages/dbml-parse/src/core/analyzer/types.ts +++ b/packages/dbml-parse/src/core/analyzer/types.ts @@ -8,6 +8,7 @@ export enum ElementKind { TableGroup = 'tablegroup', TablePartial = 'tablepartial', Check = 'checks', + Records = 'records', } export enum SettingName { diff --git a/packages/dbml-parse/src/core/analyzer/utils.ts b/packages/dbml-parse/src/core/analyzer/utils.ts index 35b4dd87e..11a4762e4 100644 --- a/packages/dbml-parse/src/core/analyzer/utils.ts +++ b/packages/dbml-parse/src/core/analyzer/utils.ts @@ -4,12 +4,15 @@ import { ElementDeclarationNode, FunctionExpressionNode, InfixExpressionNode, + LiteralNode, PrimaryExpressionNode, ProgramNode, SyntaxNode, TupleExpressionNode, VariableNode, + CallExpressionNode, } from '@/core/parser/nodes'; +import { SyntaxToken, SyntaxTokenKind } from '@/core/lexer/tokens'; import { isRelationshipOp, isTupleOfVariables } from '@/core/analyzer/validator/utils'; import { NodeSymbolIndex, isPublicSchemaIndex } from '@/core/analyzer/symbol/symbolIndex'; import { NodeSymbol } from '@/core/analyzer/symbol/symbols'; @@ -18,7 +21,6 @@ import { isExpressionAQuotedString, isExpressionAVariableNode, } from '@/core/parser/utils'; -import { SyntaxToken } from '@/core/lexer/tokens'; import { ElementKind } from '@/core/analyzer/types'; export function getElementKind (node?: ElementDeclarationNode): Option { @@ -33,6 +35,7 @@ export function getElementKind (node?: ElementDeclarationNode): Option { return new Some(value.expression.literal.value); } +export function extractNumericLiteral (node?: SyntaxNode): number | null { + if (node instanceof PrimaryExpressionNode && node.expression instanceof LiteralNode) { + if (node.expression.literal?.kind === SyntaxTokenKind.NUMERIC_LITERAL) { + return Number(node.expression.literal.value); + } + } + return null; +} + +// Extract referee from a simple variable (x) or complex variable (a.b.c) +// For complex variables, returns the referee of the rightmost part +export function extractReferee (node?: SyntaxNode): NodeSymbol | undefined { + if (!node) return undefined; + + // Simple variable: x + if (isExpressionAVariableNode(node)) { + return node.referee; + } + + // Complex variable: a.b.c - get referee from rightmost part + if (node instanceof InfixExpressionNode && node.op?.value === '.') { + return extractReferee(node.rightExpression); + } + + return node.referee; +} + export function isBinaryRelationship (value?: SyntaxNode): value is InfixExpressionNode { if (!(value instanceof InfixExpressionNode)) { return false; @@ -222,6 +252,41 @@ export function extractIndexName ( return value.value.value; } +// Destructure a call expression like `schema.table(col1, col2)` or `table(col1, col2)`. +// Returns the callee variables (schema, table) and the args (col1, col2). +// schema.table(col1, col2) => { variables: [schema, table], args: [col1, col2] } +// table(col1, col2) => { variables: [table], args: [col1, col2] } +// table() => { variables: [table], args: [] } +export function destructureCallExpression ( + node?: SyntaxNode, +): Option<{ variables: (PrimaryExpressionNode & { expression: VariableNode })[]; args: (PrimaryExpressionNode & { expression: VariableNode })[] }> { + if (!(node instanceof CallExpressionNode) || !node.callee) { + return new None(); + } + + // Destructure the callee (e.g., schema.table or just table) + const fragments = destructureMemberAccessExpression(node.callee).unwrap_or(undefined); + if (!fragments || fragments.length === 0) { + return new None(); + } + + // All callee fragments must be simple variables + if (!fragments.every(isExpressionAVariableNode)) { + return new None(); + } + + // Get args from argument list + let args: (PrimaryExpressionNode & { expression: VariableNode })[] = []; + if (isTupleOfVariables(node.argumentList)) { + args = [...node.argumentList.elementList]; + } + + return new Some({ + variables: fragments as (PrimaryExpressionNode & { expression: VariableNode })[], + args, + }); +} + // Starting from `startElement` // find the closest outer scope that contains `id` // and return the symbol corresponding to `id` in that scope diff --git a/packages/dbml-parse/src/core/analyzer/validator/elementValidators/records.ts b/packages/dbml-parse/src/core/analyzer/validator/elementValidators/records.ts new file mode 100644 index 000000000..de0ca9cfd --- /dev/null +++ b/packages/dbml-parse/src/core/analyzer/validator/elementValidators/records.ts @@ -0,0 +1,259 @@ +import { partition } from 'lodash-es'; +import SymbolFactory from '@/core/analyzer/symbol/factory'; +import { CompileError, CompileErrorCode } from '@/core/errors'; +import { + BlockExpressionNode, CallExpressionNode, CommaExpressionNode, ElementDeclarationNode, EmptyNode, FunctionApplicationNode, FunctionExpressionNode, ListExpressionNode, ProgramNode, SyntaxNode, +} from '@/core/parser/nodes'; +import { SyntaxToken } from '@/core/lexer/tokens'; +import { ElementValidator } from '@/core/analyzer/validator/types'; +import { isExpressionASignedNumberExpression, isTupleOfVariables, isValidName, pickValidator } from '@/core/analyzer/validator/utils'; +import SymbolTable from '@/core/analyzer/symbol/symbolTable'; +import { destructureComplexVariable, getElementKind } from '@/core/analyzer/utils'; +import { ElementKind } from '@/core/analyzer/types'; +import { isAccessExpression, isExpressionAQuotedString, isExpressionAVariableNode } from '@/core/parser/utils'; + +export default class RecordsValidator implements ElementValidator { + private declarationNode: ElementDeclarationNode & { type: SyntaxToken }; + private publicSymbolTable: SymbolTable; + private symbolFactory: SymbolFactory; + + constructor (declarationNode: ElementDeclarationNode & { type: SyntaxToken }, publicSymbolTable: SymbolTable, symbolFactory: SymbolFactory) { + this.declarationNode = declarationNode; + this.publicSymbolTable = publicSymbolTable; + this.symbolFactory = symbolFactory; + } + + validate (): CompileError[] { + return [...this.validateContext(), ...this.validateName(this.declarationNode.name), ...this.validateAlias(this.declarationNode.alias), ...this.validateSettingList(this.declarationNode.attributeList), ...this.validateBody(this.declarationNode.body)]; + } + + // Validate that Records can only appear top-level or inside a Table. + // Valid: + // records users(id, name) { ... } // top-level + // table users { records (id, name) { } } // inside a table + // Invalid: + // enum status { records { } } // inside an enum + // indexes { records { } } // inside indexes + private validateContext (): CompileError[] { + const parent = this.declarationNode.parent; + const isTopLevel = parent instanceof ProgramNode; + + if (isTopLevel) { + return []; + } + + // Check if parent is a table + if (parent instanceof ElementDeclarationNode) { + const elementKind = getElementKind(parent).unwrap_or(undefined); + if (elementKind === ElementKind.Table) { + return []; + } + } + + return [new CompileError( + CompileErrorCode.INVALID_RECORDS_CONTEXT, + 'Records can only appear at top-level or inside a Table', + this.declarationNode, + )]; + } + + private validateName (nameNode?: SyntaxNode): CompileError[] { + const parent = this.declarationNode.parent; + const isTopLevel = parent instanceof ProgramNode; + + return isTopLevel + ? this.validateTopLevelName(nameNode) + : this.validateInsideTableName(nameNode); + } + + // At top-level - must reference a table with column list: + // Valid: records users(id, name, email) { } + // Valid: records myschema.users(id, name) { } + // Invalid: records users { } // missing column list + // Invalid: records { } // missing table reference + private validateTopLevelName (nameNode?: SyntaxNode): CompileError[] { + if (!(nameNode instanceof CallExpressionNode)) { + return [new CompileError( + CompileErrorCode.INVALID_RECORDS_NAME, + 'Records at top-level must have a name in the form of table(col1, col2, ...) or schema.table(col1, col2, ...)', + nameNode || this.declarationNode.type, + )]; + } + + const errors: CompileError[] = []; + + // Validate callee is a valid name (simple or complex variable like schema.table) + if (!nameNode.callee || !isValidName(nameNode.callee)) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_NAME, + 'Records table reference must be a valid table name', + nameNode.callee || nameNode, + )); + } + + // Validate argument list is a tuple of simple variables + if (!nameNode.argumentList || !isTupleOfVariables(nameNode.argumentList)) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_NAME, + 'Records column list must be simple column names', + nameNode.argumentList || nameNode, + )); + } + + return errors; + } + + // Inside a table - optional column list only: + // Valid: records (id, name) { } + // Valid: records { } // all columns + // Invalid: records other_table(id) { } // can't reference another table + private validateInsideTableName (nameNode?: SyntaxNode): CompileError[] { + if (nameNode && !isTupleOfVariables(nameNode)) { + return [new CompileError( + CompileErrorCode.INVALID_RECORDS_NAME, + 'Records inside a Table can only have a column list like (col1, col2, ...)', + nameNode, + )]; + } + + return []; + } + + private validateAlias (aliasNode?: SyntaxNode): CompileError[] { + if (aliasNode) { + return [new CompileError(CompileErrorCode.UNEXPECTED_ALIAS, 'Records cannot have an alias', aliasNode)]; + } + return []; + } + + private validateSettingList (settingList?: ListExpressionNode): CompileError[] { + if (settingList) { + return [new CompileError(CompileErrorCode.UNEXPECTED_SETTINGS, 'Records cannot have a setting list', settingList)]; + } + return []; + } + + // Validate that records body contains only simple values (one comma-separated row per line). + // Valid values: + // 1, 2, 3 // numbers + // -5, +10 // signed numbers + // 'hello', "world" // quoted strings + // `backtick string` // function expression (backtick string) + // true, false, TRUE, FALSE // booleans + // null, NULL // null + // ,, , // empty values (consecutive commas) + // status.active // enum field reference + // myschema.status.pending // schema.enum.field reference + // Invalid values: + // 2 + 1, 3 * 2 // arithmetic expressions + // func() // function calls + // (1, 2) // nested tuples + validateBody (body?: FunctionApplicationNode | BlockExpressionNode): CompileError[] { + if (!body) { + return []; + } + if (body instanceof FunctionApplicationNode) { + return this.validateDataRow(body); + } + + const [fields, subs] = partition(body.body, (e) => e instanceof FunctionApplicationNode); + return [ + ...this.validateDataRows(fields as FunctionApplicationNode[]), + ...this.validateSubElements(subs as ElementDeclarationNode[]), + ]; + } + + private validateDataRows (rows: FunctionApplicationNode[]): CompileError[] { + return rows.flatMap((row) => this.validateDataRow(row)); + } + + // Validate a single data row. Structure should be: + // row.callee = CommaExpressionNode (e.g., 1, 'hello', true) or single value (e.g., 1) + // row.args = [] (empty) + private validateDataRow (row: FunctionApplicationNode): CompileError[] { + const errors: CompileError[] = []; + + // Callee must exist & Args should be empty - all values should be in callee as a comma expression + if (!row.callee || row.args.length > 0) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + 'Invalid record row structure', + row, + )); + return errors; + } + + // Callee should be either a CommaExpressionNode or a single valid value + if (row.callee instanceof CommaExpressionNode) { + // Validate each element in the comma expression + for (const value of row.callee.elementList) { + if (!this.isValidRecordValue(value)) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + 'Records can only contain simple values (literals, null, true, false, or enum references). Complex expressions are not allowed.', + value, + )); + } + } + } else { + // Single value (no comma) + if (!this.isValidRecordValue(row.callee)) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + 'Records can only contain simple values (literals, null, true, false, or enum references). Complex expressions are not allowed.', + row.callee, + )); + } + } + + return errors; + } + + // Check if a value is valid for a record field. + private isValidRecordValue (value: SyntaxNode): boolean { + // Empty values from consecutive commas: 1,,3 or ,1,2 + if (value instanceof EmptyNode) { + return true; + } + + // Signed numbers: -2, +5, 42, 3.14 + if (isExpressionASignedNumberExpression(value)) { + return true; + } + + // Quoted strings: 'single', "double" + if (isExpressionAQuotedString(value)) { + return true; + } + + // Backtick strings: `hello world` + if (value instanceof FunctionExpressionNode) { + return true; + } + + // Simple identifiers: true, false, null, NULL, TRUE, FALSE + if (isExpressionAVariableNode(value)) { + return true; + } + + // Member access for enum field references: status.active, myschema.status.pending + if (isAccessExpression(value)) { + const fragments = destructureComplexVariable(value).unwrap_or(undefined); + return fragments !== undefined && fragments.length > 0; + } + + return false; + } + + private validateSubElements (subs: ElementDeclarationNode[]): CompileError[] { + return subs.flatMap((sub) => { + sub.parent = this.declarationNode; + if (!sub.type) { + return []; + } + const _Validator = pickValidator(sub as ElementDeclarationNode & { type: SyntaxToken }); + const validator = new _Validator(sub as ElementDeclarationNode & { type: SyntaxToken }, this.publicSymbolTable, this.symbolFactory); + return validator.validate(); + }); + } +} diff --git a/packages/dbml-parse/src/core/analyzer/validator/utils.ts b/packages/dbml-parse/src/core/analyzer/validator/utils.ts index 311715273..05ead97d4 100644 --- a/packages/dbml-parse/src/core/analyzer/validator/utils.ts +++ b/packages/dbml-parse/src/core/analyzer/validator/utils.ts @@ -38,6 +38,7 @@ import { CompileError, CompileErrorCode } from '@/core/errors'; import { ElementKind } from '@/core/analyzer/types'; import TablePartialValidator from './elementValidators/tablePartial'; import ChecksValidator from './elementValidators/checks'; +import RecordsValidator from './elementValidators/records'; export function pickValidator (element: ElementDeclarationNode & { type: SyntaxToken }) { switch (element.type.value.toLowerCase() as ElementKind) { @@ -59,6 +60,8 @@ export function pickValidator (element: ElementDeclarationNode & { type: SyntaxT return TablePartialValidator; case ElementKind.Check: return ChecksValidator; + case ElementKind.Records: + return RecordsValidator; default: return CustomValidator; } @@ -289,7 +292,7 @@ export function isValidColumnType (type: SyntaxNode): boolean { return variables !== undefined && variables.length > 0; } -export function aggregateSettingList (settingList?: ListExpressionNode): Report<{ [index: string]: AttributeNode[] }, CompileError> { +export function aggregateSettingList (settingList?: ListExpressionNode): Report<{ [index: string]: AttributeNode[] }> { const map: { [index: string]: AttributeNode[] } = {}; const errors: CompileError[] = []; if (!settingList) { diff --git a/packages/dbml-parse/src/core/analyzer/validator/validator.ts b/packages/dbml-parse/src/core/analyzer/validator/validator.ts index b860c5a4d..93c8e8816 100644 --- a/packages/dbml-parse/src/core/analyzer/validator/validator.ts +++ b/packages/dbml-parse/src/core/analyzer/validator/validator.ts @@ -27,7 +27,7 @@ export default class Validator { this.ast.symbol.declaration = this.ast; } - validate (): Report { + validate (): Report { const errors: CompileError[] = []; this.ast.body.forEach((element) => { diff --git a/packages/dbml-parse/src/core/errors.ts b/packages/dbml-parse/src/core/errors.ts index dff63b991..3e48b1028 100644 --- a/packages/dbml-parse/src/core/errors.ts +++ b/packages/dbml-parse/src/core/errors.ts @@ -109,6 +109,11 @@ export enum CompileErrorCode { DUPLICATE_CHECK_SETTING, INVALID_CHECK_SETTING_VALUE, + INVALID_RECORDS_CONTEXT, + INVALID_RECORDS_NAME, + INVALID_RECORDS_FIELD, + DUPLICATE_COLUMN_REFERENCES_IN_RECORDS, + BINDING_ERROR = 4000, UNSUPPORTED = 5000, @@ -124,7 +129,7 @@ export class CompileError extends Error { diagnostic: Readonly; - nodeOrToken: Readonly; // The nodes or tokens that cause the error + nodeOrToken: Readonly; // The nodes or tokens that cause the error start: Readonly; @@ -141,3 +146,8 @@ export class CompileError extends Error { Object.setPrototypeOf(this, CompileError.prototype); } } + +// CompileWarning is just an alias for CompileError +// Data type and constraint validation "errors" are returned as warnings +// but use the same class structure +export type CompileWarning = CompileError; diff --git a/packages/dbml-parse/src/core/interpreter/elementInterpreter/table.ts b/packages/dbml-parse/src/core/interpreter/elementInterpreter/table.ts index ce81fcf2b..440ad3d2f 100644 --- a/packages/dbml-parse/src/core/interpreter/elementInterpreter/table.ts +++ b/packages/dbml-parse/src/core/interpreter/elementInterpreter/table.ts @@ -157,6 +157,11 @@ export class TableInterpreter implements ElementInterpreter { case ElementKind.Check: return this.interpretChecks(sub); + case ElementKind.Records: + // Collect nested records for later interpretation + this.env.recordsElements.push(sub); + return []; + default: return []; } @@ -202,7 +207,7 @@ export class TableInterpreter implements ElementInterpreter { column.name = extractVarNameFromPrimaryVariable(field.callee as any).unwrap(); - const typeReport = processColumnType(field.args[0]); + const typeReport = processColumnType(field.args[0], this.env); column.type = typeReport.getValue(); errors.push(...typeReport.getErrors()); diff --git a/packages/dbml-parse/src/core/interpreter/elementInterpreter/tablePartial.ts b/packages/dbml-parse/src/core/interpreter/elementInterpreter/tablePartial.ts index 4f08080fb..9ba68a3eb 100644 --- a/packages/dbml-parse/src/core/interpreter/elementInterpreter/tablePartial.ts +++ b/packages/dbml-parse/src/core/interpreter/elementInterpreter/tablePartial.ts @@ -136,7 +136,7 @@ export class TablePartialInterpreter implements ElementInterpreter { column.name = extractVarNameFromPrimaryVariable(field.callee as any).unwrap(); - const typeReport = processColumnType(field.args[0]); + const typeReport = processColumnType(field.args[0], this.env); column.type = typeReport.getValue(); errors.push(...typeReport.getErrors()); diff --git a/packages/dbml-parse/src/core/interpreter/interpreter.ts b/packages/dbml-parse/src/core/interpreter/interpreter.ts index bee5c6d32..aecc28816 100644 --- a/packages/dbml-parse/src/core/interpreter/interpreter.ts +++ b/packages/dbml-parse/src/core/interpreter/interpreter.ts @@ -1,6 +1,5 @@ import { ProgramNode } from '@/core/parser/nodes'; -import { CompileError } from '@/core/errors'; -import { Database, InterpreterDatabase } from '@/core/interpreter/types'; +import { Database, InterpreterDatabase, TableRecord } from '@/core/interpreter/types'; import { TableInterpreter } from '@/core/interpreter/elementInterpreter/table'; import { StickyNoteInterpreter } from '@/core/interpreter/elementInterpreter/sticky_note'; import { RefInterpreter } from '@/core/interpreter/elementInterpreter/ref'; @@ -8,11 +7,44 @@ import { TableGroupInterpreter } from '@/core/interpreter/elementInterpreter/tab import { EnumInterpreter } from '@/core/interpreter/elementInterpreter/enum'; import { ProjectInterpreter } from '@/core/interpreter/elementInterpreter/project'; import { TablePartialInterpreter } from '@/core/interpreter/elementInterpreter/tablePartial'; +import { RecordsInterpreter } from '@/core/interpreter/records'; import Report from '@/core/report'; import { getElementKind } from '@/core/analyzer/utils'; import { ElementKind } from '@/core/analyzer/types'; function convertEnvToDb (env: InterpreterDatabase): Database { + // Convert records Map to array of TableRecord + const records: TableRecord[] = []; + for (const [table, rows] of env.records) { + if (rows.length > 0) { + // Collect all unique column names from all rows + const columnsSet = new Set(); + for (const row of rows) { + for (const colName of Object.keys(row.values)) { + columnsSet.add(colName); + } + } + + const columns = Array.from(columnsSet); + records.push({ + schemaName: table.schemaName || undefined, + tableName: table.name, + columns, + values: rows.map((r) => { + // Convert object-based values to array-based values ordered by columns + return columns.map((col) => { + const val = r.values[col]; + if (val) { + return { value: val.value, type: val.type }; + } + // Column not present in this row (shouldn't happen with validation) + return { value: null, type: 'unknown' }; + }); + }), + }); + } + } + return { schemas: [], tables: Array.from(env.tables.values()), @@ -23,6 +55,7 @@ function convertEnvToDb (env: InterpreterDatabase): Database { aliases: env.aliases, project: Array.from(env.project.values())[0] || {}, tablePartials: Array.from(env.tablePartials.values()), + records, }; } @@ -31,7 +64,7 @@ export default class Interpreter { ast: ProgramNode; env: InterpreterDatabase; - constructor (ast: ProgramNode) { + constructor (ast: ProgramNode, source: string) { this.ast = ast; this.env = { schema: [], @@ -45,10 +78,14 @@ export default class Interpreter { aliases: [], project: new Map(), tablePartials: new Map(), + records: new Map(), + recordsElements: [], + source, }; } - interpret (): Report { + interpret (): Report { + // First pass: interpret all non-records elements const errors = this.ast.body.flatMap((element) => { switch (getElementKind(element).unwrap_or(undefined)) { case ElementKind.Table: @@ -65,11 +102,20 @@ export default class Interpreter { return (new EnumInterpreter(element, this.env)).interpret(); case ElementKind.Project: return (new ProjectInterpreter(element, this.env)).interpret(); + case ElementKind.Records: + // Defer records interpretation - collect for later + this.env.recordsElements.push(element); + return []; default: return []; } }); - return new Report(convertEnvToDb(this.env), errors); + // Second pass: interpret all records elements grouped by table + // Now that all tables, enums, etc. are interpreted, we can validate records properly + const recordsResult = new RecordsInterpreter(this.env).interpret(this.env.recordsElements); + errors.push(...recordsResult.getErrors()); + + return new Report(convertEnvToDb(this.env), errors, recordsResult.getWarnings()); } } diff --git a/packages/dbml-parse/src/core/interpreter/records/index.ts b/packages/dbml-parse/src/core/interpreter/records/index.ts new file mode 100644 index 000000000..0b80e9d96 --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/index.ts @@ -0,0 +1,421 @@ +import { + BlockExpressionNode, + CommaExpressionNode, + ElementDeclarationNode, + FunctionApplicationNode, + FunctionExpressionNode, + SyntaxNode, + TupleExpressionNode, +} from '@/core/parser/nodes'; +import { CompileError, CompileErrorCode, CompileWarning } from '@/core/errors'; +import Report from '@/core/report'; +import { + RecordValue, + InterpreterDatabase, + Table, + Column, +} from '@/core/interpreter/types'; +import { + isNullish, + isEmptyStringLiteral, + tryExtractNumeric, + tryExtractBoolean, + tryExtractString, + tryExtractDateTime, + extractEnumAccess, + isNumericType, + isIntegerType, + isFloatType, + isBooleanType, + isStringType, + isDateTimeType, + getRecordValueType, + validatePrimaryKey, + validateUnique, + validateForeignKeys, +} from './utils'; +import { destructureCallExpression, extractVariableFromExpression } from '@/core/analyzer/utils'; +import { last } from 'lodash-es'; +import { mergeTableAndPartials } from '../utils'; + +export class RecordsInterpreter { + private env: InterpreterDatabase; + + constructor (env: InterpreterDatabase) { + this.env = env; + } + + interpret (elements: ElementDeclarationNode[]): Report { + const errors: CompileError[] = []; + const warnings: CompileWarning[] = []; + + for (const element of elements) { + const { table, mergedColumns } = getTableAndColumnsOfRecords(element, this.env); + for (const row of (element.body as BlockExpressionNode).body) { + const rowNode = row as FunctionApplicationNode; + const result = extractDataFromRow(rowNode, mergedColumns, table.schemaName, this.env); + errors.push(...result.getErrors()); + warnings.push(...result.getWarnings()); + const rowData = result.getValue(); + if (!rowData.row) continue; + if (!this.env.records.has(table)) { + this.env.records.set(table, []); + } + const tableRecords = this.env.records.get(table); + tableRecords!.push({ + values: rowData.row, + node: rowNode, + columnNodes: rowData.columnNodes, + }); + } + } + + const constraintResult = this.validateConstraints(); + warnings.push(...constraintResult); + + return new Report(undefined, errors, warnings); + } + + private validateConstraints (): CompileWarning[] { + const warnings: CompileWarning[] = []; + + // Validate PK constraints + warnings.push(...validatePrimaryKey(this.env)); + + // Validate unique constraints + warnings.push(...validateUnique(this.env)); + + // Validate FK constraints + warnings.push(...validateForeignKeys(this.env)); + + return warnings; + } +} + +function getTableAndColumnsOfRecords (records: ElementDeclarationNode, env: InterpreterDatabase): { table: Table; mergedTable: Table; mergedColumns: Column[] } { + const nameNode = records.name; + const parent = records.parent; + if (parent instanceof ElementDeclarationNode) { + const table = env.tables.get(parent)!; + const mergedTable = mergeTableAndPartials(table, env); + if (!nameNode) return { + table, + mergedTable, + mergedColumns: mergedTable.fields, + }; + const mergedColumns = (nameNode as TupleExpressionNode).elementList.map((e) => mergedTable.fields.find((f) => f.name === extractVariableFromExpression(e).unwrap())!); + return { + table, + mergedTable, + mergedColumns, + }; + } + const fragments = destructureCallExpression(nameNode!).unwrap(); + const tableNode = last(fragments.variables)!.referee!.declaration as ElementDeclarationNode; + const table = env.tables.get(tableNode)!; + const mergedTable = mergeTableAndPartials(table, env); + const mergedColumns = fragments.args.map((e) => mergedTable.fields.find((f) => f.name === extractVariableFromExpression(e).unwrap())!); + return { + table, + mergedTable, + mergedColumns, + }; +} + +function extractRowValues (row: FunctionApplicationNode): SyntaxNode[] { + if (row.args.length > 0) { + return []; + } + + if (row.callee instanceof CommaExpressionNode) { + return row.callee.elementList; + } + + if (row.callee) { + return [row.callee]; + } + + return []; +} + +type RowData = { row: Record | null; columnNodes: Record }; + +function extractDataFromRow ( + row: FunctionApplicationNode, + mergedColumns: Column[], + tableSchemaName: string | null, + env: InterpreterDatabase, +): Report { + const errors: CompileError[] = []; + const warnings: CompileWarning[] = []; + const rowObj: Record = {}; + const columnNodes: Record = {}; + + const args = extractRowValues(row); + if (args.length !== mergedColumns.length) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Expected ${mergedColumns.length} values but got ${args.length}`, + row, + )); + return new Report({ row: null, columnNodes: {} }, errors, warnings); + } + + for (let i = 0; i < mergedColumns.length; i++) { + const arg = args[i]; + const column = mergedColumns[i]; + columnNodes[column.name] = arg; + const result = extractValue(arg, column, tableSchemaName, env); + errors.push(...result.getErrors()); + warnings.push(...result.getWarnings()); + const value = result.getValue(); + if (value !== null) { + rowObj[column.name] = value; + } + } + + return new Report({ row: rowObj, columnNodes }, errors, warnings); +} + +function getNodeSourceText (node: SyntaxNode, source: string): string { + if (node instanceof FunctionExpressionNode) { + return node.value?.value || ''; + } + // Extract the source text using node start and end positions + if (!isNaN(node.start) && !isNaN(node.end)) { + return source.slice(node.start, node.end); + } + return ''; +} + +function extractValue ( + node: SyntaxNode, + column: Column, + tableSchemaName: string | null, + env: InterpreterDatabase, +): Report { + // FIXME: Make this more precise + const type = column.type.type_name.split('(')[0]; + const { increment, not_null: notNull, dbdefault } = column; + const isEnum = column.type.isEnum || false; + const valueType = getRecordValueType(type, isEnum); + + // Function expression - keep original type, mark as expression + if (node instanceof FunctionExpressionNode) { + return new Report({ + value: node.value?.value || '', + type: 'expression', + }, [], []); + } + + // NULL literal + if (isNullish(node) || (isEmptyStringLiteral(node) && !isStringType(type))) { + const hasDefaultValue = dbdefault && dbdefault.value.toString().toLowerCase() !== 'null'; + if (notNull && !hasDefaultValue && !increment) { + return new Report(null, [], [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `NULL not allowed for non-nullable column '${column.name}' without default and increment`, + node, + )]); + } + return new Report({ value: null, type: valueType }, [], []); + } + + // Enum type + if (isEnum) { + const enumAccess = extractEnumAccess(node); + if (enumAccess === null) { + return new Report(null, [], [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Invalid enum value for column '${column.name}'`, + node, + )]); + } + + const { path, value: enumValue } = enumAccess; + + // Validate enum value against enum definition + const enumTypeName = type; + // Parse column type to get schema and enum name + // Type can be 'status' or 'app.status' + const typeParts = enumTypeName.split('.'); + const expectedEnumName = typeParts[typeParts.length - 1]; + const expectedSchemaName = typeParts.length > 1 ? typeParts.slice(0, -1).join('.') : tableSchemaName; + + // Validate enum access path matches the enum type + if (path.length === 0) { + // String literal - only allowed for enums without schema qualification + if (expectedSchemaName !== null) { + return new Report(null, [], [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Enum value must be fully qualified: expected ${expectedSchemaName}.${expectedEnumName}.${enumValue}, got string literal ${JSON.stringify(enumValue)}`, + node, + )]); + } + } else { + // Enum access syntax - validate path + const actualPath = path.join('.'); + const expectedPath = expectedSchemaName ? `${expectedSchemaName}.${expectedEnumName}` : expectedEnumName; + + if (actualPath !== expectedPath) { + return new Report(null, [], [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Enum path mismatch: expected ${expectedPath}.${enumValue}, got ${actualPath}.${enumValue}`, + node, + )]); + } + } + + // Find the enum definition + let enumDef = Array.from(env.enums.values()).find( + (e) => e.name === expectedEnumName && e.schemaName === expectedSchemaName, + ); + // Fallback to null schema if not found + if (!enumDef && expectedSchemaName === tableSchemaName) { + enumDef = Array.from(env.enums.values()).find( + (e) => e.name === expectedEnumName && e.schemaName === null, + ); + } + + if (enumDef) { + const validValues = new Set(enumDef.values.map((v) => v.name)); + if (!validValues.has(enumValue)) { + const validValuesList = Array.from(validValues).join(', '); + const fullEnumPath = expectedSchemaName ? `${expectedSchemaName}.${expectedEnumName}` : expectedEnumName; + return new Report(null, [], [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Invalid enum value ${JSON.stringify(enumValue)} for column '${column.name}' of type '${fullEnumPath}' (valid values: ${validValuesList})`, + node, + )]); + } + } + + return new Report({ value: enumValue, type: valueType }, [], []); + } + + // Numeric type + if (isNumericType(type)) { + const numValue = tryExtractNumeric(node); + if (numValue === null) { + return new Report( + { value: getNodeSourceText(node, env.source), type: 'expression' }, + [], + [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Invalid numeric value for column '${column.name}'`, + node, + )], + ); + } + + // Integer type: validate no decimal point + if (isIntegerType(type) && !Number.isInteger(numValue)) { + return new Report(null, [], [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Invalid integer value ${numValue} for column '${column.name}': expected integer, got decimal`, + node, + )]); + } + + // Decimal/numeric type: validate precision and scale + if (isFloatType(type) && column.type.numericParams) { + const { precision, scale } = column.type.numericParams; + const numStr = numValue.toString(); + const parts = numStr.split('.'); + const integerPart = parts[0].replace(/^-/, ''); // Remove sign + const decimalPart = parts[1] || ''; + + const totalDigits = integerPart.length + decimalPart.length; + const decimalDigits = decimalPart.length; + + if (totalDigits > precision) { + return new Report(null, [], [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Numeric value ${numValue} for column '${column.name}' exceeds precision: expected at most ${precision} total digits, got ${totalDigits}`, + node, + )]); + } + + if (decimalDigits > scale) { + return new Report(null, [], [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Numeric value ${numValue} for column '${column.name}' exceeds scale: expected at most ${scale} decimal digits, got ${decimalDigits}`, + node, + )]); + } + } + + return new Report({ value: numValue, type: valueType }, [], []); + } + + // Boolean type + if (isBooleanType(type)) { + const boolValue = tryExtractBoolean(node); + if (boolValue === null) { + return new Report( + { value: getNodeSourceText(node, env.source), type: 'expression' }, + [], + [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Invalid boolean value for column '${column.name}'`, + node, + )], + ); + } + return new Report({ value: boolValue, type: valueType }, [], []); + } + + // Datetime type + if (isDateTimeType(type)) { + const dtValue = tryExtractDateTime(node); + if (dtValue === null) { + return new Report( + { value: getNodeSourceText(node, env.source), type: 'expression' }, + [], + [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Invalid datetime value for column '${column.name}', expected valid datetime format (e.g., 'YYYY-MM-DD', 'HH:MM:SS', 'YYYY-MM-DD HH:MM:SS', 'MM/DD/YYYY', 'D MMM YYYY', or 'MMM D, YYYY')`, + node, + )], + ); + } + return new Report({ value: dtValue, type: valueType }, [], []); + } + + // String type + if (isStringType(type)) { + const strValue = tryExtractString(node); + if (strValue === null) { + return new Report( + { value: getNodeSourceText(node, env.source), type: 'expression' }, + [], + [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `Invalid string value for column '${column.name}'`, + node, + )], + ); + } + + // Validate string length (using UTF-8 byte length like SQL engines) + if (column.type.lengthParam) { + const { length } = column.type.lengthParam; + // Calculate byte length in UTF-8 encoding (matching SQL behavior) + const actualByteLength = new TextEncoder().encode(strValue).length; + + if (actualByteLength > length) { + return new Report(null, [], [new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + `String value for column '${column.name}' exceeds maximum length: expected at most ${length} bytes (UTF-8), got ${actualByteLength} bytes`, + node, + )]); + } + } + + return new Report({ value: strValue, type: 'string' }, [], []); + } + + // Fallback - try to extract as string + const strValue = tryExtractString(node); + return new Report({ value: strValue, type: valueType }, [], []); +} diff --git a/packages/dbml-parse/src/core/interpreter/records/utils/constraints/fk.ts b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/fk.ts new file mode 100644 index 000000000..e041a9cc8 --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/fk.ts @@ -0,0 +1,216 @@ +import { CompileError, CompileErrorCode } from '@/core/errors'; +import { InterpreterDatabase, Ref, RefEndpoint, Table, TableRecordRow } from '@/core/interpreter/types'; +import { extractKeyValueWithDefault, hasNullInKey, formatFullColumnNames } from './helper'; +import { DEFAULT_SCHEMA_NAME } from '@/constants'; +import { mergeTableAndPartials, extractInlineRefsFromTablePartials } from '@/core/interpreter/utils'; + +interface TableLookup { + table: Table; + mergedTable: Table; + rows: TableRecordRow[]; +} + +type LookupMap = Map; + +// Create a table key from schema and table name +function makeTableKey (schema: string | null | undefined, table: string): string { + return schema ? `${schema}.${table}` : `${DEFAULT_SCHEMA_NAME}.${table}`; +} + +function createRecordMapFromKey ( + tables: Map, + records: Map, + env: InterpreterDatabase, +): LookupMap { + const lookup = new Map(); + + for (const table of tables.values()) { + const key = makeTableKey(table.schemaName, table.name); + const rows = records.get(table) || []; + const mergedTable = mergeTableAndPartials(table, env); + lookup.set(key, { table, mergedTable, rows }); + } + + return lookup; +} + +function collectValidKeys (rows: TableRecordRow[], columnNames: string[]): Set { + const keys = new Set(); + for (const row of rows) { + if (!hasNullInKey(row.values, columnNames)) { + keys.add(extractKeyValueWithDefault(row.values, columnNames)); + } + } + return keys; +} + +// Validate FK direction: source table values must exist in target table +function validateDirection ( + source: TableLookup, + target: TableLookup, + sourceEndpoint: RefEndpoint, + targetEndpoint: RefEndpoint, +): CompileError[] { + const errors: CompileError[] = []; + + if (source.rows.length === 0) { + return errors; + } + + const sourceTableColumns = new Set(source.mergedTable.fields.map((f) => f.name)); + if (sourceEndpoint.fieldNames.some((col) => !sourceTableColumns.has(col))) { + return errors; + } + + const targetTableColumns = new Set(target.mergedTable.fields.map((f) => f.name)); + if (targetEndpoint.fieldNames.some((col) => !targetTableColumns.has(col))) { + return errors; + } + + const validKeys = collectValidKeys(target.rows, targetEndpoint.fieldNames); + + for (const row of source.rows) { + if (hasNullInKey(row.values, sourceEndpoint.fieldNames)) continue; + + const key = extractKeyValueWithDefault(row.values, sourceEndpoint.fieldNames); + if (!validKeys.has(key)) { + // Create separate error for each column in the constraint + const errorNodes = sourceEndpoint.fieldNames + .map((col) => row.columnNodes[col]) + .filter(Boolean); + const isComposite = sourceEndpoint.fieldNames.length > 1; + const sourceColumnRef = formatFullColumnNames(source.mergedTable.schemaName, source.mergedTable.name, sourceEndpoint.fieldNames); + const targetColumnRef = formatFullColumnNames(target.mergedTable.schemaName, target.mergedTable.name, targetEndpoint.fieldNames); + + let msg: string; + if (isComposite) { + const valueStr = sourceEndpoint.fieldNames.map((col) => JSON.stringify(row.values[col]?.value)).join(', '); + msg = `FK violation: ${sourceColumnRef} = (${valueStr}) does not exist in ${targetColumnRef}`; + } else { + const value = JSON.stringify(row.values[sourceEndpoint.fieldNames[0]]?.value); + msg = `FK violation: ${sourceColumnRef} = ${value} does not exist in ${targetColumnRef}`; + } + + if (errorNodes.length > 0) { + // Create one error per column node + for (const node of errorNodes) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + node, + )); + } + } else { + // Fallback to row node if no column nodes available + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + row.node, + )); + } + } + } + + return errors; +} + +// Validate 1-1 relationship (both directions) +// * 1-1: Both sides reference each other. Every non-null value in table1 +// * must exist in table2, and vice versa. +function validateOneToOne ( + table1: TableLookup, + table2: TableLookup, + endpoint1: RefEndpoint, + endpoint2: RefEndpoint, +): CompileError[] { + return [ + ...validateDirection(table1, table2, endpoint1, endpoint2), + ...validateDirection(table2, table1, endpoint2, endpoint1), + ]; +} + +// Validate many-to-one relationship (FK on many side) +// * *-1: Many-to-one. The "*" side (endpoint1) has FK referencing the "1" side. +// * Values in endpoint1 must exist in endpoint2. +// * 1-*: One-to-many. The "*" side (endpoint2) has FK referencing the "1" side. +// * Values in endpoint2 must exist in endpoint1. +function validateManyToOne ( + manyTable: TableLookup, + oneTable: TableLookup, + manyEndpoint: RefEndpoint, + oneEndpoint: RefEndpoint, +): CompileError[] { + return validateDirection(manyTable, oneTable, manyEndpoint, oneEndpoint); +} + +// Validate many-to-many relationship (both directions) +// * *-*: Many-to-many. Both sides reference each other. +// * Values in each table must exist in the other. +function validateManyToMany ( + table1: TableLookup, + table2: TableLookup, + endpoint1: RefEndpoint, + endpoint2: RefEndpoint, +): CompileError[] { + return [ + ...validateDirection(table1, table2, endpoint1, endpoint2), + ...validateDirection(table2, table1, endpoint2, endpoint1), + ]; +} + +function validateRef (ref: Ref, lookup: LookupMap): CompileError[] { + if (!ref.endpoints) { + return []; + } + const [endpoint1, endpoint2] = ref.endpoints; + + const table1 = lookup.get(makeTableKey(endpoint1.schemaName, endpoint1.tableName)); + const table2 = lookup.get(makeTableKey(endpoint2.schemaName, endpoint2.tableName)); + + if (!table1 || !table2) return []; + + const rel1 = endpoint1.relation; + const rel2 = endpoint2.relation; + + if (rel1 === '1' && rel2 === '1') { + return validateOneToOne(table1, table2, endpoint1, endpoint2); + } + + if (rel1 === '*' && rel2 === '1') { + return validateManyToOne(table1, table2, endpoint1, endpoint2); + } + + if (rel1 === '1' && rel2 === '*') { + return validateManyToOne(table2, table1, endpoint2, endpoint1); + } + + if (rel1 === '*' && rel2 === '*') { + return validateManyToMany(table1, table2, endpoint1, endpoint2); + } + + return []; +} + +export function validateForeignKeys ( + env: InterpreterDatabase, +): CompileError[] { + const lookup = createRecordMapFromKey(env.tables, env.records, env); + const refs = Array.from(env.ref.values()); + const errors: CompileError[] = []; + + for (const ref of refs) { + errors.push(...validateRef(ref, lookup)); + } + + // Also validate inline refs from table partials + for (const mergedTableData of lookup.values()) { + const { table } = mergedTableData; + const partialRefs = extractInlineRefsFromTablePartials(table, env); + + for (const ref of partialRefs) { + errors.push(...validateRef(ref, lookup)); + } + } + + return errors; +} diff --git a/packages/dbml-parse/src/core/interpreter/records/utils/constraints/helper.ts b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/helper.ts new file mode 100644 index 000000000..24876bbb4 --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/helper.ts @@ -0,0 +1,79 @@ +import { RecordValue, Column } from '@/core/interpreter/types'; +import { isSerialType } from '../data'; + +// Given a set of columns and a row +// Return a string contain the values of the columns joined together with `|` -> This string is used for deduplication +// Note that we do not take autoincrement into account, as we cannot know its value +export function extractKeyValueWithDefault ( + row: Record, + columnNames: string[], + columns?: (Column | undefined)[], +): string { + return columnNames.map((name, idx) => { + const value = row[name]?.value; + + if ((value === null || value === undefined) && columns && columns[idx]) { + const column = columns[idx]; + if (column?.dbdefault) { + return JSON.stringify(column.dbdefault.value); + } + } + + return JSON.stringify(value); + }).join('|'); +} + +export function hasNullInKey ( + row: Record, + columnNames: string[], + columns?: (Column | undefined)[], +): boolean { + return columnNames.some((name, idx) => { + const value = row[name]?.value; + + // If value is null/undefined but column has default, it's not null + if ((value === null || value === undefined) && columns && columns[idx]) { + const column = columns[idx]; + if (column?.dbdefault) { + return false; // Has default, so not null + } + } + + return value === null || value === undefined; + }); +} + +// Check if column is an auto-increment column (serial types or increment flag) +export function isAutoIncrementColumn (column: Column): boolean { + return column.increment || isSerialType(column.type.type_name); +} + +// Check if column has NOT NULL constraint with a default value +export function hasNotNullWithDefault (column: Column): boolean { + return (column.not_null || false) && !!column.dbdefault; +} + +// Format full column name with schema and table +export function formatFullColumnName ( + schemaName: string | null, + tableName: string, + columnName: string, +): string { + if (schemaName) { + return `${schemaName}.${tableName}.${columnName}`; + } + return `${tableName}.${columnName}`; +} + +// Format full column names for single or composite constraints +export function formatFullColumnNames ( + schemaName: string | null, + tableName: string, + columnNames: string[], +): string { + if (columnNames.length === 1) { + return formatFullColumnName(schemaName, tableName, columnNames[0]); + } + const formatted = columnNames.map((col) => formatFullColumnName(schemaName, tableName, col)); + return `(${formatted.join(', ')})`; +} diff --git a/packages/dbml-parse/src/core/interpreter/records/utils/constraints/index.ts b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/index.ts new file mode 100644 index 000000000..e7451dc08 --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/index.ts @@ -0,0 +1,3 @@ +export * from './pk'; +export * from './unique'; +export * from './fk'; diff --git a/packages/dbml-parse/src/core/interpreter/records/utils/constraints/pk.ts b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/pk.ts new file mode 100644 index 000000000..8f0dd1f1c --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/pk.ts @@ -0,0 +1,179 @@ +import { CompileError, CompileErrorCode } from '@/core/errors'; +import { InterpreterDatabase } from '@/core/interpreter/types'; +import { + extractKeyValueWithDefault, + hasNullInKey, + isAutoIncrementColumn, + formatFullColumnNames, +} from './helper'; +import { mergeTableAndPartials } from '@/core/interpreter/utils'; + +export function validatePrimaryKey ( + env: InterpreterDatabase, +): CompileError[] { + const errors: CompileError[] = []; + + for (const [table, rows] of env.records) { + const mergedTable = mergeTableAndPartials(table, env); + if (rows.length === 0) continue; + + const pkConstraints: string[][] = []; + for (const field of mergedTable.fields) { + if (field.pk) { + pkConstraints.push([field.name]); + } + } + for (const index of mergedTable.indexes) { + if (index.pk) { + pkConstraints.push(index.columns.map((c) => c.value)); + } + } + + const columnsSet = new Set(); + for (const row of rows) { + for (const colName of Object.keys(row.values)) { + columnsSet.add(colName); + } + } + const columns = Array.from(columnsSet); + const columnMap = new Map(mergedTable.fields.map((c) => [c.name, c])); + + for (const pkColumns of pkConstraints) { + const missingColumns = pkColumns.filter((col) => !columns.includes(col)); + const pkColumnFields = pkColumns.map((col) => columnMap.get(col)).filter(Boolean); + + // If PK column is completely missing from records, check if it has default/autoincrement + if (missingColumns.length > 0) { + const missingColumnsWithoutDefaults = missingColumns.filter((colName) => { + const col = columnMap.get(colName); + // Allow missing only if column has autoincrement or has a default value + return col && !col.increment && !col.dbdefault; + }); + + // Report error for missing columns without defaults/autoincrement + if (missingColumnsWithoutDefaults.length > 0) { + const isComposite = missingColumnsWithoutDefaults.length > 1; + const constraintType = isComposite ? 'Composite PK' : 'PK'; + const columnRef = formatFullColumnNames(mergedTable.schemaName, mergedTable.name, missingColumnsWithoutDefaults); + const msg = `${constraintType}: Column ${columnRef} is missing from record and has no default value`; + for (const row of rows) { + // Create separate error for each column in the constraint + const errorNodes = pkColumns + .map((col) => row.columnNodes[col]) + .filter(Boolean); + + if (errorNodes.length > 0) { + // Create one error per column node + for (const node of errorNodes) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + node, + )); + } + } else { + // Fallback to row node if no column nodes available + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + row.node, + )); + } + } + } + continue; + } + + // Check if ALL pk columns are auto-increment (serial/increment) + // Only then can we skip NULL checks and treat nulls as unique + const allAutoIncrement = pkColumnFields.every((col) => col && isAutoIncrementColumn(col)); + + const seen = new Map(); // key -> first row index + + for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) { + const row = rows[rowIndex]; + + // Check for NULL in PK (considering defaults) + const hasNull = hasNullInKey(row.values, pkColumns, pkColumnFields); + if (hasNull) { + // Auto-increment columns can have NULL - each gets a unique value from DB + // Skip duplicate checking for this row (will be unique) + if (allAutoIncrement) { + continue; + } + // Non-auto-increment PK columns cannot have NULL (even with defaults) + // Create separate error for each column in the constraint + const errorNodes = pkColumns + .map((col) => row.columnNodes[col]) + .filter(Boolean); + const isComposite = pkColumns.length > 1; + const constraintType = isComposite ? 'Composite PK' : 'PK'; + const columnRef = formatFullColumnNames(mergedTable.schemaName, mergedTable.name, pkColumns); + const msg = `NULL in ${constraintType}: ${columnRef} cannot be NULL`; + + if (errorNodes.length > 0) { + // Create one error per column node + for (const node of errorNodes) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + node, + )); + } + } else { + // Fallback to row node if no column nodes available + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + row.node, + )); + } + continue; + } + + // Check for duplicates (using defaults for missing values) + const keyValue = extractKeyValueWithDefault(row.values, pkColumns, pkColumnFields); + if (seen.has(keyValue)) { + // Create separate error for each column in the constraint + const errorNodes = pkColumns + .map((col) => row.columnNodes[col]) + .filter(Boolean); + const isComposite = pkColumns.length > 1; + const constraintType = isComposite ? 'Composite PK' : 'PK'; + const columnRef = formatFullColumnNames(mergedTable.schemaName, mergedTable.name, pkColumns); + + let msg: string; + if (isComposite) { + const valueStr = pkColumns.map((col) => JSON.stringify(row.values[col]?.value)).join(', '); + msg = `Duplicate ${constraintType}: ${columnRef} = (${valueStr})`; + } else { + const value = JSON.stringify(row.values[pkColumns[0]]?.value); + msg = `Duplicate ${constraintType}: ${columnRef} = ${value}`; + } + + if (errorNodes.length > 0) { + // Create one error per column node + for (const node of errorNodes) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + node, + )); + } + } else { + // Fallback to row node if no column nodes available + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + row.node, + )); + } + } else { + seen.set(keyValue, rowIndex); + } + } + } + } + + return errors; +} diff --git a/packages/dbml-parse/src/core/interpreter/records/utils/constraints/unique.ts b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/unique.ts new file mode 100644 index 000000000..82273059f --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/utils/constraints/unique.ts @@ -0,0 +1,99 @@ +import { CompileError, CompileErrorCode } from '@/core/errors'; +import { InterpreterDatabase } from '@/core/interpreter/types'; +import { + extractKeyValueWithDefault, + hasNullInKey, + formatFullColumnNames, +} from './helper'; +import { mergeTableAndPartials } from '@/core/interpreter/utils'; + +export function validateUnique ( + env: InterpreterDatabase, +): CompileError[] { + const errors: CompileError[] = []; + + for (const [table, rows] of env.records) { + const mergedTable = mergeTableAndPartials(table, env); + if (rows.length === 0) continue; + + const uniqueConstraints: string[][] = []; + for (const field of mergedTable.fields) { + if (field.unique) { + uniqueConstraints.push([field.name]); + } + } + for (const index of mergedTable.indexes) { + if (index.unique) { + uniqueConstraints.push(index.columns.map((c) => c.value)); + } + } + + // Collect all unique column names from all rows + const columnsSet = new Set(); + for (const row of rows) { + for (const colName of Object.keys(row.values)) { + columnsSet.add(colName); + } + } + const columnMap = new Map(mergedTable.fields.map((c) => [c.name, c])); + + for (const uniqueColumns of uniqueConstraints) { + const uniqueColumnFields = uniqueColumns.map((col) => columnMap.get(col)).filter(Boolean); + + const seen = new Map(); // key -> first row index + + for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) { + const row = rows[rowIndex]; + + const hasNull = hasNullInKey(row.values, uniqueColumns, uniqueColumnFields); + + // NULL values are allowed in unique constraints and don't conflict + if (hasNull) { + continue; + } + + const keyValue = extractKeyValueWithDefault(row.values, uniqueColumns, uniqueColumnFields); + if (seen.has(keyValue)) { + // Create separate error for each column in the constraint + const errorNodes = uniqueColumns + .map((col) => row.columnNodes[col]) + .filter(Boolean); + const isComposite = uniqueColumns.length > 1; + const constraintType = isComposite ? 'Composite UNIQUE' : 'UNIQUE'; + const columnRef = formatFullColumnNames(mergedTable.schemaName, mergedTable.name, uniqueColumns); + + let msg: string; + if (isComposite) { + const valueStr = uniqueColumns.map((col) => JSON.stringify(row.values[col]?.value)).join(', '); + msg = `Duplicate ${constraintType}: ${columnRef} = (${valueStr})`; + } else { + const value = JSON.stringify(row.values[uniqueColumns[0]]?.value); + msg = `Duplicate ${constraintType}: ${columnRef} = ${value}`; + } + + if (errorNodes.length > 0) { + // Create one error per column node + for (const node of errorNodes) { + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + node, + )); + } + } else { + // Fallback to row node if no column nodes available + errors.push(new CompileError( + CompileErrorCode.INVALID_RECORDS_FIELD, + msg, + row.node, + )); + } + } else { + seen.set(keyValue, rowIndex); + } + } + } + } + + return errors; +} diff --git a/packages/dbml-parse/src/core/interpreter/records/utils/data/index.ts b/packages/dbml-parse/src/core/interpreter/records/utils/data/index.ts new file mode 100644 index 000000000..69d7d1970 --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/utils/data/index.ts @@ -0,0 +1,2 @@ +export * from './sqlTypes'; +export * from './values'; diff --git a/packages/dbml-parse/src/core/interpreter/records/utils/data/sqlTypes.ts b/packages/dbml-parse/src/core/interpreter/records/utils/data/sqlTypes.ts new file mode 100644 index 000000000..0d359108b --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/utils/data/sqlTypes.ts @@ -0,0 +1,189 @@ +import { + CallExpressionNode, + FunctionApplicationNode, +} from '@/core/parser/nodes'; +import { extractNumericLiteral } from '@/core/analyzer/utils'; +import { ColumnSymbol } from '@/core/analyzer/symbol/symbols'; + +export type SqlDialect = 'mysql' | 'postgres' | 'mssql' | 'oracle' | 'snowflake'; + +// Dialect-specific type mappings +const DIALECT_INTEGER_TYPES: Record> = { + mysql: new Set(['int', 'integer', 'smallint', 'bigint', 'tinyint', 'mediumint']), + postgres: new Set(['int', 'integer', 'smallint', 'bigint', 'serial', 'bigserial', 'smallserial']), + mssql: new Set(['int', 'integer', 'smallint', 'bigint', 'tinyint']), + oracle: new Set(['int', 'integer', 'smallint']), + snowflake: new Set(['int', 'integer', 'smallint', 'bigint', 'tinyint']), +}; + +const DIALECT_FLOAT_TYPES: Record> = { + mysql: new Set(['decimal', 'numeric', 'float', 'double', 'real']), + postgres: new Set(['decimal', 'numeric', 'real', 'float', 'double precision']), + mssql: new Set(['decimal', 'numeric', 'real', 'float']), + oracle: new Set(['number', 'decimal', 'numeric', 'float', 'real']), + snowflake: new Set(['number', 'decimal', 'numeric', 'float', 'double', 'real']), +}; + +const DIALECT_BOOL_TYPES: Record> = { + mysql: new Set(['bool', 'boolean', 'bit']), + postgres: new Set(['bool', 'boolean']), + mssql: new Set(['bit']), + oracle: new Set([]), // Oracle typically uses number(1) + snowflake: new Set(['boolean']), +}; + +const DIALECT_STRING_TYPES: Record> = { + mysql: new Set(['varchar', 'char', 'text', 'tinytext', 'mediumtext', 'longtext', 'string']), + postgres: new Set(['varchar', 'char', 'character', 'character varying', 'text', 'string']), + mssql: new Set(['varchar', 'char', 'nvarchar', 'nchar', 'text', 'ntext', 'string']), + oracle: new Set(['varchar', 'varchar2', 'char', 'nvarchar2', 'nchar', 'string']), + snowflake: new Set(['varchar', 'char', 'text', 'string']), +}; + +const DIALECT_BINARY_TYPES: Record> = { + mysql: new Set(['binary', 'varbinary', 'blob', 'tinyblob', 'mediumblob', 'longblob']), + postgres: new Set(['bytea']), + mssql: new Set(['binary', 'varbinary']), + oracle: new Set(['blob', 'raw']), + snowflake: new Set(['binary', 'varbinary']), +}; + +const DIALECT_DATETIME_TYPES: Record> = { + mysql: new Set(['date', 'datetime', 'timestamp', 'time']), + postgres: new Set(['date', 'timestamp', 'timestamptz', 'timestamp with time zone', 'timestamp without time zone', 'time', 'timetz', 'time with time zone', 'time without time zone']), + mssql: new Set(['date', 'datetime', 'datetime2', 'smalldatetime', 'time']), + oracle: new Set(['date', 'timestamp', 'timestamp with time zone', 'timestamp with local time zone']), + snowflake: new Set(['date', 'datetime', 'timestamp', 'time']), +}; + +const DIALECT_SERIAL_TYPES: Record> = { + mysql: new Set([]), + postgres: new Set(['serial', 'smallserial', 'bigserial']), + mssql: new Set([]), + oracle: new Set([]), + snowflake: new Set([]), +}; + +// Normalize a type name (lowercase, trim, collapse spaces) +export function normalizeTypeName (type: string): string { + return type.toLowerCase().trim().replace(/\s+/g, ' '); +} + +export function isIntegerType (type: string, dialect?: SqlDialect): boolean { + const normalized = normalizeTypeName(type); + if (dialect) { + return DIALECT_INTEGER_TYPES[dialect].has(normalized); + } + // Check if any dialect has this type + return Object.values(DIALECT_INTEGER_TYPES).some((set) => set.has(normalized)); +} + +export function isFloatType (type: string, dialect?: SqlDialect): boolean { + const normalized = normalizeTypeName(type); + if (dialect) { + return DIALECT_FLOAT_TYPES[dialect].has(normalized); + } + // Check if any dialect has this type + return Object.values(DIALECT_FLOAT_TYPES).some((set) => set.has(normalized)); +} + +export function isNumericType (type: string, dialect?: SqlDialect): boolean { + return isIntegerType(type, dialect) || isFloatType(type, dialect); +} + +export function isBooleanType (type: string, dialect?: SqlDialect): boolean { + const normalized = normalizeTypeName(type); + if (dialect) { + return DIALECT_BOOL_TYPES[dialect].has(normalized); + } + // Check if any dialect has this type + return Object.values(DIALECT_BOOL_TYPES).some((set) => set.has(normalized)); +} + +export function isStringType (type: string, dialect?: SqlDialect): boolean { + const normalized = normalizeTypeName(type); + if (dialect) { + return DIALECT_STRING_TYPES[dialect].has(normalized); + } + // Check if any dialect has this type + return Object.values(DIALECT_STRING_TYPES).some((set) => set.has(normalized)); +} + +export function isBinaryType (type: string, dialect?: SqlDialect): boolean { + const normalized = normalizeTypeName(type); + if (dialect) { + return DIALECT_BINARY_TYPES[dialect].has(normalized); + } + // Check if any dialect has this type + return Object.values(DIALECT_BINARY_TYPES).some((set) => set.has(normalized)); +} + +export function isDateTimeType (type: string, dialect?: SqlDialect): boolean { + const normalized = normalizeTypeName(type); + if (dialect) { + return DIALECT_DATETIME_TYPES[dialect].has(normalized); + } + // Check if any dialect has this type + return Object.values(DIALECT_DATETIME_TYPES).some((set) => set.has(normalized)); +} + +export function isSerialType (type: string, dialect?: SqlDialect): boolean { + const normalized = normalizeTypeName(type); + if (dialect) { + return DIALECT_SERIAL_TYPES[dialect].has(normalized); + } + // Check if any dialect has this type + return Object.values(DIALECT_SERIAL_TYPES).some((set) => set.has(normalized)); +} + +// Get type node from a column symbol's declaration +function getTypeNode (columnSymbol: ColumnSymbol) { + const declaration = columnSymbol.declaration; + if (!(declaration instanceof FunctionApplicationNode)) { + return null; + } + return declaration.args[0] || null; +} + +// Get numeric type parameters (precision, scale) from a column (e.g., decimal(10, 2)) +export function getNumericTypeParams (columnSymbol: ColumnSymbol): { precision?: number; scale?: number } { + const typeNode = getTypeNode(columnSymbol); + if (!(typeNode instanceof CallExpressionNode)) return {}; + if (!typeNode.argumentList || typeNode.argumentList.elementList.length !== 2) return {}; + + const precision = extractNumericLiteral(typeNode.argumentList.elementList[0]); + const scale = extractNumericLiteral(typeNode.argumentList.elementList[1]); + if (precision === null || scale === null) return {}; + + return { precision: Math.trunc(precision), scale: Math.trunc(scale) }; +} + +// Get length type parameter from a column (e.g., varchar(255)) +export function getLengthTypeParam (columnSymbol: ColumnSymbol): { length?: number } { + const typeNode = getTypeNode(columnSymbol); + if (!(typeNode instanceof CallExpressionNode)) return {}; + if (!typeNode.argumentList || typeNode.argumentList.elementList.length !== 1) return {}; + + const length = extractNumericLiteral(typeNode.argumentList.elementList[0]); + if (length === null) return {}; + + return { length: Math.trunc(length) }; +} + +// Get the record value type based on SQL type +// Returns: 'string' | 'bool' | 'integer' | 'real' | 'date' | 'time' | 'datetime' | original type +export function getRecordValueType (sqlType: string, isEnum: boolean): string { + if (isEnum) return 'string'; + if (isIntegerType(sqlType)) return 'integer'; + if (isFloatType(sqlType)) return 'real'; + if (isBooleanType(sqlType)) return 'bool'; + if (isStringType(sqlType)) return 'string'; + + // Specific datetime type mapping + const normalized = normalizeTypeName(sqlType); + if (normalized === 'date') return 'date'; + if (normalized === 'time' || normalized === 'timetz' || normalized === 'time with time zone' || normalized === 'time without time zone') return 'time'; + if (isDateTimeType(sqlType)) return 'datetime'; + + return sqlType; // Keep original type if not recognized +} diff --git a/packages/dbml-parse/src/core/interpreter/records/utils/data/values.ts b/packages/dbml-parse/src/core/interpreter/records/utils/data/values.ts new file mode 100644 index 000000000..a101e905b --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/utils/data/values.ts @@ -0,0 +1,282 @@ +import { + EmptyNode, + FunctionExpressionNode, + PrefixExpressionNode, + SyntaxNode, +} from '@/core/parser/nodes'; +import { isExpressionAnIdentifierNode } from '@/core/parser/utils'; +import { isExpressionASignedNumberExpression } from '@/core/analyzer/validator/utils'; +import { destructureComplexVariable, extractQuotedStringToken, extractNumericLiteral } from '@/core/analyzer/utils'; +import { last } from 'lodash-es'; +import { DateTime } from 'luxon'; + +export { extractNumericLiteral } from '@/core/analyzer/utils'; + +// Check if value is a NULL literal/Empty node +export function isNullish (value: SyntaxNode): boolean { + if (isExpressionAnIdentifierNode(value)) { + const varName = value.expression.variable?.value?.toLowerCase(); + return varName === 'null'; + } + return value instanceof EmptyNode; +} + +export function isEmptyStringLiteral (value: SyntaxNode): boolean { + return extractQuotedStringToken(value).unwrap_or(undefined) === ''; +} + +export function isFunctionExpression (value: SyntaxNode): value is FunctionExpressionNode { + return value instanceof FunctionExpressionNode; +} + +// Extract a signed number from a node (e.g., -42, +3.14) +// Handles prefix operators on numeric literals +export function extractSignedNumber (node: SyntaxNode): number | null { + // Try plain numeric literal first + const literal = extractNumericLiteral(node); + if (literal !== null) return literal; + + // Try signed number: -42, +3.14 + if (isExpressionASignedNumberExpression(node)) { + if (node instanceof PrefixExpressionNode && node.expression) { + const op = node.op?.value; + const inner = extractNumericLiteral(node.expression); + if (inner !== null) { + return op === '-' ? -inner : inner; + } + } + } + + return null; +} + +// Try to extract a numeric value from a syntax node or primitive +// Example: 0, 1, '0', '1', "2", -2, "-2" +export function tryExtractNumeric (value: SyntaxNode | number | string | boolean | undefined | null): number | null { + // Handle null/undefined + if (value === null || value === undefined) return null; + + // Handle primitive types + if (typeof value === 'number') return value; + if (typeof value === 'string') { + const parsed = Number(value); + return !isNaN(parsed) ? parsed : null; + } + if (typeof value === 'boolean') return value ? 1 : 0; + + // Numeric literal or signed number + const num = extractSignedNumber(value); + if (num !== null) return num; + + // Quoted string containing number: "42", '3.14' + const strValue = extractQuotedStringToken(value).unwrap_or(undefined); + if (strValue !== undefined) { + const parsed = Number(strValue); + if (!isNaN(parsed)) { + return parsed; + } + } + + return null; +} + +// Try to extract an integer value from a syntax node or primitive +// Rejects decimal values +// Example: 0, 1, '0', '1', "2", -2, "-2" +export function tryExtractInteger (value: SyntaxNode | number | string | boolean | undefined | null): number | null { + // Handle null/undefined + if (value === null || value === undefined) return null; + + // Handle primitive types + if (typeof value === 'number') { + // Reject if it has a decimal part + if (!Number.isInteger(value)) return null; + return value; + } + if (typeof value === 'string') { + const parsed = Number(value); + if (isNaN(parsed)) return null; + // Reject if it has a decimal part + if (!Number.isInteger(parsed)) return null; + return parsed; + } + if (typeof value === 'boolean') return value ? 1 : 0; + + // Numeric literal or signed number + const num = extractSignedNumber(value); + if (num !== null) { + // Reject if it has a decimal part + if (!Number.isInteger(num)) return null; + return num; + } + + // Quoted string containing number: "42", '3.14' + const strValue = extractQuotedStringToken(value).unwrap_or(undefined); + if (strValue !== undefined) { + const parsed = Number(strValue); + if (!isNaN(parsed) && Number.isInteger(parsed)) { + return parsed; + } + } + + return null; +} + +export const TRUTHY_VALUES = ['true', 'yes', 'y', 't', '1']; +export const FALSY_VALUES = ['false', 'no', 'n', 'f', '0']; + +// Try to extract a boolean value from a syntax node or primitive +// Example: 't', 'f', 'y', 'n', 'true', 'false', true, false, 'yes', 'no', 1, 0, '1', '0' +export function tryExtractBoolean (value: SyntaxNode | number | string | boolean | undefined | null): boolean | null { + // Handle null/undefined + if (value === null || value === undefined) return null; + + // Handle primitive types + if (typeof value === 'boolean') return value; + if (typeof value === 'number') { + if (value === 0) return false; + if (value === 1) return true; + return null; + } + if (typeof value === 'string') { + const lower = value.toLowerCase(); + if (TRUTHY_VALUES.includes(lower)) return true; + if (FALSY_VALUES.includes(lower)) return false; + return null; + } + + // Identifier: true, false + if (isExpressionAnIdentifierNode(value)) { + const varName = value.expression.variable?.value?.toLowerCase(); + if (varName === 'true') return true; + if (varName === 'false') return false; + } + + // Numeric literal: 0, 1 + const numVal = extractNumericLiteral(value); + if (numVal === 0) return false; + if (numVal === 1) return true; + + // Quoted string: 'true', 'false', 'yes', 'no', 'y', 'n', 't', 'f', '0', '1' + const strValue = extractQuotedStringToken(value)?.unwrap_or('').toLowerCase(); + if (strValue) { + if (TRUTHY_VALUES.includes(strValue)) return true; + if (FALSY_VALUES.includes(strValue)) return false; + } + + return null; +} + +// Try to extract an enum value from a syntax node or primitive +// Either enum references or string are ok +export function tryExtractEnum (value: SyntaxNode | string | undefined | null): string | null { + // Handle null/undefined + if (value === null || value === undefined) return null; + + // Handle primitive string + if (typeof value === 'string') return value; + + // Enum field reference: gender.male + const fragments = destructureComplexVariable(value).unwrap_or(undefined); + if (fragments) { + return last(fragments)!; + } + + // Quoted string: 'male' + return extractQuotedStringToken(value).unwrap_or(null); +} + +// Extract enum access with full path +// Returns { path: ['schema', 'enum'], value: 'field' } for schema.enum.field +// Returns { path: ['enum'], value: 'field' } for enum.field +// Returns { path: [], value: 'field' } for "field" (string literal) +export function extractEnumAccess (value: SyntaxNode): { path: string[]; value: string } | null { + // Enum field reference: schema.gender.male or gender.male + const fragments = destructureComplexVariable(value).unwrap_or(undefined); + if (fragments && fragments.length >= 2) { + const enumValue = last(fragments)!; + const enumPath = fragments.slice(0, -1); + return { path: enumPath, value: enumValue }; + } + + // Quoted string: 'male' + const stringValue = extractQuotedStringToken(value).unwrap_or(null); + if (stringValue !== null) { + return { path: [], value: stringValue }; + } + + return null; +} + +// Try to extract a string value from a syntax node or primitive +// Example: "abc", 'abc' +export function tryExtractString (value: SyntaxNode | string | undefined | null): string | null { + // Handle null/undefined + if (value === null || value === undefined) return null; + + // Handle primitive string + if (typeof value === 'string') return value; + + // Quoted string: 'hello', "world" + return extractQuotedStringToken(value).unwrap_or(null); +} + +// Supported datetime formats using luxon format tokens (excluding ISO 8601 which is handled separately) +const SUPPORTED_DATETIME_FORMATS = [ + 'yyyy-MM-dd', // ISO date: 2023-12-31 + 'HH:mm:ss', // Time: 23:59:59 + 'HH:mm:ss.SSS', // Time with milliseconds: 23:59:59.999 + 'yyyy-MM-dd HH:mm:ss', // ISO datetime with space: 2023-12-31 23:59:59 + 'M/d/yyyy', // MM/dd/yyyy: 12/31/2023 or 1/5/2023 + 'd MMM yyyy', // d MMM yyyy: 31 Dec 2023 or 1 Jan 2023 + 'MMM d, yyyy', // MMM d, yyyy: Dec 31, 2023 +]; + +function isDateTimeFormat (str: string): boolean { + // Try ISO 8601 format first (handles dates, times, datetimes with/without timezones) + const isoDate = DateTime.fromISO(str); + if (isoDate.isValid) { + return true; + } + + // Try other formats + for (const format of SUPPORTED_DATETIME_FORMATS) { + const dt = DateTime.fromFormat(str, format); + if (dt.isValid) { + return true; + } + } + + return false; +} + +// Try to extract a datetime value from a syntax node or primitive +// Supports: +// - ISO 8601: date (YYYY-MM-DD), time (HH:MM:SS), datetime (YYYY-MM-DDTHH:MM:SS) +// - MM/dd/yyyy: 12/31/2023 +// - d MMM yyyy: 31 Dec 2023 +// - MMM d, yyyy: Dec 31, 2023 +// - yyyy-MM-dd HH:mm:ss: 2023-12-31 23:59:59 +// Example: '2024-01-15', '10:30:00', '2024-01-15T10:30:00Z', '12/31/2023', '31 Dec 2023' +export function tryExtractDateTime (value: SyntaxNode | string | undefined | null): string | null { + // Handle null/undefined + if (value === null || value === undefined) return null; + + // Handle primitive string + if (typeof value === 'string') { + if (isDateTimeFormat(value)) { + return value; + } + return null; + } + + const strValue = extractQuotedStringToken(value).unwrap_or(null); + + if (strValue === null) return null; + + if (isDateTimeFormat(strValue)) { + return strValue; + } + + return null; +} diff --git a/packages/dbml-parse/src/core/interpreter/records/utils/index.ts b/packages/dbml-parse/src/core/interpreter/records/utils/index.ts new file mode 100644 index 000000000..5aa27560b --- /dev/null +++ b/packages/dbml-parse/src/core/interpreter/records/utils/index.ts @@ -0,0 +1,2 @@ +export * from './data'; +export * from './constraints'; diff --git a/packages/dbml-parse/src/core/interpreter/types.ts b/packages/dbml-parse/src/core/interpreter/types.ts index 554e67098..643f0a391 100644 --- a/packages/dbml-parse/src/core/interpreter/types.ts +++ b/packages/dbml-parse/src/core/interpreter/types.ts @@ -1,4 +1,4 @@ -import { ElementDeclarationNode } from '@/core/parser/nodes'; +import { ElementDeclarationNode, FunctionApplicationNode, SyntaxNode } from '@/core/parser/nodes'; import { Position } from '@/core/types'; import { CompileError } from '@/core/errors'; @@ -24,6 +24,39 @@ export interface InterpreterDatabase { tablePartials: Map; aliases: Alias[]; project: Map; + records: Map; + recordsElements: ElementDeclarationNode[]; + source: string; +} + +// Record value type +export type RecordValueType = 'string' | 'bool' | 'integer' | 'real' | 'date' | 'time' | 'datetime' | string; + +export interface RecordValue { + value: any; + type: RecordValueType; +} + +export interface TableRecordRow { + values: Record; + node: FunctionApplicationNode; + columnNodes: Record; // Map of column name to its value node +} + +export interface TableRecordsData { + table: Table; + rows: TableRecordRow[]; +} + +export interface TableRecord { + schemaName: string | undefined; + tableName: string; + columns: string[]; + values: RecordValue[][]; } export interface Database { @@ -36,13 +69,14 @@ export interface Database { aliases: Alias[]; project: Project; tablePartials: TablePartial[]; + records: TableRecord[]; } export interface Table { name: string; schemaName: null | string; alias: string | null; - fields: Column[]; + fields: Column[]; // The order of fields must match the order of declaration checks: Check[]; partials: TablePartialInjection[]; token: TokenPosition; @@ -65,6 +99,11 @@ export interface ColumnType { schemaName: string | null; type_name: string; args: string | null; + // Parsed type parameters + numericParams?: { precision: number; scale: number }; + lengthParam?: { length: number }; + // Whether this type references an enum + isEnum?: boolean; } export interface Column { @@ -216,6 +255,6 @@ export type Project = }; token: TokenPosition; [ - index: string & Omit + index: string & Omit ]: string; }; diff --git a/packages/dbml-parse/src/core/interpreter/utils.ts b/packages/dbml-parse/src/core/interpreter/utils.ts index 33b717f11..532e2582a 100644 --- a/packages/dbml-parse/src/core/interpreter/utils.ts +++ b/packages/dbml-parse/src/core/interpreter/utils.ts @@ -10,7 +10,7 @@ import { PrimaryExpressionNode, SyntaxNode, TupleExpressionNode, } from '@/core/parser/nodes'; import { - ColumnType, RelationCardinality, Table, TokenPosition, + ColumnType, RelationCardinality, Table, TokenPosition, InterpreterDatabase, Ref, } from '@/core/interpreter/types'; import { SyntaxTokenKind } from '@/core/lexer/tokens'; import { isDotDelimitedIdentifier, isExpressionAnIdentifierNode, isExpressionAQuotedString } from '@/core/parser/utils'; @@ -199,12 +199,16 @@ export function processDefaultValue (valueNode?: SyntaxNode): throw new Error('Unreachable'); } -export function processColumnType (typeNode: SyntaxNode): Report { +export function processColumnType (typeNode: SyntaxNode, env?: InterpreterDatabase): Report { let typeSuffix: string = ''; let typeArgs: string | null = null; + let numericParams: { precision: number; scale: number } | undefined; + let lengthParam: { length: number } | undefined; + let isEnum = false; + if (typeNode instanceof CallExpressionNode) { - typeArgs = typeNode - .argumentList!.elementList.map((e) => { + const argElements = typeNode.argumentList!.elementList; + typeArgs = argElements.map((e) => { if (isExpressionASignedNumberExpression(e)) { return getNumberTextFromExpression(e); } @@ -213,9 +217,33 @@ export function processColumnType (typeNode: SyntaxNode): Report 1) { return new Report( { schemaName: typeSchemaName.length === 0 ? null : typeSchemaName[0], type_name: `${typeName}${typeSuffix}`, args: typeArgs, + numericParams, + lengthParam, + isEnum, }, [new CompileError(CompileErrorCode.UNSUPPORTED, 'Nested schema is not supported', typeNode)], ); @@ -261,5 +304,99 @@ export function processColumnType (typeNode: SyntaxNode): Report p.name === name); + if (!partial) continue; + + // Merge fields (columns) + for (const c of partial.fields) { + if (fields.find((r) => r.name === c.name)) continue; + fields.push(c); + } + + // Merge indexes + indexes.push(...partial.indexes); + + // Merge checks + checks.push(...partial.checks); + + // Merge settings (later partials override) + if (partial.headerColor !== undefined) { + headerColor = partial.headerColor; + } + if (partial.note !== undefined) { + note = partial.note; + } + } + + return { + ...table, + fields, + indexes, + checks, + headerColor, + note, + }; +} + +export function extractInlineRefsFromTablePartials (table: Table, env: InterpreterDatabase): Ref[] { + const refs: Ref[] = []; + const tablePartials = [...env.tablePartials.values()]; + const originalFieldNames = new Set(table.fields.map((f) => f.name)); + + // Process partials in the same order as mergeTableAndPartials + for (const tablePartial of [...table.partials].reverse()) { + const { name } = tablePartial; + const partial = tablePartials.find((p) => p.name === name); + if (!partial) continue; + + // Extract inline refs from partial fields + for (const field of partial.fields) { + // Skip if this field is overridden by the original table + if (originalFieldNames.has(field.name)) continue; + + for (const inlineRef of field.inline_refs) { + const multiplicities = getMultiplicities(inlineRef.relation); + refs.push({ + name: null, + schemaName: null, + token: inlineRef.token, + endpoints: [ + { + schemaName: inlineRef.schemaName, + tableName: inlineRef.tableName, + fieldNames: inlineRef.fieldNames, + token: inlineRef.token, + relation: multiplicities[1], + }, + { + schemaName: table.schemaName, + tableName: table.name, + fieldNames: [field.name], + token: field.token, + relation: multiplicities[0], + }, + ], + }); + } + } + } + + return refs; +} diff --git a/packages/dbml-parse/src/core/lexer/lexer.ts b/packages/dbml-parse/src/core/lexer/lexer.ts index dc58c18eb..71827d5c1 100644 --- a/packages/dbml-parse/src/core/lexer/lexer.ts +++ b/packages/dbml-parse/src/core/lexer/lexer.ts @@ -92,7 +92,7 @@ export default class Lexer { ); } - lex (): Report { + lex (): Report { this.scanTokens(); this.tokens.push(SyntaxToken.create(SyntaxTokenKind.EOF, this.start, this.current, '', false)); this.gatherTrivia(); @@ -386,11 +386,14 @@ export default class Lexer { } // we accept identifiers starting with digits but must contain at least one char or underscore + // supports scientific notation: 1e2, 1E2, 1e+2, 1e-2, 1.5e10, 3.14e-5 numericLiteralOrIdentifier () { let nDots = 0; + if (this.isAtEnd()) { return this.addToken(SyntaxTokenKind.NUMERIC_LITERAL); } + while (!this.isAtEnd()) { const isDot = this.check('.'); nDots += isDot ? 1 : 0; @@ -398,6 +401,34 @@ export default class Lexer { break; } + // Check for scientific notation: e or E followed by optional sign and digits + // Only consume if we have a valid exponent (peek ahead first) + if (this.check('e') || this.check('E')) { + const charAfterE = this.peek(1); + const hasSign = charAfterE === '+' || charAfterE === '-'; + const digitPos = hasSign ? this.peek(2) : charAfterE; + + // Valid exponent: e/E followed by digit, or e/E followed by sign and digit + if (digitPos && isDigit(digitPos)) { + this.advance(); // consume 'e' or 'E' + if (hasSign) { + this.advance(); // consume '+' or '-' + } + // Consume exponent digits + while (!this.isAtEnd() && isDigit(this.peek()!)) { + this.advance(); + } + // After exponent, check if we can return + if (this.isAtEnd() || !isAlphaNumeric(this.peek()!)) { + return this.addToken(SyntaxTokenKind.NUMERIC_LITERAL); + } + // If there are more alphanumeric chars, it's an identifier (e.g., 1e2abc) + break; + } + // If 'e' is not followed by valid exponent, treat as identifier break + break; + } + // The first way to return a numeric literal without error: // a digit is encountered as the last character if (!isDot && this.current.offset === this.text.length - 1) { diff --git a/packages/dbml-parse/src/core/parser/nodes.ts b/packages/dbml-parse/src/core/parser/nodes.ts index 5c9d073e1..22769ccb0 100644 --- a/packages/dbml-parse/src/core/parser/nodes.ts +++ b/packages/dbml-parse/src/core/parser/nodes.ts @@ -98,10 +98,13 @@ export enum SyntaxNodeKind { CALL_EXPRESSION = '', PRIMARY_EXPRESSION = '', GROUP_EXPRESSION = '', - DUMMY = '', + COMMA_EXPRESSION = '', + EMPTY = '', ARRAY = '', } +// Form: * +// The root node of a DBML program containing top-level element declarations export class ProgramNode extends SyntaxNode { body: ElementDeclarationNode[]; @@ -117,6 +120,10 @@ export class ProgramNode extends SyntaxNode { } } +// Form: [] [as ] [] (: | { }) +// A declaration of a DBML element like Table, Ref, Enum, etc. +// e.g. Table users { ... } +// e.g. Ref: users.id > posts.user_id export class ElementDeclarationNode extends SyntaxNode { type?: SyntaxToken; @@ -181,6 +188,10 @@ export class ElementDeclarationNode extends SyntaxNode { } } +// Form: * +// A contiguous stream of identifiers (space-separated) +// e.g. primary key +// e.g. no action export class IdentiferStreamNode extends SyntaxNode { identifiers: SyntaxToken[]; @@ -190,6 +201,11 @@ export class IdentiferStreamNode extends SyntaxNode { } } +// Form: [: ] +// An attribute within a list expression (inside square brackets) +// e.g. primary key +// e.g. ref: users.id +// e.g. note: 'some note' export class AttributeNode extends SyntaxNode { name?: IdentiferStreamNode | PrimaryExpressionNode; @@ -226,10 +242,11 @@ export type NormalExpressionNode = | BlockExpressionNode | ListExpressionNode | TupleExpressionNode + | CommaExpressionNode | CallExpressionNode | PrimaryExpressionNode | FunctionExpressionNode - | DummyNode + | EmptyNode | ArrayNode; export type ExpressionNode = @@ -237,6 +254,10 @@ export type ExpressionNode = | NormalExpressionNode | FunctionApplicationNode; +// Form: +// A unary prefix expression +// e.g. -5 +// e.g. !flag export class PrefixExpressionNode extends SyntaxNode { op?: SyntaxToken; @@ -252,6 +273,11 @@ export class PrefixExpressionNode extends SyntaxNode { } } +// Form: +// A binary infix expression +// e.g. 1 + 2 +// e.g. a.b +// e.g. x > y export class InfixExpressionNode extends SyntaxNode { op?: SyntaxToken; @@ -278,6 +304,9 @@ export class InfixExpressionNode extends SyntaxNode { } } +// Form: +// A unary postfix expression +// e.g. x++ export class PostfixExpressionNode extends SyntaxNode { op?: SyntaxToken; @@ -293,6 +322,10 @@ export class PostfixExpressionNode extends SyntaxNode { } } +// Form: `` +// A backtick-quoted function/SQL expression +// e.g. `now()` +// e.g. `id * 2` export class FunctionExpressionNode extends SyntaxNode { value?: SyntaxToken; @@ -302,6 +335,11 @@ export class FunctionExpressionNode extends SyntaxNode { } } +// Form: * | +// A function application with space-separated arguments or comma-separated expressions +// e.g. id integer [primary key] +// e.g. Note 'This is a note' +// e.g. sample_data 1, 2, 3 export class FunctionApplicationNode extends SyntaxNode { callee?: ExpressionNode; @@ -317,6 +355,10 @@ export class FunctionApplicationNode extends SyntaxNode { } } +// Form: { * } +// A block containing element declarations or function applications +// e.g. { id integer } +// e.g. { Note: 'text' } export class BlockExpressionNode extends SyntaxNode { blockOpenBrace?: SyntaxToken; @@ -343,6 +385,10 @@ export class BlockExpressionNode extends SyntaxNode { } } +// Form: [ [, ]* ] +// A bracketed list of attributes +// e.g. [primary key] +// e.g. [ref: users.id, note: 'foreign key'] export class ListExpressionNode extends SyntaxNode { listOpenBracket?: SyntaxToken; @@ -378,6 +424,10 @@ export class ListExpressionNode extends SyntaxNode { } } +// Form: ( [, ]* ) +// A parenthesized comma-separated list of expressions +// e.g. (1, 2, 3) +// e.g. (a, b) export class TupleExpressionNode extends SyntaxNode { tupleOpenParen?: SyntaxToken; @@ -413,6 +463,41 @@ export class TupleExpressionNode extends SyntaxNode { } } +// Form: , [, ]* +// A comma-separated list of expressions without delimiters (CSV-like) +// Used inside function applications for multi-value arguments +// Empty fields (consecutive commas) are represented by DummyNode +// e.g. 1, 2, 3 +// e.g. 'a', 'b', 'c' +// e.g. 1, , 3 (empty field in middle) +// e.g. 1, 2, (trailing comma) +export class CommaExpressionNode extends SyntaxNode { + elementList: NormalExpressionNode[]; + + commaList: SyntaxToken[]; + + constructor ( + { + elementList = [], + commaList = [], + }: { + elementList?: NormalExpressionNode[]; + commaList?: SyntaxToken[]; + }, + id: SyntaxNodeId, + ) { + super(id, SyntaxNodeKind.COMMA_EXPRESSION, [ + ...interleave(elementList, commaList), + ]); + this.elementList = elementList; + this.commaList = commaList; + } +} + +// Form: ( ) +// A parenthesized expression (single element, no commas) +// e.g. (1 + 2) +// e.g. (a.b) export class GroupExpressionNode extends TupleExpressionNode { constructor ( { @@ -439,6 +524,10 @@ export class GroupExpressionNode extends TupleExpressionNode { } } +// Form: ( ) +// A function call with parenthesized arguments +// e.g. func(a, b, c) +// e.g. now() export class CallExpressionNode extends SyntaxNode { callee?: NormalExpressionNode; @@ -460,6 +549,11 @@ export class CallExpressionNode extends SyntaxNode { } } +// Form: | | +// A literal value +// e.g. 123 +// e.g. 'hello' +// e.g. #ff0000 export class LiteralNode extends SyntaxNode { literal?: SyntaxToken; @@ -469,6 +563,10 @@ export class LiteralNode extends SyntaxNode { } } +// Form: | +// A variable reference +// e.g. users +// e.g. "table name" export class VariableNode extends SyntaxNode { variable?: SyntaxToken; @@ -478,6 +576,10 @@ export class VariableNode extends SyntaxNode { } } +// Form: | +// A primary expression (leaf node in expression tree) +// e.g. 123 +// e.g. users export class PrimaryExpressionNode extends SyntaxNode { expression?: LiteralNode | VariableNode; @@ -487,14 +589,22 @@ export class PrimaryExpressionNode extends SyntaxNode { } } -// A placeholder for missing operands -export class DummyNode extends SyntaxNode { - constructor ({ pre }: { pre: Readonly | Readonly }, id: SyntaxNodeId) { - const nextToken = SyntaxToken.create(SyntaxTokenKind.SPACE, pre.endPos, pre.endPos, ' ', false); - super(id, SyntaxNodeKind.DUMMY, [nextToken]); +// Form: (empty) +// A placeholder node used for: +// - Missing operands during error recovery +// - Empty fields in comma expressions (e.g. 1, , 3) +// - Trailing commas in comma expressions (e.g. 1, 2,) +export class EmptyNode extends SyntaxNode { + constructor ({ prevToken }: { prevToken: Readonly | Readonly }, id: SyntaxNodeId) { + const nextToken = SyntaxToken.create(SyntaxTokenKind.SPACE, prevToken.endPos, prevToken.endPos, ' ', false); + super(id, SyntaxNodeKind.EMPTY, [nextToken]); } } +// Form: [ ] +// An array access expression +// e.g. arr[0] +// e.g. matrix[i] export class ArrayNode extends SyntaxNode { array?: NormalExpressionNode; indexer?: ListExpressionNode; diff --git a/packages/dbml-parse/src/core/parser/parser.ts b/packages/dbml-parse/src/core/parser/parser.ts index 5d3a811b5..07f99e4cf 100644 --- a/packages/dbml-parse/src/core/parser/parser.ts +++ b/packages/dbml-parse/src/core/parser/parser.ts @@ -13,8 +13,10 @@ import { AttributeNode, BlockExpressionNode, CallExpressionNode, - DummyNode, + CommaExpressionNode, + EmptyNode, ElementDeclarationNode, + ExpressionNode, FunctionApplicationNode, FunctionExpressionNode, GroupExpressionNode, @@ -168,7 +170,7 @@ export default class Parser { this.tokens = tokens; } - parse (): Report<{ ast: ProgramNode; tokens: SyntaxToken[] }, CompileError> { + parse (): Report<{ ast: ProgramNode; tokens: SyntaxToken[] }> { const body = this.program(); const eof = this.advance(); const program = this.nodeFactory.create(ProgramNode, { body, eof }); @@ -396,8 +398,8 @@ export default class Parser { // Since function application expression is the most generic form // by default, we'll interpret any expression as a function application const args: { - callee?: NormalExpressionNode; - args: NormalExpressionNode[]; + callee?: ExpressionNode; + args: ExpressionNode[]; } = { args: [] }; // Try interpreting the function application as an element declaration expression @@ -407,7 +409,7 @@ export default class Parser { ); try { - args.callee = this.normalExpression(); + args.callee = this.commaExpression(); } catch (e) { if (!(e instanceof PartialParsingError)) { throw e; @@ -425,18 +427,18 @@ export default class Parser { // Note { // 'This is a note' // } - if (this.shouldStopExpression()) { + if (this.shouldStopFunctionApplication()) { return buildExpression(); } - let prevNode = args.callee!; - while (!this.shouldStopExpression()) { + let prevNode: ExpressionNode = args.callee!; + while (!this.shouldStopFunctionApplication()) { if (!hasTrailingSpaces(this.previous())) { this.logError(prevNode, CompileErrorCode.MISSING_SPACES, 'Expect a following space'); } try { - prevNode = this.normalExpression(); + prevNode = this.commaExpression(); args.args.push(prevNode); } catch (e) { if (!(e instanceof PartialParsingError)) { @@ -451,20 +453,93 @@ export default class Parser { return buildExpression(); } - private shouldStopExpression (): boolean { + private shouldStopFunctionApplication (): boolean { if (this.isAtEnd() || hasTrailingNewLines(this.previous())) { return true; } const nextTokenKind = this.peek().kind; - return ( - nextTokenKind === SyntaxTokenKind.RBRACE - || nextTokenKind === SyntaxTokenKind.RBRACKET - || nextTokenKind === SyntaxTokenKind.RPAREN - || nextTokenKind === SyntaxTokenKind.COMMA - || nextTokenKind === SyntaxTokenKind.COLON - ); + return [ + SyntaxTokenKind.RBRACE, + SyntaxTokenKind.RBRACKET, + SyntaxTokenKind.RPAREN, + SyntaxTokenKind.COMMA, + SyntaxTokenKind.COLON, + ].includes(nextTokenKind); + } + + private commaExpression (): NormalExpressionNode | CommaExpressionNode { + // If we start with a comma, treat the first field as an empty node + const firstExpr = this.check(SyntaxTokenKind.COMMA) + ? this.nodeFactory.create(EmptyNode, { prevToken: this.previous() }) + : this.normalExpression(); + + // If there's no comma, just return the normal expression + if (!this.check(SyntaxTokenKind.COMMA)) { + return firstExpr; + } + + const args: { + elementList: NormalExpressionNode[]; + commaList: SyntaxToken[]; + } = { + elementList: [firstExpr], + commaList: [], + }; + + do { + args.commaList.push(this.advance()); + + // Check for empty field (trailing commas) + if (this.shouldStopCommaExpression()) { + args.elementList.push(this.nodeFactory.create(EmptyNode, { prevToken: this.previous() })); + break; + } + + // Check for empty field (consecutive commas) + if (this.check(SyntaxTokenKind.COMMA)) { + args.elementList.push(this.nodeFactory.create(EmptyNode, { prevToken: this.previous() })); + continue; + } + + try { + const nextExpr = this.normalExpression(); + args.elementList.push(nextExpr); + } catch (e) { + if (!(e instanceof PartialParsingError)) { + throw e; + } + if (e.partialNode) { + args.elementList.push(e.partialNode); + } + throw new PartialParsingError( + e.token, + this.nodeFactory.create(CommaExpressionNode, args), + e.handlerContext, + ); + } + } while (!this.shouldStopCommaExpression() && this.check(SyntaxTokenKind.COMMA)); + + return this.nodeFactory.create(CommaExpressionNode, args); + } + + private shouldStopCommaExpression (): boolean { + if (this.isAtEnd() || hasTrailingNewLines(this.previous())) { + return true; + } + + const nextTokenKind = this.peek().kind; + + return [ + // We do not support {} in CSV line + SyntaxTokenKind.RBRACE, SyntaxTokenKind.LBRACE, + // We do not support [] in CSV line + SyntaxTokenKind.RBRACKET, SyntaxTokenKind.LBRACKET, + // We do not support () in CSV line + SyntaxTokenKind.RPAREN, SyntaxTokenKind.LPAREN, + SyntaxTokenKind.COLON, + ].includes(nextTokenKind); } private normalExpression (): NormalExpressionNode { @@ -487,8 +562,9 @@ export default class Parser { // When '(' is encountered, // consider it part of another expression if // it's at the start of a new line + // or if there are spaces before '(' (disallow call expressions with spaces) // and we're currently not having unmatched '(' or '[' - isAtStartOfLine(this.previous(), token) + (isAtStartOfLine(this.previous(), token) || hasTrailingSpaces(this.previous())) && !this.contextStack.isWithinGroupExpressionContext() && !this.contextStack.isWithinListExpressionContext() ) { @@ -595,7 +671,7 @@ export default class Parser { throw new PartialParsingError( args.op, - this.nodeFactory.create(DummyNode, { pre: args.op }), + this.nodeFactory.create(EmptyNode, { prevToken: args.op }), this.contextStack.findHandlerContext(this.tokens, this.current), ); } @@ -618,10 +694,10 @@ export default class Parser { leftExpression = this.nodeFactory.create(PrefixExpressionNode, args); } else { leftExpression = this.extractOperand(); - if (leftExpression instanceof DummyNode) { + if (leftExpression instanceof EmptyNode) { throw new PartialParsingError( this.peek(), - this.nodeFactory.create(DummyNode, { pre: this.peek() }), + this.nodeFactory.create(EmptyNode, { prevToken: this.peek() }), this.contextStack.findHandlerContext(this.tokens, this.current), ); } @@ -683,7 +759,7 @@ export default class Parser { ); } - return this.nodeFactory.create(DummyNode, { pre: this.previous() }); + return this.nodeFactory.create(EmptyNode, { prevToken: this.previous() }); } /* Parsing FunctionExpression */ diff --git a/packages/dbml-parse/src/core/parser/utils.ts b/packages/dbml-parse/src/core/parser/utils.ts index 484891ba4..aa9b2e92d 100644 --- a/packages/dbml-parse/src/core/parser/utils.ts +++ b/packages/dbml-parse/src/core/parser/utils.ts @@ -8,7 +8,8 @@ import { AttributeNode, BlockExpressionNode, CallExpressionNode, - DummyNode, + CommaExpressionNode, + EmptyNode, ElementDeclarationNode, ExpressionNode, FunctionApplicationNode, @@ -31,8 +32,8 @@ import { destructureComplexVariable } from '@/core/analyzer/utils'; // Try to interpret a function application as an element export function convertFuncAppToElem ( - callee: ExpressionNode | undefined, - args: NormalExpressionNode[], + callee: ExpressionNode | CommaExpressionNode | undefined, + args: (NormalExpressionNode | CommaExpressionNode)[], factory: NodeFactory, ): Option { if (!callee || !isExpressionAnIdentifierNode(callee) || args.length === 0) { @@ -158,6 +159,9 @@ function markInvalidNode (node: SyntaxNode) { node.commaList.forEach(markInvalid); node.elementList.forEach(markInvalid); markInvalid(node.tupleCloseParen); + } else if (node instanceof CommaExpressionNode) { + node.commaList.forEach(markInvalid); + node.elementList.forEach(markInvalid); } else if (node instanceof CallExpressionNode) { markInvalid(node.callee); markInvalid(node.argumentList); @@ -180,7 +184,7 @@ function markInvalidNode (node: SyntaxNode) { } else if (node instanceof ProgramNode) { node.body.forEach(markInvalid); markInvalid(node.eof); - } else if (node instanceof DummyNode) { + } else if (node instanceof EmptyNode) { // DummyNode has no children to mark invalid } else { throw new Error('Unreachable case in markInvalidNode'); @@ -270,6 +274,12 @@ export function getMemberChain (node: SyntaxNode): Readonly<(SyntaxNode | Syntax ); } + if (node instanceof CommaExpressionNode) { + return filterUndefined( + ...alternateLists(node.elementList, node.commaList), + ); + } + if (node instanceof CallExpressionNode) { return filterUndefined(node.callee, node.argumentList); } @@ -388,6 +398,6 @@ export function extractStringFromIdentifierStream (stream?: IdentiferStreamNode) return new Some(name); } -export function getElementName (element: ElementDeclarationNode): Option { - return destructureComplexVariable(element.name).map((ss) => ss.join('.')); +export function getElementNameString (element?: ElementDeclarationNode): Option { + return destructureComplexVariable(element?.name).map((ss) => ss.join('.')); } diff --git a/packages/dbml-parse/src/core/report.ts b/packages/dbml-parse/src/core/report.ts index f13d731e2..e59d1e54f 100644 --- a/packages/dbml-parse/src/core/report.ts +++ b/packages/dbml-parse/src/core/report.ts @@ -1,30 +1,40 @@ -// Used to hold the result of a computation and any errors along the way -export default class Report { +import { CompileError, CompileWarning } from './errors'; + +// Used to hold the result of a computation and any errors/warnings along the way +export default class Report { private value: T; - private errors: E[]; + private errors: CompileError[]; + + private warnings: CompileWarning[]; - constructor (value: T, errors?: E[]) { + constructor (value: T, errors?: CompileError[], warnings?: CompileWarning[]) { this.value = value; this.errors = errors === undefined ? [] : errors; + this.warnings = warnings === undefined ? [] : warnings; } getValue (): T { return this.value; } - getErrors (): E[] { + getErrors (): CompileError[] { return this.errors; } - chain(fn: (_: T) => Report): Report { + getWarnings (): CompileWarning[] { + return this.warnings; + } + + chain(fn: (_: T) => Report): Report { const res = fn(this.value); const errors = [...this.errors, ...res.errors]; + const warnings = [...this.warnings, ...res.warnings]; - return new Report(res.value, errors); + return new Report(res.value, errors, warnings); } - map(fn: (_: T) => U): Report { - return new Report(fn(this.value), this.errors); + map(fn: (_: T) => U): Report { + return new Report(fn(this.value), this.errors, this.warnings); } } diff --git a/packages/dbml-parse/src/core/serialization/serialize.ts b/packages/dbml-parse/src/core/serialization/serialize.ts index 87c1ba003..0da422b9a 100644 --- a/packages/dbml-parse/src/core/serialization/serialize.ts +++ b/packages/dbml-parse/src/core/serialization/serialize.ts @@ -1,10 +1,9 @@ import { NodeSymbol } from '@/core/analyzer/symbol/symbols'; import { ProgramNode, SyntaxNode } from '@/core/parser/nodes'; import Report from '@/core/report'; -import { CompileError } from '@/core/errors'; export function serialize ( - report: Readonly>, + report: Readonly>, pretty: boolean = false, ): string { return JSON.stringify( diff --git a/packages/dbml-parse/src/core/utils.ts b/packages/dbml-parse/src/core/utils.ts index b9c0a5dd5..6f026b58f 100644 --- a/packages/dbml-parse/src/core/utils.ts +++ b/packages/dbml-parse/src/core/utils.ts @@ -34,8 +34,10 @@ export function isAlphaNumeric (char: string): boolean { return isAlphaOrUnderscore(char) || isDigit(char); } -export function addQuoteIfNeeded (s: string): string { - return s.split('').every(isAlphaNumeric) ? s : `"${s}"`; +export function addQuoteToSuggestionIfNeeded (s: string): string { + if (!s) return `"${s}"`; + const isValid = s.split('').every((char) => isAlphaOrUnderscore(char) || isDigit(char)) && !isDigit(s[0]); + return isValid ? s : `"${s}"`; } export function alternateLists (firstList: T[], secondList: S[]): (T | S)[] { diff --git a/packages/dbml-parse/src/index.ts b/packages/dbml-parse/src/index.ts index 3e6dcf27c..00b670ba2 100644 --- a/packages/dbml-parse/src/index.ts +++ b/packages/dbml-parse/src/index.ts @@ -4,10 +4,11 @@ import * as services from '@/services/index'; // Export the types that playground and other consumers need export { - // Element types from analyzer ElementKind, } from '@/core/analyzer/types'; +export * from '@/core/interpreter/records/utils'; + export { // Core AST node types SyntaxNode, @@ -34,13 +35,16 @@ export { type Position, } from '@/core/types'; -export { - addQuoteIfNeeded, -} from '@/core/utils'; - export { // Scope kinds from compiler ScopeKind, + // Utilities + splitQualifiedIdentifier, + unescapeString, + escapeString, + formatRecordValue, + isValidIdentifier, + addDoubleQuoteIfNeeded, } from '@/compiler/index'; // Export interpreted types for structured data diff --git a/packages/dbml-parse/src/services/diagnostics/README.md b/packages/dbml-parse/src/services/diagnostics/README.md new file mode 100644 index 000000000..a5fe0bdf1 --- /dev/null +++ b/packages/dbml-parse/src/services/diagnostics/README.md @@ -0,0 +1,158 @@ +# DBML Diagnostics Provider + +The Diagnostics Provider offers a unified interface to access compilation errors and warnings from DBML source code. + +## Features + +- **Unified Diagnostics**: Get all errors and warnings in a single call +- **Filtered Access**: Retrieve only errors or only warnings +- **Monaco Integration**: Convert diagnostics to Monaco editor markers +- **Rich Information**: Full position information, severity levels, and error codes + +## Usage + +### Basic Usage + +```typescript +import Compiler from '@dbml/parse'; + +const compiler = new Compiler(); +compiler.setSource(yourDBMLCode); + +const services = compiler.initMonacoServices(); +const diagnosticsProvider = services.diagnosticsProvider; + +// Get all diagnostics (errors + warnings) +const allDiagnostics = diagnosticsProvider.provideDiagnostics(); + +// Get only errors +const errors = diagnosticsProvider.provideErrors(); + +// Get only warnings +const warnings = diagnosticsProvider.provideWarnings(); + +// Get Monaco markers (for editor integration) +const markers = diagnosticsProvider.provideMarkers(); +``` + +### Diagnostic Structure + +Each diagnostic contains: + +```typescript +interface Diagnostic { + severity: 'error' | 'warning'; + message: string; + startLineNumber: number; + startColumn: number; + endLineNumber: number; + endColumn: number; + code?: string | number; + source?: string; +} +``` + +### Monaco Marker Structure + +For Monaco editor integration: + +```typescript +interface MarkerData { + severity: MarkerSeverity; // 8 = Error, 4 = Warning + message: string; + startLineNumber: number; + startColumn: number; + endLineNumber: number; + endColumn: number; + code?: string | number; + source?: string; +} +``` + +## Error vs Warning + +### Errors +Errors are critical issues that prevent proper compilation: +- Syntax errors +- Binding errors (undefined references) +- Structural issues + +### Warnings +Warnings are validation issues that don't prevent compilation but indicate potential problems: +- Constraint violations (PK, UNIQUE, FK) +- Type compatibility issues +- NOT NULL violations +- Data validation failures + +## Example + +```typescript +const compiler = new Compiler(); + +const source = ` + Table users { + id int [pk] + email varchar [unique] + } + + records users(id, email) { + 1, "user1@example.com" + 1, "user2@example.com" // Duplicate PK warning + 2, "user1@example.com" // Duplicate UNIQUE warning + } +`; + +compiler.setSource(source); + +const { diagnosticsProvider } = compiler.initMonacoServices(); +const diagnostics = diagnosticsProvider.provideDiagnostics(); + +diagnostics.forEach((diag) => { + console.log(`[${diag.severity}] Line ${diag.startLineNumber}: ${diag.message}`); +}); + +// Output: +// [warning] Line 9: Duplicate PK: users.id = 1 +// [warning] Line 10: Duplicate UNIQUE: users.email = "user1@example.com" +``` + +## Monaco Editor Integration + +```typescript +import * as monaco from 'monaco-editor'; + +const compiler = new Compiler(); +compiler.setSource(yourCode); + +const { diagnosticsProvider } = compiler.initMonacoServices(); +const markers = diagnosticsProvider.provideMarkers(); + +// Set markers in Monaco editor +monaco.editor.setModelMarkers(model, 'dbml', markers); +``` + +## Direct Compiler Access + +You can also access errors and warnings directly from the compiler: + +```typescript +const compiler = new Compiler(); +compiler.setSource(yourCode); + +// Direct access +const errors = compiler.parse.errors(); +const warnings = compiler.parse.warnings(); + +console.log(`Found ${errors.length} errors and ${warnings.length} warnings`); +``` + +## Error Codes + +Error codes are defined in `CompileErrorCode` enum and include: + +- `1000-1999`: Symbol and token errors +- `3000-3999`: Validation errors (names, settings, etc.) +- `4000-4999`: Binding errors +- `5000-5999`: Semantic errors (circular refs, unsupported operations) + +See `src/core/errors.ts` for the complete list. diff --git a/packages/dbml-parse/src/services/diagnostics/provider.ts b/packages/dbml-parse/src/services/diagnostics/provider.ts new file mode 100644 index 000000000..5b86a7aba --- /dev/null +++ b/packages/dbml-parse/src/services/diagnostics/provider.ts @@ -0,0 +1,122 @@ +import type Compiler from '@/compiler'; +import type { CompileError, CompileWarning } from '@/core/errors'; +import type { MarkerSeverity, MarkerData } from '@/services/types'; +import type { SyntaxNode } from '@/core/parser/nodes'; +import type { SyntaxToken } from '@/core/lexer/tokens'; + +export interface Diagnostic { + severity: 'error' | 'warning'; + message: string; + startLineNumber: number; + startColumn: number; + endLineNumber: number; + endColumn: number; + code?: string | number; + source?: string; +} + +export default class DBMLDiagnosticsProvider { + private compiler: Compiler; + + constructor (compiler: Compiler) { + this.compiler = compiler; + } + + /** + * Get all diagnostics (errors and warnings) from the current compilation + */ + provideDiagnostics (): Diagnostic[] { + const diagnostics: Diagnostic[] = []; + const report = this.compiler.parse._(); + + // Add errors + const errors = report.getErrors(); + for (const error of errors) { + diagnostics.push(this.createDiagnostic(error, 'error')); + } + + // Add warnings + const warnings = report.getWarnings(); + for (const warning of warnings) { + diagnostics.push(this.createDiagnostic(warning, 'warning')); + } + + return diagnostics; + } + + /** + * Get only errors from the current compilation + */ + provideErrors (): Diagnostic[] { + const errors = this.compiler.parse._().getErrors(); + return errors.map((error) => this.createDiagnostic(error, 'error')); + } + + /** + * Get only warnings from the current compilation + */ + provideWarnings (): Diagnostic[] { + const warnings = this.compiler.parse._().getWarnings(); + return warnings.map((warning) => this.createDiagnostic(warning, 'warning')); + } + + /** + * Convert Monaco markers format (for editor integration) + */ + provideMarkers (): MarkerData[] { + const diagnostics = this.provideDiagnostics(); + return diagnostics.map((diag) => { + const severity = this.getSeverityValue(diag.severity); + return { + severity, + message: diag.message, + startLineNumber: diag.startLineNumber, + startColumn: diag.startColumn, + endLineNumber: diag.endLineNumber, + endColumn: diag.endColumn, + code: diag.code ? String(diag.code) : undefined, + source: diag.source || 'dbml', + }; + }); + } + + private createDiagnostic ( + errorOrWarning: CompileError | CompileWarning, + severity: 'error' | 'warning', + ): Diagnostic { + const nodeOrToken = errorOrWarning.nodeOrToken; + + // Get position from the node or token + // Both SyntaxNode and SyntaxToken always have startPos and endPos + let startPos, endPos; + if (Array.isArray(nodeOrToken)) { + // Handle array of nodes/tokens - use first and last + const firstItem = nodeOrToken[0] as SyntaxNode | SyntaxToken; + const lastItem = nodeOrToken[nodeOrToken.length - 1] as SyntaxNode | SyntaxToken; + startPos = firstItem.startPos; + endPos = lastItem.endPos; + } else { + // Single node or token + const item = nodeOrToken as SyntaxNode | SyntaxToken; + startPos = item.startPos; + endPos = item.endPos; + } + + return { + severity, + message: errorOrWarning.diagnostic, + startLineNumber: startPos.line + 1, + startColumn: startPos.column + 1, + endLineNumber: endPos.line + 1, + endColumn: endPos.column + 1, + code: errorOrWarning.code, + source: 'dbml', + }; + } + + private getSeverityValue (severity: 'error' | 'warning'): MarkerSeverity { + // Monaco marker severity values + // Error = 8, Warning = 4, Info = 2, Hint = 1 + return severity === 'error' ? 8 : 4; + } +} diff --git a/packages/dbml-parse/src/services/index.ts b/packages/dbml-parse/src/services/index.ts index 4146d329a..55e7cb0cd 100644 --- a/packages/dbml-parse/src/services/index.ts +++ b/packages/dbml-parse/src/services/index.ts @@ -1,6 +1,7 @@ import DBMLCompletionItemProvider from './suggestions/provider'; import DBMLDefinitionProvider from './definition/provider'; import DBMLReferencesProvider from './references/provider'; +import DBMLDiagnosticsProvider from './diagnostics/provider'; export * from '@/services/types'; @@ -8,4 +9,5 @@ export { DBMLCompletionItemProvider, DBMLDefinitionProvider, DBMLReferencesProvider, + DBMLDiagnosticsProvider, }; diff --git a/packages/dbml-parse/src/services/suggestions/provider.ts b/packages/dbml-parse/src/services/suggestions/provider.ts index b55fe1f3e..4e5afd46e 100644 --- a/packages/dbml-parse/src/services/suggestions/provider.ts +++ b/packages/dbml-parse/src/services/suggestions/provider.ts @@ -1,6 +1,7 @@ import { destructureMemberAccessExpression, extractVariableFromExpression, + getElementKind, } from '@/core/analyzer/utils'; import { extractStringFromIdentifierStream, @@ -17,7 +18,7 @@ import { CompletionItemKind, CompletionItemInsertTextRule, } from '@/services/types'; -import { TableSymbol } from '@/core/analyzer/symbol/symbols'; +import { TableSymbol, type NodeSymbol } from '@/core/analyzer/symbol/symbols'; import { SymbolKind, destructureIndex } from '@/core/analyzer/symbol/symbolIndex'; import { pickCompletionItemKind, @@ -25,9 +26,16 @@ import { addQuoteIfNeeded, noSuggestions, prependSpace, + isOffsetWithinElementHeader, + excludeSuggestions, + addExpandAllColumnsSuggestion, + isTupleEmpty, } from '@/services/suggestions/utils'; +import { suggestRecordRowSnippet, FALLTHROUGH } from '@/services/suggestions/recordRowSnippet'; import { AttributeNode, + CallExpressionNode, + CommaExpressionNode, ElementDeclarationNode, FunctionApplicationNode, IdentiferStreamNode, @@ -40,7 +48,7 @@ import { } from '@/core/parser/nodes'; import { getOffsetFromMonacoPosition } from '@/services/utils'; import { isComment } from '@/core/lexer/utils'; -import { SettingName } from '@/core/analyzer/types'; +import { ElementKind, SettingName } from '@/core/analyzer/types'; export default class DBMLCompletionItemProvider implements CompletionItemProvider { private compiler: Compiler; @@ -54,6 +62,13 @@ export default class DBMLCompletionItemProvider implements CompletionItemProvide provideCompletionItems (model: TextModel, position: Position): CompletionList { const offset = getOffsetFromMonacoPosition(model, position); + + // Try to suggest record row snippet first + const recordRowSnippet = suggestRecordRowSnippet(this.compiler, model, position, offset); + if (recordRowSnippet !== FALLTHROUGH) { + return recordRowSnippet || noSuggestions(); + } + const flatStream = this.compiler.token.flatStream(); // bOc: before-or-contain const { token: bOcToken, index: bOcTokenId } = this.compiler.container.token(offset); @@ -136,10 +151,22 @@ export default class DBMLCompletionItemProvider implements CompletionItemProvide } else if (container instanceof ListExpressionNode) { return suggestInAttribute(this.compiler, offset, container); } else if (container instanceof TupleExpressionNode) { - return suggestInTuple(this.compiler, offset); + return suggestInTuple(this.compiler, offset, container); + } else if (container instanceof CommaExpressionNode) { + return suggestInCommaExpression(this.compiler, offset); + } else if (container instanceof CallExpressionNode) { + return suggestInCallExpression(this.compiler, offset, container); } else if (container instanceof FunctionApplicationNode) { return suggestInSubField(this.compiler, offset, container); } else if (container instanceof ElementDeclarationNode) { + // Check if we're in a Records element header - suggest schema.table names + if ( + getElementKind(container).unwrap_or(undefined) === ElementKind.Records + && isOffsetWithinElementHeader(offset, container) + ) { + return suggestInRecordsHeader(this.compiler, offset, container); + } + if ( (container.bodyColon && offset >= container.bodyColon.end) || (container.body && isOffsetWithinSpan(offset, container.body)) @@ -184,6 +211,26 @@ function suggestOnRelOp ( return noSuggestions(); } +function suggestMembersOfSymbol ( + compiler: Compiler, + symbol: NodeSymbol, + acceptedKinds: SymbolKind[], +): CompletionList { + return addQuoteIfNeeded({ + suggestions: compiler.symbol + .members(symbol) + .filter(({ kind }) => acceptedKinds.includes(kind)) + .map(({ name, kind }) => ({ + label: name, + insertText: name, + insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, + kind: pickCompletionItemKind(kind), + sortText: pickCompletionItemKind(kind).toString().padStart(2, '0'), + range: undefined as any, + })), + }); +} + function suggestNamesInScope ( compiler: Compiler, offset: number, @@ -200,17 +247,7 @@ function suggestNamesInScope ( if (curElement?.symbol?.symbolTable) { const { symbol } = curElement; res.suggestions.push( - ...compiler.symbol - .members(symbol) - .filter(({ kind }) => acceptedKinds.includes(kind)) - .map(({ name, kind }) => ({ - label: name, - insertText: name, - insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, - kind: pickCompletionItemKind(kind), - sortText: pickCompletionItemKind(kind).toString().padStart(2, '0'), - range: undefined as any, - })), + ...suggestMembersOfSymbol(compiler, symbol, acceptedKinds).suggestions, ); } curElement = curElement instanceof ElementDeclarationNode ? curElement.parent : undefined; @@ -219,14 +256,65 @@ function suggestNamesInScope ( return addQuoteIfNeeded(res); } -function suggestInTuple (compiler: Compiler, offset: number): CompletionList { +function suggestInTuple (compiler: Compiler, offset: number, tupleContainer: TupleExpressionNode): CompletionList { const scopeKind = compiler.container.scopeKind(offset); + const element = compiler.container.element(offset); + + // Check if we're inside a CallExpression - delegate to suggestInCallExpression + const containers = [...compiler.container.stack(offset)]; + for (const c of containers) { + if (c instanceof CallExpressionNode && c.argumentList === tupleContainer) { + return suggestInCallExpression(compiler, offset, c); + } + } + + // Check if we're in a Records element header + if ( + element instanceof ElementDeclarationNode + && getElementKind(element).unwrap_or(undefined) === ElementKind.Records + && !(element.name instanceof CallExpressionNode) + && isOffsetWithinElementHeader(offset, element) + ) { + const tableSymbol = element.parent?.symbol || element.name?.referee; + if (tableSymbol) { + let suggestions = suggestMembersOfSymbol(compiler, tableSymbol, [SymbolKind.Column]); + // If the user already typed some columns, we do not suggest "all columns" anymore + if (!isTupleEmpty(tupleContainer)) return suggestions; + suggestions = excludeSuggestions(suggestions, ['records']); + suggestions = addExpandAllColumnsSuggestion(suggestions); + return suggestions; + } + } + switch (scopeKind) { + case ScopeKind.TABLE: { + // Check if we're inside a table typing "Records (...)" + // In this case, Records is a FunctionApplicationNode + for (const c of containers) { + if ( + c instanceof FunctionApplicationNode + && isExpressionAVariableNode(c.callee) + && extractVariableFromExpression(c.callee).unwrap_or('').toLowerCase() === 'records' + && !(c.args?.[0] instanceof CallExpressionNode) + ) { + const tableSymbol = element.symbol; + if (tableSymbol) { + let suggestions = suggestMembersOfSymbol(compiler, tableSymbol, [SymbolKind.Column]); + // If the user already typed some columns, we do not suggest "all columns" anymore + if (!isTupleEmpty(tupleContainer)) return suggestions; + suggestions = excludeSuggestions(suggestions, ['records']); + suggestions = addExpandAllColumnsSuggestion(suggestions); + return suggestions; + } + break; + } + } + break; + } case ScopeKind.INDEXES: return suggestColumnNameInIndexes(compiler, offset); case ScopeKind.REF: { - const containers = [...compiler.container.stack(offset)]; while (containers.length > 0) { const container = containers.pop()!; if (container instanceof InfixExpressionNode && container.op?.value === '.') { @@ -247,6 +335,22 @@ function suggestInTuple (compiler: Compiler, offset: number): CompletionList { return noSuggestions(); } +function suggestInCommaExpression (compiler: Compiler, offset: number): CompletionList { + const scopeKind = compiler.container.scopeKind(offset); + + // CommaExpressionNode is used in records data rows + if (scopeKind === ScopeKind.RECORDS) { + // In records, suggest enum values if applicable + return suggestNamesInScope(compiler, offset, compiler.container.element(offset), [ + SymbolKind.Schema, + SymbolKind.Enum, + SymbolKind.EnumField, + ]); + } + + return noSuggestions(); +} + function suggestInAttribute ( compiler: Compiler, offset: number, @@ -506,13 +610,15 @@ function suggestInSubField ( function suggestTopLevelElementType (): CompletionList { return { - suggestions: ['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial'].map((name) => ({ - label: name, - insertText: name, - insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, - kind: CompletionItemKind.Keyword, - range: undefined as any, - })), + suggestions: [ + ...['Table', 'TableGroup', 'Enum', 'Project', 'Ref', 'TablePartial', 'Records'].map((name) => ({ + label: name, + insertText: name, + insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, + kind: CompletionItemKind.Keyword, + range: undefined as any, + })), + ], }; } @@ -543,15 +649,25 @@ function suggestInColumn ( container?: FunctionApplicationNode, ): CompletionList { const elements = ['Note', 'indexes', 'checks']; + if (!container?.callee) { return { - suggestions: elements.map((name) => ({ - label: name, - insertText: name, - insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, - kind: CompletionItemKind.Keyword, - range: undefined as any, - })), + suggestions: [ + ...elements.map((name) => ({ + label: name, + insertText: name, + insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, + kind: CompletionItemKind.Keyword, + range: undefined as any, + })), + { + label: 'Records', + insertText: 'Records', + insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, + kind: CompletionItemKind.Keyword, + range: undefined as any, + }, + ], }; } @@ -559,13 +675,22 @@ function suggestInColumn ( if (containerArgId === 0) { return { - suggestions: elements.map((name) => ({ - label: name, - insertText: name, - insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, - kind: CompletionItemKind.Keyword, - range: undefined as any, - })), + suggestions: [ + ...elements.map((name) => ({ + label: name, + insertText: name, + insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, + kind: CompletionItemKind.Keyword, + range: undefined as any, + })), + { + label: 'Records', + insertText: 'Records', + insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, + kind: CompletionItemKind.Keyword, + range: undefined as any, + }, + ], }; } if (containerArgId === 1) { @@ -618,6 +743,103 @@ function suggestInRefField (compiler: Compiler, offset: number): CompletionList ]); } +function suggestInRecordsHeader ( + compiler: Compiler, + offset: number, + container: ElementDeclarationNode, +): CompletionList { + return suggestNamesInScope(compiler, offset, container.parent, [ + SymbolKind.Schema, + SymbolKind.Table, + ]); +} + +function suggestInCallExpression ( + compiler: Compiler, + offset: number, + container: CallExpressionNode, +): CompletionList { + const element = compiler.container.element(offset); + + // Determine if we're in the callee or in the arguments + const inCallee = container.callee && isOffsetWithinSpan(offset, container.callee); + const inArgs = container.argumentList && isOffsetWithinSpan(offset, container.argumentList); + + // Check if we're in a Records element header (top-level Records) + if ( + element instanceof ElementDeclarationNode + && getElementKind(element).unwrap_or(undefined) === ElementKind.Records + && isOffsetWithinElementHeader(offset, element) + ) { + if (inCallee) { + return suggestNamesInScope(compiler, offset, element.parent, [ + SymbolKind.Schema, + SymbolKind.Table, + ]); + } + + if (inArgs) { + const callee = container.callee; + if (callee) { + const fragments = destructureMemberAccessExpression(callee).unwrap_or([callee]); + const rightmostExpr = fragments[fragments.length - 1]; + const tableSymbol = rightmostExpr?.referee; + + if (tableSymbol) { + let suggestions = suggestMembersOfSymbol(compiler, tableSymbol, [SymbolKind.Column]); + const { argumentList } = container; + // If the user already typed some columns, we do not suggest "all columns" anymore + if (!argumentList || !isTupleEmpty(argumentList)) return suggestions; + suggestions = excludeSuggestions(suggestions, ['records']); + suggestions = addExpandAllColumnsSuggestion(suggestions); + return suggestions; + } + } + } + } + + // Check if we're inside a Records FunctionApplicationNode (e.g., typing "Records ()") + const containers = [...compiler.container.stack(offset)]; + for (const c of containers) { + if ( + c instanceof FunctionApplicationNode + && isExpressionAVariableNode(c.callee) + && extractVariableFromExpression(c.callee).unwrap_or('').toLowerCase() === 'records' + ) { + // If in callee, suggest schema and table names + if (inCallee) { + return suggestNamesInScope(compiler, offset, element, [ + SymbolKind.Schema, + SymbolKind.Table, + ]); + } + + // If in args, suggest column names from the table referenced in the callee + if (inArgs) { + const callee = container.callee; + if (callee) { + const fragments = destructureMemberAccessExpression(callee).unwrap_or([callee]); + const rightmostExpr = fragments[fragments.length - 1]; + const tableSymbol = rightmostExpr?.referee; + + if (tableSymbol) { + let suggestions = suggestMembersOfSymbol(compiler, tableSymbol, [SymbolKind.Column]); + const { argumentList } = container; + // If the user already typed some columns, we do not suggest "all columns" anymore + if (!argumentList || !isTupleEmpty(argumentList)) return suggestions; + suggestions = excludeSuggestions(suggestions, ['records']); + suggestions = addExpandAllColumnsSuggestion(suggestions); + return suggestions; + } + } + } + break; + } + } + + return noSuggestions(); +} + function suggestInTableGroupField (compiler: Compiler): CompletionList { return { suggestions: [ diff --git a/packages/dbml-parse/src/services/suggestions/recordRowSnippet.ts b/packages/dbml-parse/src/services/suggestions/recordRowSnippet.ts new file mode 100644 index 000000000..d88d31c89 --- /dev/null +++ b/packages/dbml-parse/src/services/suggestions/recordRowSnippet.ts @@ -0,0 +1,187 @@ +import { + extractReferee, + extractVariableFromExpression, + getElementKind, +} from '@/core/analyzer/utils'; +import { + BlockExpressionNode, + CallExpressionNode, + ElementDeclarationNode, + ProgramNode, + TupleExpressionNode, +} from '@/core/parser/nodes'; +import { + type CompletionList, + type TextModel, + type Position, + CompletionItemKind, + CompletionItemInsertTextRule, +} from '@/services/types'; +import { ColumnSymbol, TablePartialInjectedColumnSymbol } from '@/core/analyzer/symbol/symbols'; +import { ElementKind } from '@/core/analyzer/types'; +import Compiler from '@/compiler'; +import { + noSuggestions, + isOffsetWithinElementHeader, + getColumnsFromTableSymbol, + extractColumnNameAndType, +} from '@/services/suggestions/utils'; +import { isOffsetWithinSpan } from '@/core/utils'; + +const FALLTHROUGH = Symbol('fallthrough'); + +export function suggestRecordRowSnippet ( + compiler: Compiler, + model: TextModel, + position: Position, + offset: number, +): CompletionList | null | typeof FALLTHROUGH { + const element = compiler.container.element(offset); + + // If not in an ElementDeclarationNode, fallthrough + if (!(element instanceof ElementDeclarationNode)) { + return FALLTHROUGH; + } + + const elementKind = getElementKind(element).unwrap_or(undefined); + + // If not in a Records element, fallthrough + if (elementKind !== ElementKind.Records || !(element.body instanceof BlockExpressionNode)) { + return FALLTHROUGH; + } + + // If we're in the header (not the body), fallthrough + if (isOffsetWithinElementHeader(offset, element)) { + return FALLTHROUGH; + } + + // If we're not within the body, fallthrough + if (!element.body || !isOffsetWithinSpan(offset, element.body)) { + return FALLTHROUGH; + } + + // Check if cursor is at the start of a line (only whitespace before it) + const lineContent = model.getLineContent(position.lineNumber); + if (lineContent.trim() !== '') { + // Not on an empty line - fallthrough to allow other completions in Records body + return FALLTHROUGH; + } + + // On an empty line in Records body - provide record row snippet + if (element.parent instanceof ProgramNode) { + return suggestRecordRowInTopLevelRecords(compiler, element); + } else { + return suggestRecordRowInNestedRecords(compiler, element); + } +} + +export { FALLTHROUGH }; + +function suggestRecordRowInTopLevelRecords ( + compiler: Compiler, + recordsElement: ElementDeclarationNode, +): CompletionList { + // Top-level Records only work with explicit column list: Records users(id, name) { } + if (!(recordsElement.name instanceof CallExpressionNode)) return noSuggestions(); + + const columnElements = recordsElement.name.argumentList?.elementList || []; + const columnSymbols = columnElements.map((e) => extractReferee(e)); + if (!columnSymbols || columnSymbols.length === 0) return noSuggestions(); + + const columns = columnElements + .map((element, index) => { + const symbol = columnSymbols[index]; + if (!symbol || !(symbol instanceof ColumnSymbol || symbol instanceof TablePartialInjectedColumnSymbol)) { + return null; + } + const columnName = extractVariableFromExpression(element).unwrap_or(undefined); + const result = extractColumnNameAndType(symbol, columnName); + return result; + }) + .filter((col) => col !== null) as Array<{ name: string; type: string }>; + + if (columns.length === 0) return noSuggestions(); + + // Generate the snippet with tab stops for completion + const snippet = columns.map((col, index) => `\${${index + 1}:${col.name} (${col.type})}`).join(', '); + + return { + suggestions: [ + { + label: 'Record row snippet', + insertText: snippet, + insertTextRules: CompletionItemInsertTextRule.InsertAsSnippet, + kind: CompletionItemKind.Snippet, + range: undefined as any, + }, + ], + }; +} + +function suggestRecordRowInNestedRecords ( + compiler: Compiler, + recordsElement: ElementDeclarationNode, +): CompletionList { + // Get parent table element + const parent = recordsElement.parent; + if (!(parent instanceof ElementDeclarationNode)) { + return noSuggestions(); + } + + const parentKind = getElementKind(parent).unwrap_or(undefined); + if (parentKind !== ElementKind.Table) { + return noSuggestions(); + } + + const tableSymbol = parent.symbol; + if (!tableSymbol?.symbolTable) { + return noSuggestions(); + } + + let columns: Array<{ name: string; type: string }>; + + if (recordsElement.name instanceof TupleExpressionNode) { + // Explicit columns from tuple: records (col1, col2) + const columnElements = recordsElement.name.elementList; + const columnSymbols = columnElements + .map((e) => extractReferee(e)) + .filter((s) => s !== undefined); + + columns = columnElements + .map((element, index) => { + const symbol = columnSymbols[index]; + if (!symbol || !(symbol instanceof ColumnSymbol || symbol instanceof TablePartialInjectedColumnSymbol)) { + return null; + } + const columnName = extractVariableFromExpression(element).unwrap_or(undefined); + return extractColumnNameAndType(symbol, columnName); + }) + .filter((col) => col !== null) as Array<{ name: string; type: string }>; + } else { + // Implicit columns - use all columns from parent table + const result = getColumnsFromTableSymbol(tableSymbol, compiler); + if (!result) { + return noSuggestions(); + } + columns = result; + } + + if (columns.length === 0) { + return noSuggestions(); + } + + // Generate the snippet with tab stops for completion + const snippet = columns.map((col, index) => `\${${index + 1}:${col.name} (${col.type})}`).join(', '); + + return { + suggestions: [ + { + label: 'Record row snippet', + insertText: snippet, + insertTextRules: CompletionItemInsertTextRule.InsertAsSnippet, + kind: CompletionItemKind.Snippet, + range: undefined as any, + }, + ], + }; +} diff --git a/packages/dbml-parse/src/services/suggestions/utils.ts b/packages/dbml-parse/src/services/suggestions/utils.ts index 20ad606cc..f4cbf39cf 100644 --- a/packages/dbml-parse/src/services/suggestions/utils.ts +++ b/packages/dbml-parse/src/services/suggestions/utils.ts @@ -1,8 +1,12 @@ -import { SymbolKind } from '@/core/analyzer/symbol/symbolIndex'; -import { CompletionItemKind, type CompletionList } from '@/services/types'; +import { SymbolKind, destructureIndex } from '@/core/analyzer/symbol/symbolIndex'; +import { CompletionItemKind, CompletionItemInsertTextRule, type CompletionList } from '@/services/types'; import { SyntaxToken, SyntaxTokenKind } from '@/core/lexer/tokens'; import { hasTrailingSpaces } from '@/core/lexer/utils'; -import { isAlphaOrUnderscore } from '@/core/utils'; +import { SyntaxNode, TupleExpressionNode, FunctionApplicationNode } from '@/core/parser/nodes'; +import Compiler from '@/compiler'; +import { ColumnSymbol, TablePartialInjectedColumnSymbol } from '@/core/analyzer/symbol/symbols'; +import { extractVariableFromExpression } from '@/core/analyzer/utils'; +import { addDoubleQuoteIfNeeded } from '@/compiler/queries/utils'; export function pickCompletionItemKind (symbolKind: SymbolKind): CompletionItemKind { switch (symbolKind) { @@ -69,7 +73,162 @@ export function addQuoteIfNeeded (completionList: CompletionList): CompletionLis ...completionList, suggestions: completionList.suggestions.map((s) => ({ ...s, - insertText: (!s.insertText || !s.insertText.split('').every(isAlphaOrUnderscore)) ? `"${s.insertText ?? ''}"` : s.insertText, + insertText: addDoubleQuoteIfNeeded(s.insertText ?? ''), })), }; } + +export function excludeSuggestions (completionList: CompletionList, excludeLabels: string[]): CompletionList { + return { + ...completionList, + suggestions: completionList.suggestions.filter((s) => { + const label = typeof s.label === 'string' ? s.label : s.label.label; + return !excludeLabels.includes(label.toLowerCase()); + }), + }; +} + +export function addExpandAllColumnsSuggestion (completionList: CompletionList): CompletionList { + const allColumns = completionList.suggestions + .map((s) => typeof s.label === 'string' ? s.label : s.label.label) + .join(', '); + + if (!allColumns) { + return completionList; + } + + return { + ...completionList, + suggestions: [ + { + label: '* (all columns)', + insertText: allColumns, + insertTextRules: CompletionItemInsertTextRule.KeepWhitespace, + kind: CompletionItemKind.Snippet, + sortText: '00', + range: undefined as any, + }, + ...completionList.suggestions, + ], + }; +} + +export function getSource (compiler: Compiler, tokenOrNode: SyntaxToken | SyntaxNode): string { + return compiler.parse.source().slice(tokenOrNode.start, tokenOrNode.end); +} + +/** + * Checks if the offset is within the element's header + * (within the element, but outside the body) + */ +export function isOffsetWithinElementHeader (offset: number, element: SyntaxNode & { body?: SyntaxNode }): boolean { + // Check if offset is within the element at all + if (offset < element.start || offset > element.end) { + return false; + } + + // If element has a body, check if offset is outside it + if (element.body) { + return offset < element.body.start || offset > element.body.end; + } + + // Element has no body, so entire element is considered header + return true; +} + +export function isTupleEmpty (tuple: TupleExpressionNode): boolean { + return tuple.commaList.length + tuple.elementList.length === 0; +} + +/** + * Get columns from a table symbol + * @param tableSymbol The table symbol to extract columns from + * @param compiler Optional compiler instance to extract type names from source + * @returns Array of column objects with name and type information + */ +export function getColumnsFromTableSymbol ( + tableSymbol: any, + compiler?: Compiler, +): Array<{ name: string; type: string }> | null { + const columns: Array<{ name: string; type: string }> = []; + + for (const [index] of tableSymbol.symbolTable.entries()) { + const res = destructureIndex(index).unwrap_or(undefined); + if (res === undefined || res.kind !== SymbolKind.Column) continue; + + const columnSymbol = tableSymbol.symbolTable.get(index); + if (!columnSymbol) { + // If any column symbol is missing, return null + return null; + } + + // Use extractColumnNameAndType for proper handling of injected columns + const columnInfo = extractColumnNameAndType(columnSymbol, res.name); + + if (!columnInfo) { + // If we can't extract column info, return null + return null; + } + + columns.push(columnInfo); + } + + return columns; +} + +export function extractColumnNameAndType ( + columnSymbol: ColumnSymbol | TablePartialInjectedColumnSymbol, + columnName?: string, +): { name: string; type: string } | null { + // Handle table partial injected columns + if (columnSymbol instanceof TablePartialInjectedColumnSymbol) { + const tablePartialSymbol = columnSymbol.tablePartialSymbol; + if (!tablePartialSymbol?.symbolTable || !columnName) { + return null; + } + + // Look up the column in the table partial's symbol table + const columnIndex = `column:${columnName}`; + const actualColumnSymbol = tablePartialSymbol.symbolTable.get(columnIndex); + if (!actualColumnSymbol?.declaration || !(actualColumnSymbol.declaration instanceof FunctionApplicationNode)) { + return null; + } + + // Extract type from the actual column declaration + const type = extractVariableFromExpression(actualColumnSymbol.declaration.args[0]).unwrap_or(null); + if (!type) { + return null; + } + + return { name: columnName, type }; + } + + // Handle regular column symbols + if (!(columnSymbol?.declaration instanceof FunctionApplicationNode)) { + return null; + } + const declaration = columnSymbol.declaration as FunctionApplicationNode; + const name = extractVariableFromExpression(declaration.callee).unwrap_or(null); + const type = extractVariableFromExpression(declaration.args[0]).unwrap_or(null); + + if (!name || !type) { + return null; + } + + return { name, type }; +} + +/** + * Generate a snippet for entering a record entry with placeholders for each column + * @param columns Array of column objects with name and type information + * @returns A snippet string with placeholders like: ${1:id (int)}, ${2:name (varchar)}, ${3:email (varchar)} + */ +export function generateRecordEntrySnippet (columns: Array<{ name: string; type: string }>): string { + if (columns.length === 0) { + return ''; + } + + return columns + .map((col, index) => `\${${index + 1}:${col.name} (${col.type})}`) + .join(', '); +} diff --git a/packages/dbml-parse/src/services/types.ts b/packages/dbml-parse/src/services/types.ts index 369c8aeb1..db29190d0 100644 --- a/packages/dbml-parse/src/services/types.ts +++ b/packages/dbml-parse/src/services/types.ts @@ -1,4 +1,4 @@ -import type { +import { IPosition, editor, languages, IRange, IDisposable, CancellationToken as ICancellationToken, } from 'monaco-editor-core'; @@ -77,3 +77,18 @@ export type SignatureHelpResult = languages.SignatureHelpResult; // Show references export type ReferenceProvider = languages.ReferenceProvider; + +// Code actions +export type CodeActionProvider = languages.CodeActionProvider; +export type CodeAction = languages.CodeAction; +export type CodeActionContext = languages.CodeActionContext; +export type WorkspaceEdit = languages.WorkspaceEdit; + +// Diagnostics/Markers +export type MarkerSeverity = 1 | 2 | 4 | 8; // Hint = 1, Info = 2, Warning = 4, Error = 8 +export type MarkerData = editor.IMarkerData; + +// Inline completion types +export type InlineCompletionItemProvider = languages.InlineCompletionsProvider; +export type InlineCompletionItem = languages.InlineCompletion; +export type InlineCompletions = languages.InlineCompletions; diff --git a/yarn.lock b/yarn.lock index 4711e2a73..0669c8546 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5173,6 +5173,11 @@ resolved "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.16.tgz" integrity sha512-HX7Em5NYQAXKW+1T+FiuG27NGwzJfCX3s1GjOa7ujxZa52kjJLOr4FUxT+giF6Tgxv1e+/czV/iTtBw27WTU9g== +"@types/luxon@^3.7.1": + version "3.7.1" + resolved "https://registry.yarnpkg.com/@types/luxon/-/luxon-3.7.1.tgz#ef51b960ff86801e4e2de80c68813a96e529d531" + integrity sha512-H3iskjFIAn5SlJU7OuxUmTEpebK6TKB8rxZShDslBMZJ5u9S//KM1sbdAisiSrqwLQncVjnpi2OK2J51h+4lsg== + "@types/minimatch@^3.0.3": version "3.0.5" resolved "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.5.tgz" @@ -10751,6 +10756,11 @@ lru-cache@^8.0.0: resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-8.0.5.tgz" integrity sha512-MhWWlVnuab1RG5/zMRRcVGXZLCXrZTgfwMikgzCegsPnG62yDQo5JnqKkrK4jO5iKqDAZGItAqN5CtKBCBWRUA== +luxon@^3.7.2: + version "3.7.2" + resolved "https://registry.yarnpkg.com/luxon/-/luxon-3.7.2.tgz#d697e48f478553cca187a0f8436aff468e3ba0ba" + integrity sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew== + magic-string@^0.30.17: version "0.30.17" resolved "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz"