Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,6 @@ members = [
"crates/oxabl_schema",
"crates/oxabl_semantic",
"crates/oxabl_lint",
"crates/oxabl_analyze",
]
resolver = "2"
3 changes: 3 additions & 0 deletions crates/oxabl/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ oxabl_common = { path = "../oxabl_common", version = "0.5.0" }
oxabl_ast = { path = "../oxabl_ast", version = "0.5.0" }
oxabl_workspace = { path = "../oxabl_workspace", version = "0.4.0" }
oxabl_preprocessor = { path = "../oxabl_preprocessor", version = "0.3.1" }
oxabl_semantic = { path = "../oxabl_semantic", version = "0.1.0" }
oxabl_schema = { path = "../oxabl_schema", version = "0.1.0" }
oxabl_analyze = { path = "../oxabl_analyze", version = "0.1.0" }
clap = { version = "4", features = ["derive"] }
walkdir = "2"
indicatif = "0.17"
Expand Down
115 changes: 115 additions & 0 deletions crates/oxabl/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,13 @@ use std::time::Instant;

use clap::Parser as ClapParser;
use indicatif::{ProgressBar, ProgressStyle};
use oxabl_analyze::{dump_json, dump_text};
use oxabl_common::{FileId, SourceMap};
use oxabl_lexer::tokenize;
use oxabl_parser::Parser;
use oxabl_preprocessor::Preprocessor;
use oxabl_schema::Schema;
use oxabl_semantic::{AnalysisContext, analyze_file};
use oxabl_workspace::RealFileSystem;
use serde::Serialize;
use walkdir::WalkDir;
Expand Down Expand Up @@ -40,6 +43,27 @@ enum Cli {
#[arg(long)]
debug: bool,
},
/// Parse + semantic-analyze a single ABL file and dump the resolved model.
Analyze {
/// Path to the ABL source file to analyze.
path: PathBuf,

/// Output format: `json` (stable, versioned) or `text` (human-oriented).
#[arg(long, default_value = "json")]
format: String,

/// Skip the lint pass (semantic-layer diagnostics only).
#[arg(long)]
no_lint: bool,

/// Enable preprocessing (include expansion, &IF evaluation).
#[arg(long)]
preprocess: bool,

/// Include search paths (can be specified multiple times).
#[arg(long = "include-path", short = 'I')]
include_paths: Vec<PathBuf>,
},
}

enum FileResult {
Expand Down Expand Up @@ -98,9 +122,100 @@ fn main() -> ExitCode {
include_paths,
debug,
} => run_check(&path, json, preprocess, &include_paths, debug),
Cli::Analyze {
path,
format,
no_lint,
preprocess,
include_paths,
} => run_analyze(&path, &format, no_lint, preprocess, &include_paths),
}
}

/// Implements `oxabl analyze`: read one ABL source file, optionally
/// preprocess it, lex + parse it, run semantic analysis against an empty
/// schema, and dump the resolved model in the requested format.
///
/// Exit codes: 0 success, 2 unreadable input, 3 preprocessor failure,
/// 4 lexer panic, 5 parse error, 6 JSON serialization failure,
/// 7 unsupported `--format` value.
fn run_analyze(
    path: &Path,
    format: &str,
    no_lint: bool,
    preprocess: bool,
    include_paths: &[PathBuf],
) -> ExitCode {
    // Validate the output format first so a typo'd `--format` fails fast,
    // before we spend time reading, preprocessing, and parsing the file.
    let as_json = match format {
        "json" => true,
        "text" => false,
        other => {
            eprintln!("error: unsupported format `{other}` (use `json` or `text`)");
            return ExitCode::from(7);
        }
    };

    let source = match std::fs::read_to_string(path) {
        Ok(s) => s,
        Err(e) => {
            eprintln!("error: cannot read {}: {e}", path.display());
            return ExitCode::from(2);
        }
    };

    // The whole pipeline operates on a single logical file; construct the
    // id once so the preprocessor and the analysis context agree on it.
    let file_id = FileId::new(1);

    // Run the preprocessor (include expansion, &IF evaluation) when
    // requested; otherwise the raw source doubles as the expanded source.
    let expanded = if preprocess {
        let fs = RealFileSystem;
        let preprocessor = Preprocessor::new(&fs, include_paths);
        match preprocessor.process(file_id, &source) {
            Ok(pf) => pf.to_text().to_string(),
            Err(diags) => {
                // Surface only the first diagnostic; that is enough to
                // point the user at the failing include/conditional.
                eprintln!(
                    "error: preprocessing failed: {}",
                    diags
                        .first()
                        .map(|d| d.message.as_str())
                        .unwrap_or("unknown")
                );
                return ExitCode::from(3);
            }
        }
    } else {
        source
    };

    // The lexer may panic on pathological input; convert that into a clean
    // exit code instead of aborting with a backtrace.
    let tokens =
        match std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| tokenize(&expanded))) {
            Ok(t) => t,
            Err(_) => {
                eprintln!("error: lexer panicked");
                return ExitCode::from(4);
            }
        };

    let mut parser = Parser::new(&tokens, &expanded);
    let program = match parser.parse_statements() {
        Ok(p) => p,
        Err(e) => {
            // The source map is only needed to turn the error's byte
            // offset into a line:column pair, so build it on this path only.
            let sm = SourceMap::new(&expanded);
            let (line, col) = sm.lookup(e.span.start as usize);
            eprintln!("parse error at {line}:{col}: {}", e.message);
            return ExitCode::from(5);
        }
    };

    // Analyze against an empty schema: no database connection is wired in
    // here, so table references resolve only against the file itself.
    let schema = Schema::empty();
    let ctx = AnalysisContext::new(file_id, &expanded, &schema);
    let sem = analyze_file(&program, &ctx);

    if as_json {
        // NOTE(review): the lint pass is only wired into the JSON dump;
        // `--no-lint` has no effect on `--format text` — confirm intended.
        let v = dump_json(&program, &sem, &ctx, !no_lint);
        match serde_json::to_string_pretty(&v) {
            Ok(s) => println!("{s}"),
            Err(e) => {
                eprintln!("error: json serialize: {e}");
                return ExitCode::from(6);
            }
        }
    } else {
        print!("{}", dump_text(&program, &sem, &ctx));
    }

    ExitCode::SUCCESS
}

fn run_check(
path: &Path,
json_output: bool,
Expand Down
20 changes: 20 additions & 0 deletions crates/oxabl_analyze/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
[package]
name = "oxabl_analyze"
version = "0.1.0"
edition = "2024"
license = "MIT"
description = "JSON dump + analyze CLI wiring for the oxabl semantic model"
# `repository` must point at the repository root — a bare subdirectory path
# (e.g. …/oxabl/crates/oxabl_analyze) is not a resolvable GitHub URL.
repository = "https://github.com/oxabl-project/oxabl"

[dependencies]
# Sibling workspace crates: AST + spans, shared ids/source maps, lint pass,
# schema model, and the semantic analyzer whose output this crate dumps.
oxabl_ast = { path = "../oxabl_ast", version = "0.5.0" }
oxabl_common = { path = "../oxabl_common", version = "0.5.0" }
oxabl_lint = { path = "../oxabl_lint", version = "0.1.0" }
oxabl_schema = { path = "../oxabl_schema", version = "0.1.0" }
oxabl_semantic = { path = "../oxabl_semantic", version = "0.1.0" }
serde = { version = "1", features = ["derive"] }
serde_json = "1"

[dev-dependencies]
# Lexer/parser are only needed to build fixtures for the dump tests.
oxabl_lexer = { path = "../oxabl_lexer", version = "0.4.1" }
oxabl_parser = { path = "../oxabl_parser", version = "0.6.0" }
Loading