Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
{
"rust-analyzer.linkedProjects": [
"./Cargo.toml",
],
"rust-analyzer.cargo.buildScripts.enable": true,
"sqltools.format": {
"language": "sql",
"uppercaseKeywords": true,
"linesBetweenQueries": 2,
"keywordCase": "upper",
"identifierCase": "unchanged",
},
"sqltools.formatLanguages": [
"sql"
],
}
40 changes: 39 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,18 @@ edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[[bin]] # Binary for the event process
name = "event"
path = "src/components/event.rs"

[[bin]] # Binary for the task process
name = "task"
path = "src/components/task.rs"

[[bin]] # Binary for the CLI
name = "cli"
path = "src/cli.rs"

[dependencies]
serde_json = { version = "1.0.104", features = ["preserve_order"] }
clap = { version = "4.3.5", features = ["derive"] }
Expand All @@ -23,5 +35,31 @@ serde_derive = "1.0.164"
diesel = { version = "2.1.0", features = ["chrono", "postgres"] }
diesel_migrations = { version = "2.1.0", features = ["postgres"] }
tracing = "0.1.37"
tracing-subscriber = "0.3.17"
prettytable-rs = "^0.10.0"
pnet = "0.34.0"
pnet = "0.34.0"
tonic = "0.9.2"
prost = "0.11.9"
tokio = { version = "1.32", features = ["rt-multi-thread", "macros", "sync", "time", "io-std", "io-util"] }
# flume = { version = "0.11.0", features = ["async"] }
rand = "0.8.5"
async-stream = "0.3.5"
tokio-stream = "0.1.14"
crossbeam = "0.8.2"
crossbeam-channel = "0.5.8"
crossbeam-utils = "0.8.16"
bus = "2.4.1"

[build-dependencies]
tonic-build = "0.9.0"

# [unstable]
# profile-rustflags = true

# [profile.dev]
# # Enable the missing_docs warning for development builds
# rustflags = ["-C", "warn=missing_docs"]

# [profile.release]
# # Enable the missing_docs warning for release builds
# rustflags = ["-C", "warn=missing_docs"]
8 changes: 4 additions & 4 deletions Readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -118,13 +118,13 @@ LRANGE tasks 0 -1
- [ ] Make it distributed
- [ ] Add support for multiple engines
- [x] Add a testing environment with multiple engines using docker compose
- [ ] Create a network attached storage for sharing workflow files
- [ ] Implement gRPC to stream output of tasks and events
<!-- - [ ] Create a network attached storage for sharing workflow files -->
- [ ] Implement gRPC to stream output of task and event processes
- [ ] Implement round robin algorithm for distributing workflows to engines
- [ ] Automate container deployment using
- [ ] Kubernetes
- [ ] Ansible
- [ ] LLM integration
<!-- - [ ] Ansible -->
<!-- - [ ] LLM integration -->

---

Expand Down
4 changes: 4 additions & 0 deletions build.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
// Build script: compiles proto/grpc.proto into Rust client/server stubs with
// tonic-build so they can be pulled in at compile time via
// `tonic::include_proto!("grpc")`.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    tonic_build::compile_protos("proto/grpc.proto")?;
    Ok(())
}
3 changes: 3 additions & 0 deletions deploy/ansible/inventory.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[container_hosts]
container1_ip_address ansible_ssh_port=22
container2_ip_address ansible_ssh_port=22
28 changes: 28 additions & 0 deletions deploy/ansible/playbook.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@

- hosts: localhost
gather_facts: false
tasks:
- name: Ensure the Docker image is available
docker_image:
name: your_docker_image_name
source: pull

- name: Create Docker containers
docker_container:
name: "{{ item }}"
image: your_docker_image_name
state: started
with_items:
- container1
- container2

- hosts: container_hosts
gather_facts: false
tasks:
- name: Copy binary file to Docker containers
copy:
src: /path/to/your/binary_file
dest: /path/where/you/want/to/copy/binary_file
mode: '0755'
become: true
remote_user: your_ssh_username
12 changes: 11 additions & 1 deletion docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,8 @@ services:
networks:
- postgres_ntw
- engine3_ntw
volumes:
- workflow_files:/app/workflow_files

networks:
postgres_ntw:
Expand All @@ -85,4 +87,12 @@ networks:
engine2_ntw:
driver: bridge
engine3_ntw:
driver: bridge
driver: bridge

volumes:
workflow_files:
driver: local
driver_opts:
type: none
o: bind
device: ./workflow_files
16 changes: 16 additions & 0 deletions justfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Build the workspace, capping cargo at 6 parallel jobs.
build:
    #!/usr/bin/env sh
    cargo build -j 6

# Start the Redis and Postgres containers the engine depends on.
# POSTGRES_PASSWORD is read from the local .env file.
start-containers:
    #!/usr/bin/env sh

    POSTGRES_PASSWORD=$(grep -oP '(?<=POSTGRES_PASSWORD=).*' .env)
    docker run --name workflow-redis -d redis redis-server --save 60 1 --loglevel warning
    docker run --name workflow-postgres -e POSTGRES_PASSWORD="$POSTGRES_PASSWORD" -p 5432:5432 -d postgres

# Stop and remove the containers. `|| true` (an OR list, not a pipe) keeps the
# recipe from failing when a container is already stopped or does not exist;
# the original `| true` merely piped stdout into `true`.
stop-containers:
    #!/usr/bin/env sh
    docker container stop workflow-redis workflow-postgres || true
    docker container rm workflow-redis workflow-postgres || true

Empty file added lazy/config.yml
Empty file.
14 changes: 14 additions & 0 deletions proto/grpc.proto
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
// gRPC schema for streaming process output (task/event logs) to the CLI.
syntax = "proto3";
package grpc;

// Server-streaming service: the client sends a single OutputChunk request and
// receives a stream of Response messages in return.
service OutputStreaming {
  rpc StreamOutput(OutputChunk) returns ( stream Response);
}

// Request payload. The CLI client sends a default instance, so `content`
// appears to be unused by callers visible here — confirm before relying on it.
message OutputChunk {
  string content = 1;
}

// One streamed output message.
message Response {
  string message = 1;
}
99 changes: 80 additions & 19 deletions src/cli.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
use anyhow::{anyhow, Error as AnyError, Result};
//!

use anyhow::{anyhow, Error as AnyError, Ok, Result};
use clap::{Parser, Subcommand};
use diesel::{ExpressionMethods, PgConnection, QueryDsl, RunQueryDsl, SelectableHelper};
use dotenv::dotenv;
Expand All @@ -9,13 +11,20 @@ use std::env;
use std::fs::File;
use std::process::Command;
use tracing::field;
use workflow::engine::{create_new_engine_entry, handle_stop, run_event_process};
use workflow::engine::{run_task_process, update_engine_status};
use workflow::engine_utils::{create_new_engine_entry, handle_stop, update_engine_status};
use workflow::models::{Engine, EngineStatus, Event, Task};
use workflow::parser::process_yaml_file;
use workflow::utils::establish_pg_connection;
use workflow::utils::run_migrations;

pub mod grpc {
tonic::include_proto!("grpc");
}
use grpc::output_streaming_client::OutputStreamingClient;
use grpc::{OutputChunk, Response as GrpcResponse};

use tonic::transport::Channel;

const PRETTY_TABLE_MAX_CELL_LEN: usize = 50;
const ENGINE_NAME: &str = "workflow-engine";
const ENGINE_IP_ADDRESS: &str = "0.0.0.0";
Expand All @@ -41,6 +50,11 @@ enum Commands {
StartEventProcess {
engine_uid: i32,
},
// Show logs for the event or task process
Logs {
#[clap(subcommand)]
subcommand: LogsSubcommands,
},
Stop {},
/// Adds workflow to the queue
Add {
Expand Down Expand Up @@ -96,6 +110,16 @@ enum ShowSubcommands {
Engine { uid: i32 },
}

// Subcommands of `logs`: selects which process's gRPC output stream to follow.
// NOTE: plain `//` comments are intentional — `///` doc comments would become
// clap help text and change the CLI's help output.
#[derive(Subcommand)]
enum LogsSubcommands {
    // Stream logs from the task process (gRPC server on port 10000)
    Task,
    // Stream logs from the event process (gRPC server on port 10001)
    Event,
    // Per-engine logs not implemented yet
    // Engine { uid: i32 },
}

#[derive(PartialEq)]
enum ProcessType {
Task,
Expand All @@ -109,7 +133,7 @@ fn create_and_clear_log_file(file_path: &str) -> Result<File, AnyError> {
}

fn start_process(
subcommand_name: &str,
binary_name: &str,
process_type: ProcessType,
engine_uid: i32,
) -> Result<(), AnyError> {
Expand Down Expand Up @@ -141,8 +165,8 @@ fn start_process(
}

let command = binding
.arg(subcommand_name)
.arg("--")
.arg("--bin")
.arg(binary_name)
.arg(engine_uid.to_string())
.stdout(stdout)
.stderr(stderr);
Expand All @@ -151,7 +175,7 @@ fn start_process(
Ok(())
}

pub fn cli() {
pub async fn cli() {
let cli = Cli::parse();

match &cli.command {
Expand All @@ -174,15 +198,22 @@ pub fn cli() {
}
Commands::StartEventProcess { engine_uid } => {
println!("StartEventProcess");
if let Err(e) = run_event_process(*engine_uid) {
println!("Failed to start event process, {}", e);
std::process::exit(1);
};
// if let Err(e) = run_event_process(*engine_uid) {
// println!("Failed to start event process, {}", e);
// std::process::exit(1);
// };
}
Commands::StartTaskProcess { engine_uid } => {
println!("StartTaskProcess");
if let Err(e) = run_task_process(*engine_uid) {
println!("Failed to start task process, {}", e);
// if let Err(e) = run_task_process(*engine_uid) {
// println!("Failed to start task process, {}", e);
// std::process::exit(1);
// };
}
Commands::Logs { subcommand } => {
println!("Logs");
if let Err(e) = process_log_command(subcommand).await {
println!("Failed to stop the engine, {}", e);
std::process::exit(1);
};
}
Expand Down Expand Up @@ -229,6 +260,29 @@ pub fn cli() {
std::process::exit(0);
}

/// Dispatches a `logs` subcommand to the matching gRPC log stream.
///
/// The task and event processes each expose an `OutputStreaming` gRPC server
/// on a fixed local port; this picks the right port and delegates to
/// [`show_log`], which blocks until the stream ends.
async fn process_log_command(subcommand: &LogsSubcommands) -> Result<(), AnyError> {
    // Ports the task/event processes serve their output streams on.
    const TASK_LOG_PORT: &str = "10000";
    const EVENT_LOG_PORT: &str = "10001";

    match subcommand {
        LogsSubcommands::Task => show_log(TASK_LOG_PORT.to_owned()).await?,
        LogsSubcommands::Event => show_log(EVENT_LOG_PORT.to_owned()).await?,
    };
    Ok(())
}

/// Connects to the local `OutputStreaming` gRPC server on `server_port`
/// (IPv6 loopback) and prints every streamed message until the server
/// closes the stream.
async fn show_log(server_port: String) -> Result<(), AnyError> {
    let endpoint = format!("http://[::1]:{}", server_port);
    let mut client = OutputStreamingClient::connect(endpoint).await?;

    // The request payload is ignored server-side; send a default chunk and
    // unwrap the response into the message stream.
    let response = client.stream_output(OutputChunk::default()).await?;
    let mut stream = response.into_inner();

    // `message()` yields Ok(None) when the stream is exhausted.
    loop {
        match stream.message().await? {
            Some(log_message) => println!("NOTE = {:?}", log_message),
            None => break,
        }
    }
    Ok(())
}

fn get_system_ip_address() -> Result<String, AnyError> {
// Get a vector with all network interfaces found
let all_interfaces = interfaces();
Expand Down Expand Up @@ -262,13 +316,13 @@ fn process_start_command() -> Result<(), AnyError> {
)?;
println!("created new engine entry with uid: {}", engine_uid);

if let Err(e) = start_process("start-event-process", ProcessType::Event, engine_uid) {
eprintln!("Failed to start Event process: {}", e);
eprintln!("exiting...");
std::process::exit(1);
}
// if let Err(e) = start_process("event", ProcessType::Event, engine_uid) {
// eprintln!("Failed to start Event process: {}", e);
// eprintln!("exiting...");
// std::process::exit(1);
// }

if let Err(e) = start_process("start-task-process", ProcessType::Task, engine_uid) {
if let Err(e) = start_process("task", ProcessType::Task, engine_uid) {
eprintln!("Failed to start Task process: {}", e);
eprintln!("exiting...");
std::process::exit(1);
Expand Down Expand Up @@ -416,6 +470,13 @@ fn list_items<T: serde::ser::Serialize>(items: Vec<T>) -> Result<(), AnyError> {
Ok(())
}

/// CLI entry point: boots the Tokio runtime and runs the async `cli` driver.
///
/// The attribute must sit directly on the function — the stray blank line
/// between `#[tokio::main]` and `async fn main` made it read as detached.
#[tokio::main]
async fn main() -> Result<(), AnyError> {
    cli().await;
    Ok(())
}

// fn is_redis_running() -> bool {
// let redis_result = create_redis_connection();
// if let Err(e) = redis_result {
Expand Down
Loading