();
+ for (int i = 300; i <= 333; i++)
+ {
+ docks.Add($"Dock {i}");
+ }
+ return docks;
+ }
+ }
+
+ private async Task ChangeStatus(WAMSOrder order)
+ {
+ // For MVP, cycle through statuses in sequence
+ var nextStatus = GetNextStatus(order.StatusEnum);
+
+ if (nextStatus == order.StatusEnum)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Info,
+ Summary = "Info",
+ Detail = "No further status transitions available",
+ Duration = 3000
+ });
+ return;
+ }
+
+ var confirmed = await DialogService.Confirm(
+ $"Change status from '{order.Status}' to '{WAMSColorCodingService.GetStatusDisplayName(nextStatus)}'?",
+ "Confirm Status Change",
+ new ConfirmOptions { OkButtonText = "Yes", CancelButtonText = "No" });
+
+ if (confirmed == true)
+ {
+ try
+ {
+ var changeResult = await WAMSService.ChangeStatusAsync(order.OrderNumber, nextStatus, selectedWarehouseId);
+
+ if (changeResult.Success)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Success,
+ Summary = "Success",
+ Detail = changeResult.Message,
+ Duration = 3000
+ });
+ await LoadOrders();
+ }
+ else
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Error,
+ Summary = "Error",
+ Detail = changeResult.Message,
+ Duration = 4000
+ });
+ }
+ }
+ catch (Exception ex)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Error,
+ Summary = "Error",
+ Detail = $"Failed to change status: {ex.Message}",
+ Duration = 4000
+ });
+ }
+ }
+ }
+
+ private DockStatus GetNextStatus(DockStatus current)
+ {
+ return current switch
+ {
+ DockStatus.NA => DockStatus.CheckIn,
+ DockStatus.CheckIn => DockStatus.Loading,
+ DockStatus.Loading => DockStatus.Unloading,
+ DockStatus.Unloading => DockStatus.Shipped,
+ DockStatus.Shipped => DockStatus.Received,
+ DockStatus.Received => DockStatus.Received, // Terminal
+ _ => DockStatus.NA
+ };
+ }
+
+ private async Task ClearCheckIn(WAMSOrder order)
+ {
+ var confirmed = await DialogService.Confirm("Clear check-in date and revert status to N/A?", "Confirm",
+ new ConfirmOptions { OkButtonText = "Yes", CancelButtonText = "No" });
+
+ if (confirmed == true)
+ {
+ try
+ {
+ var success = await WAMSService.ClearCheckInAsync(order.OrderNumber, selectedWarehouseId);
+
+ if (success)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Success,
+ Summary = "Success",
+ Detail = "Check-in cleared",
+ Duration = 3000
+ });
+ await LoadOrders();
+ }
+ else
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Error,
+ Summary = "Error",
+ Detail = "Failed to clear check-in",
+ Duration = 4000
+ });
+ }
+ }
+ catch (Exception ex)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Error,
+ Summary = "Error",
+ Detail = $"Failed to clear check-in: {ex.Message}",
+ Duration = 4000
+ });
+ }
+ }
+ }
+
+ private async Task OnOperatorChanged(WAMSOrder order)
+ {
+ // Check if there are other orders with the same AppID
+ var ordersWithSameAppId = orders.Where(o => o.AppId == order.AppId && !string.IsNullOrWhiteSpace(o.AppId)).ToList();
+
+ if (ordersWithSameAppId.Count > 1)
+ {
+ var confirmed = await DialogService.Confirm(
+ $"Update operator for all {ordersWithSameAppId.Count} orders with AppID '{order.AppId}'?",
+ "Batch Update",
+ new ConfirmOptions { OkButtonText = "Yes, All", CancelButtonText = "No, Just This One" });
+
+ if (confirmed == true)
+ {
+ // Batch update
+ try
+ {
+ var success = await WAMSService.BatchUpdateForkliftByAppIDAsync(order.AppId!, order.ForkliftOperator!, selectedWarehouseId);
+
+ if (success)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Success,
+ Summary = "Success",
+ Detail = $"Updated {ordersWithSameAppId.Count} orders",
+ Duration = 3000
+ });
+ await LoadOrders();
+ }
+ }
+ catch (Exception ex)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Error,
+ Summary = "Error",
+ Detail = $"Failed to batch update: {ex.Message}",
+ Duration = 4000
+ });
+ }
+ }
+ }
+ }
+
+ private void UpdateOrder(WAMSOrder order)
+ {
+ // Note: In full implementation, this would call an update endpoint
+ // For MVP, individual field updates happen through other methods
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Info,
+ Summary = "Info",
+ Detail = "Use field-specific actions to update order (status, operator, etc.)",
+ Duration = 3000
+ });
+ }
+
+ private async Task DeleteOrder(WAMSOrder order)
+ {
+ if (order.StatusEnum != DockStatus.NA)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Warning,
+ Summary = "Cannot Delete",
+ Detail = "Orders can only be deleted when status is N/A",
+ Duration = 4000
+ });
+ return;
+ }
+
+ var confirmed = await DialogService.Confirm(
+ $"Delete order {order.OrderNumber}? This will soft-delete the order (type will change to deleted).",
+ "Confirm Delete",
+ new ConfirmOptions { OkButtonText = "Yes, Delete", CancelButtonText = "Cancel" });
+
+ if (confirmed == true)
+ {
+ try
+ {
+ // Assume security level 1 (admin) for this demo
+ var success = await WAMSService.DeleteOrderAsync(order.OrderNumber, 1, selectedWarehouseId);
+
+ if (success)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Success,
+ Summary = "Success",
+ Detail = "Order deleted",
+ Duration = 3000
+ });
+ await LoadOrders();
+ }
+ else
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Error,
+ Summary = "Error",
+ Detail = "Failed to delete order",
+ Duration = 4000
+ });
+ }
+ }
+ catch (Exception ex)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Error,
+ Summary = "Error",
+ Detail = $"Failed to delete order: {ex.Message}",
+ Duration = 4000
+ });
+ }
+ }
+ }
+
+ private async Task UndeleteOrder(WAMSOrder order)
+ {
+ var confirmed = await DialogService.Confirm(
+ $"Restore order {order.OrderNumber}?",
+ "Confirm Undelete",
+ new ConfirmOptions { OkButtonText = "Yes, Restore", CancelButtonText = "Cancel" });
+
+ if (confirmed == true)
+ {
+ try
+ {
+ var success = await WAMSService.UndeleteOrderAsync(order.OrderNumber, selectedWarehouseId);
+
+ if (success)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Success,
+ Summary = "Success",
+ Detail = "Order restored",
+ Duration = 3000
+ });
+ await LoadOrders();
+ }
+ else
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Error,
+ Summary = "Error",
+ Detail = "Failed to restore order",
+ Duration = 4000
+ });
+ }
+ }
+ catch (Exception ex)
+ {
+ NotificationService.Notify(new NotificationMessage
+ {
+ Severity = NotificationSeverity.Error,
+ Summary = "Error",
+ Detail = $"Failed to restore order: {ex.Message}",
+ Duration = 4000
+ });
+ }
+ }
+ }
+
+ private class WarehouseOption
+ {
+ public int Id { get; set; }
+ public string Name { get; set; } = string.Empty;
+ }
+}
diff --git a/Components/Shared/ApplicationSwitcher.razor b/Components/Shared/ApplicationSwitcher.razor
index 632f4e6..67c63ec 100644
--- a/Components/Shared/ApplicationSwitcher.razor
+++ b/Components/Shared/ApplicationSwitcher.razor
@@ -65,7 +65,12 @@
var path = uri.AbsolutePath;
var segments = path.Split('/', System.StringSplitOptions.RemoveEmptyEntries);
- if (segments.Length > 0)
+ // Check if URL starts with /app/
+ if (segments.Length > 1 && segments[0] == "app")
+ {
+ selectedAppName = segments[1];
+ }
+ else if (segments.Length > 0)
{
selectedAppName = segments[0];
}
@@ -86,7 +91,7 @@
if (app != null)
{
// Navigate to the application's dashboard
- NavigationManager.NavigateTo($"/{app.Name}/dashboard");
+ NavigationManager.NavigateTo($"/app/{app.Name}/dashboard");
}
}
return Task.CompletedTask;
diff --git a/Components/Shared/SmartDataGridObject.razor b/Components/Shared/SmartDataGridObject.razor
index 6d8602b..25e101d 100644
--- a/Components/Shared/SmartDataGridObject.razor
+++ b/Components/Shared/SmartDataGridObject.razor
@@ -45,12 +45,14 @@
else
{
+ Filterable="@column.Filterable">
+
+ @GetPropertyValue(row, column.Property)
+
+
}
}
}
@@ -102,12 +104,14 @@
@if (!column.Hidden)
{
+ Filterable="@column.Filterable">
+
+ @GetPropertyValue(row, column.Property)
+
+
}
}
@@ -147,6 +151,12 @@
{
Showing @Data.Count() records
+ @if (hiddenColumnCount > 0)
+ {
+
+ (@(totalColumnCount - hiddenColumnCount) of @totalColumnCount columns shown)
+
+ }
}
}
@@ -188,6 +198,13 @@
[Parameter]
public IEnumerable? ColumnOverrides { get; set; }
+ ///
+ /// Maximum number of columns to display. Set to 0 for unlimited.
+ /// Default is 15 columns to ensure usable grid layout.
+ ///
+ [Parameter]
+ public int MaxVisibleColumns { get; set; } = 15;
+
[Parameter]
public EventCallback OnRowUpdate { get; set; }
@@ -201,11 +218,15 @@
public EventCallback OnRowEdit { get; set; }
private RadzenDataGrid? grid;
+ private int hiddenColumnCount = 0;
+ private int totalColumnCount = 0;
private IEnumerable GetColumnConfig()
{
if (ColumnOverrides != null && ColumnOverrides.Any())
{
+ hiddenColumnCount = 0;
+ totalColumnCount = ColumnOverrides.Count();
return ColumnOverrides;
}
@@ -219,7 +240,25 @@
dataType = firstItem.GetType();
}
- return dataType.GetProperties(System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.Instance)
+ var allProperties = dataType.GetProperties(System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.Instance);
+ totalColumnCount = allProperties.Length;
+
+ // Prioritize columns: ID first, then name/code/description, then others
+ var prioritizedProperties = allProperties
+ .OrderBy(p => GetColumnPriority(p.Name))
+ .ThenBy(p => p.Name)
+ .ToList();
+
+ // Apply column limit if MaxVisibleColumns > 0
+ var visibleProperties = MaxVisibleColumns > 0
+ ? prioritizedProperties.Take(MaxVisibleColumns)
+ : prioritizedProperties;
+
+ hiddenColumnCount = MaxVisibleColumns > 0
+ ? Math.Max(0, totalColumnCount - MaxVisibleColumns)
+ : 0;
+
+ return visibleProperties
.Select(p => new ColumnConfig
{
Property = p.Name,
@@ -238,6 +277,34 @@
}
}
+ ///
+ /// Returns priority for column ordering (lower = higher priority).
+ /// ID columns first, then name/code/description, then everything else.
+ ///
+ private static int GetColumnPriority(string propertyName)
+ {
+ var lower = propertyName.ToLowerInvariant();
+
+ // Primary key columns first (usually end with _id or Id and are the first property)
+ if (lower.EndsWith("_id") || lower == "id")
+ return 0;
+
+ // Code/number columns (product codes, part numbers, etc.)
+ if (lower.Contains("code") || lower.Contains("codenum") || lower.Contains("number"))
+ return 1;
+
+ // Name columns
+ if (lower.Contains("name") || lower.Contains("descrip") || lower.Contains("title"))
+ return 2;
+
+ // Status/active columns
+ if (lower.Contains("active") || lower.Contains("status") || lower.Contains("enabled"))
+ return 3;
+
+ // Everything else
+ return 10;
+ }
+
private async Task OnRowUpdated(object row)
{
try
@@ -333,4 +400,24 @@
});
}
}
+
+ ///
+ /// Gets property value from an object using reflection.
+ /// Required because Radzen's Property binding doesn't work with TItem="object".
+ ///
+ private object? GetPropertyValue(object? obj, string propertyName)
+ {
+ if (obj == null || string.IsNullOrEmpty(propertyName))
+ return null;
+
+ try
+ {
+ var property = obj.GetType().GetProperty(propertyName, BindingFlags.Public | BindingFlags.Instance);
+ return property?.GetValue(obj);
+ }
+ catch
+ {
+ return null;
+ }
+ }
}
diff --git a/Controllers/InventoryAllocationController.cs b/Controllers/InventoryAllocationController.cs
new file mode 100644
index 0000000..c9737d0
--- /dev/null
+++ b/Controllers/InventoryAllocationController.cs
@@ -0,0 +1,294 @@
+using DotNetWebApp.Services.InventoryAllocation;
+using DotNetWebApp.Services.InventoryAllocation.Models;
+using Microsoft.AspNetCore.Mvc;
+
+namespace DotNetWebApp.Controllers;
+
+///
+/// API controller for inventory allocation operations.
+/// Provides endpoints for order allocation, deallocation, and inventory lookup.
+///
+[ApiController]
+[Route("api/[controller]")]
+public class InventoryAllocationController : ControllerBase
+{
+ private readonly IInventoryAllocationService _inventoryAllocationService;
+ private readonly ILockService _lockService;
+ private readonly ILogger _logger;
+
+ public InventoryAllocationController(
+ IInventoryAllocationService inventoryAllocationService,
+ ILockService lockService,
+ ILogger logger)
+ {
+ _inventoryAllocationService = inventoryAllocationService;
+ _lockService = lockService;
+ _logger = logger;
+ }
+
+ ///
+ /// Get orders available for allocation.
+ ///
+ [HttpGet("orders")]
+ public async Task>> GetOrders(
+ [FromQuery] int warehouseId = 3,
+ [FromQuery] DateTime? shipDate = null)
+ {
+ var orders = await _inventoryAllocationService.GetOrdersForAllocationAsync(warehouseId, shipDate);
+ return Ok(orders);
+ }
+
+ ///
+ /// Get order detail with allocation status.
+ ///
+ [HttpGet("orders/{orderNumber}")]
+ public async Task> GetOrder(
+ string orderNumber,
+ [FromQuery] int warehouseId = 3)
+ {
+ var order = await _inventoryAllocationService.GetOrderDetailAsync(orderNumber, warehouseId);
+
+ if (order == null)
+ return NotFound($"Order {orderNumber} not found");
+
+ return Ok(order);
+ }
+
+ ///
+ /// Get line items for order with allocation details.
+ ///
+ [HttpGet("orders/{orderNumber}/lines")]
+ public async Task>> GetOrderLines(
+ string orderNumber,
+ [FromQuery] int warehouseId = 3)
+ {
+ var lines = await _inventoryAllocationService.GetOrderLineItemsAsync(orderNumber, warehouseId);
+ return Ok(lines);
+ }
+
+ ///
+ /// Check order eligibility for allocation.
+ ///
+ [HttpGet("orders/{orderNumber}/eligibility")]
+ public async Task> CheckEligibility(
+ string orderNumber,
+ [FromQuery] int warehouseId = 3)
+ {
+ var result = await _inventoryAllocationService.CheckEligibilityAsync(orderNumber, warehouseId);
+ return Ok(result);
+ }
+
+ ///
+ /// Allocate inventory to order.
+ ///
+ [HttpPost("orders/{orderNumber}/allocate")]
+ public async Task> AllocateOrder(
+ string orderNumber,
+ [FromBody] AllocateOrderRequest request,
+ [FromQuery] int warehouseId = 3)
+ {
+ if (string.IsNullOrEmpty(request.Username))
+ return BadRequest("Username is required");
+
+ // Check permission
+ if (!await _inventoryAllocationService.HasAllocatorPermissionAsync(request.Username))
+ {
+ return Forbid($"{request.Username} does not have allocator permission (674)");
+ }
+
+ var result = await _inventoryAllocationService.AllocateOrderAsync(orderNumber, request.Username, warehouseId);
+
+ if (!result.Success)
+ return BadRequest(result);
+
+ return Ok(result);
+ }
+
+ ///
+ /// Allocate inventory to specific line item.
+ ///
+ [HttpPost("orders/{orderNumber}/lines/{productCode}/allocate")]
+ public async Task> AllocateLineItem(
+ string orderNumber,
+ string productCode,
+ [FromBody] AllocateOrderRequest request,
+ [FromQuery] decimal quantity,
+ [FromQuery] int warehouseId = 3)
+ {
+ if (string.IsNullOrEmpty(request.Username))
+ return BadRequest("Username is required");
+
+ if (!await _inventoryAllocationService.HasAllocatorPermissionAsync(request.Username))
+ {
+ return Forbid($"{request.Username} does not have allocator permission (674)");
+ }
+
+ var result = await _inventoryAllocationService.AllocateLineItemAsync(
+ orderNumber, productCode, quantity, request.Username, warehouseId);
+
+ if (!result.Success)
+ return BadRequest(result);
+
+ return Ok(result);
+ }
+
+ ///
+ /// Deallocate inventory from order.
+ ///
+ [HttpPost("orders/{orderNumber}/deallocate")]
+ public async Task DeallocateOrder(
+ string orderNumber,
+ [FromBody] DeallocateOrderRequest request,
+ [FromQuery] int warehouseId = 3)
+ {
+ if (string.IsNullOrEmpty(request.Username))
+ return BadRequest("Username is required");
+
+ // Check permission
+ if (!await _inventoryAllocationService.HasDeallocatorPermissionAsync(request.Username))
+ {
+ return Forbid($"{request.Username} does not have deallocator permission (681)");
+ }
+
+ var success = await _inventoryAllocationService.DeallocateOrderAsync(orderNumber, request.Username, warehouseId);
+
+ if (!success)
+ return BadRequest("Failed to deallocate order");
+
+ return Ok(new { Message = "Order deallocated successfully" });
+ }
+
+ ///
+ /// Deallocate specific line item.
+ ///
+ [HttpPost("orders/{orderNumber}/lines/{productCode}/deallocate")]
+ public async Task DeallocateLineItem(
+ string orderNumber,
+ string productCode,
+ [FromBody] DeallocateOrderRequest request,
+ [FromQuery] int warehouseId = 3)
+ {
+ if (string.IsNullOrEmpty(request.Username))
+ return BadRequest("Username is required");
+
+ if (!await _inventoryAllocationService.HasDeallocatorPermissionAsync(request.Username))
+ {
+ return Forbid($"{request.Username} does not have deallocator permission (681)");
+ }
+
+ var success = await _inventoryAllocationService.DeallocateLineItemAsync(
+ orderNumber, productCode, request.Username, warehouseId);
+
+ if (!success)
+ return BadRequest("Failed to deallocate line item");
+
+ return Ok(new { Message = "Line item deallocated successfully" });
+ }
+
+ ///
+ /// Get allocation status for order.
+ ///
+ [HttpGet("orders/{orderNumber}/status")]
+ public async Task> GetOrderStatus(
+ string orderNumber,
+ [FromQuery] int warehouseId = 3)
+ {
+ var status = await _inventoryAllocationService.GetOrderStatusAsync(orderNumber, warehouseId);
+ return Ok(new { Status = status, StatusText = InventoryAllocationStatusColorService.GetStatusText(status) });
+ }
+
+ ///
+ /// Get available inventory for product.
+ ///
+ [HttpGet("inventory/{productCode}")]
+ public async Task>> GetAvailableInventory(
+ string productCode,
+ [FromQuery] int warehouseId = 3)
+ {
+ var inventory = await _inventoryAllocationService.GetAvailableInventoryAsync(productCode, warehouseId);
+ return Ok(inventory);
+ }
+
+ ///
+ /// Acquire lock for order.
+ ///
+ [HttpPost("locks/{orderNumber}/acquire")]
+ public async Task> AcquireLock(
+ string orderNumber,
+ [FromBody] LockRequest request,
+ [FromQuery] int warehouseId = 3)
+ {
+ if (string.IsNullOrEmpty(request.Username))
+ return BadRequest("Username is required");
+
+ var result = await _lockService.AcquireLockAsync(orderNumber, request.Username, warehouseId);
+ return Ok(result);
+ }
+
+ ///
+ /// Release lock for order.
+ ///
+ [HttpPost("locks/{orderNumber}/release")]
+ public async Task ReleaseLock(
+ string orderNumber,
+ [FromBody] LockRequest request,
+ [FromQuery] int warehouseId = 3)
+ {
+ if (string.IsNullOrEmpty(request.Username))
+ return BadRequest("Username is required");
+
+ var success = await _lockService.ReleaseLockAsync(orderNumber, request.Username, warehouseId);
+
+ if (!success)
+ return BadRequest("Failed to release lock - may be owned by another user");
+
+ return Ok(new { Message = "Lock released successfully" });
+ }
+
+ ///
+ /// Get lock status for order.
+ ///
+ [HttpGet("locks/{orderNumber}")]
+ public async Task> GetLock(
+ string orderNumber,
+ [FromQuery] int warehouseId = 3)
+ {
+ var lockInfo = await _lockService.GetLockAsync(orderNumber, warehouseId);
+
+ if (lockInfo == null)
+ return Ok(new { IsLocked = false });
+
+ return Ok(new { IsLocked = true, Lock = lockInfo });
+ }
+
+ ///
+ /// Release all locks for user.
+ ///
+ [HttpPost("locks/release-all")]
+ public async Task ReleaseAllUserLocks([FromBody] LockRequest request)
+ {
+ if (string.IsNullOrEmpty(request.Username))
+ return BadRequest("Username is required");
+
+ await _lockService.ReleaseAllUserLocksAsync(request.Username);
+ return Ok(new { Message = "All locks released successfully" });
+ }
+
+ ///
+ /// Release expired locks (maintenance endpoint).
+ ///
+ [HttpPost("locks/release-expired")]
+ public async Task ReleaseExpiredLocks()
+ {
+ var count = await _lockService.ReleaseExpiredLocksAsync();
+ return Ok(new { Message = $"Released {count} expired locks" });
+ }
+}
+
+///
+/// Request model for lock operations.
+///
+public class LockRequest
+{
+ public string Username { get; set; } = string.Empty;
+}
diff --git a/Controllers/InventoryPickingController.cs b/Controllers/InventoryPickingController.cs
new file mode 100644
index 0000000..e068bc8
--- /dev/null
+++ b/Controllers/InventoryPickingController.cs
@@ -0,0 +1,328 @@
+using DotNetWebApp.Services.InventoryPicking;
+using DotNetWebApp.Services.InventoryPicking.Models;
+using Microsoft.AspNetCore.Mvc;
+
+namespace DotNetWebApp.Controllers;
+
+///
+/// API controller for InventoryPicking workflow operations.
+///
+[ApiController]
+[Route("api/[controller]")]
+public class InventoryPickingController : ControllerBase
+{
+ private readonly IInventoryPickingService _inventoryPickingService;
+ private readonly ILogger _logger;
+
+ public InventoryPickingController(
+ IInventoryPickingService inventoryPickingService,
+ ILogger logger)
+ {
+ _inventoryPickingService = inventoryPickingService;
+ _logger = logger;
+ }
+
+ ///
+ /// GET /api/inventorypicking/orders?warehouseId=X&startDate=Y&endDate=Z
+ /// List InventoryPicking orders for a warehouse.
+ ///
+ [HttpGet("orders")]
+ public async Task>> ListOrders(
+ [FromQuery] int warehouseId,
+ [FromQuery] DateTime? startDate = null,
+ [FromQuery] DateTime? endDate = null)
+ {
+ try
+ {
+ var orders = await _inventoryPickingService.ListOrdersAsync(warehouseId, startDate, endDate);
+ return Ok(orders);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to list InventoryPicking orders");
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// GET /api/inventorypicking/orders/{orderNumber}?warehouseId=X
+ /// Get a single InventoryPicking order.
+ ///
+ [HttpGet("orders/{orderNumber}")]
+ public async Task> GetOrder(
+ string orderNumber,
+ [FromQuery] int warehouseId)
+ {
+ try
+ {
+ var order = await _inventoryPickingService.GetOrderAsync(orderNumber, warehouseId);
+ if (order == null)
+ return NotFound(new { error = $"Order {orderNumber} not found" });
+
+ return Ok(order);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to get InventoryPicking order {OrderNumber}", orderNumber);
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// POST /api/inventorypicking/orders/{orderNumber}/picker
+ /// Assign a picker to an order.
+ ///
+ [HttpPost("orders/{orderNumber}/picker")]
+ public async Task> AssignPicker(
+ string orderNumber,
+ [FromQuery] int warehouseId,
+ [FromBody] AssignPickerRequest request)
+ {
+ request.OrderNumber = orderNumber;
+
+ try
+ {
+ var result = await _inventoryPickingService.AssignPickerAsync(request, warehouseId);
+ if (!result.Success)
+ return BadRequest(result);
+
+ return Ok(result);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to assign picker to order {OrderNumber}", orderNumber);
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// POST /api/inventorypicking/orders/{orderNumber}/auditor
+ /// Assign an auditor to an order.
+ ///
+ [HttpPost("orders/{orderNumber}/auditor")]
+ public async Task> AssignAuditor(
+ string orderNumber,
+ [FromQuery] int warehouseId,
+ [FromBody] AssignAuditorRequest request)
+ {
+ request.OrderNumber = orderNumber;
+
+ try
+ {
+ var result = await _inventoryPickingService.AssignAuditorAsync(request, warehouseId);
+ if (!result.Success)
+ return BadRequest(result);
+
+ return Ok(result);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to assign auditor to order {OrderNumber}", orderNumber);
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// POST /api/inventorypicking/orders/{orderNumber}/assembler
+ /// Assign an assembler to an order.
+ ///
+ [HttpPost("orders/{orderNumber}/assembler")]
+ public async Task> AssignAssembler(
+ string orderNumber,
+ [FromQuery] int warehouseId,
+ [FromBody] AssignAssemblerRequest request)
+ {
+ request.OrderNumber = orderNumber;
+
+ try
+ {
+ var result = await _inventoryPickingService.AssignAssemblerAsync(request, warehouseId);
+ if (!result.Success)
+ return BadRequest(result);
+
+ return Ok(result);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to assign assembler to order {OrderNumber}", orderNumber);
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// POST /api/inventorypicking/orders/{orderNumber}/clockout
+ /// Clock out a worker.
+ ///
+ [HttpPost("orders/{orderNumber}/clockout")]
+ public async Task> ClockOut(
+ string orderNumber,
+ [FromQuery] int warehouseId,
+ [FromBody] ClockOutRequest request)
+ {
+ request.OrderNumber = orderNumber;
+
+ try
+ {
+ var result = await _inventoryPickingService.ClockOutAsync(request, warehouseId);
+ if (!result.Success)
+ return BadRequest(result);
+
+ return Ok(result);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to clock out worker for order {OrderNumber}", orderNumber);
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// PUT /api/inventorypicking/orders/{orderNumber}
+ /// Update order details (allocator, team leader, notes, status).
+ ///
+ [HttpPut("orders/{orderNumber}")]
+ public async Task> UpdateOrder(
+ string orderNumber,
+ [FromQuery] int warehouseId,
+ [FromBody] UpdateOrderRequest request)
+ {
+ request.OrderNumber = orderNumber;
+
+ try
+ {
+ var result = await _inventoryPickingService.UpdateOrderAsync(request, warehouseId);
+ if (!result.Success)
+ return BadRequest(result);
+
+ return Ok(result);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to update order {OrderNumber}", orderNumber);
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// GET /api/inventorypicking/operators?warehouseId=X
+ /// Get available operators for a warehouse.
+ ///
+ [HttpGet("operators")]
+ public async Task>> GetOperators(
+ [FromQuery] int warehouseId)
+ {
+ try
+ {
+ var operators = await _inventoryPickingService.GetAvailableOperatorsAsync(warehouseId);
+ return Ok(operators);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to get operators");
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// GET /api/inventorypicking/allocators
+ /// Get available allocators.
+ ///
+ [HttpGet("allocators")]
+ public async Task>> GetAllocators()
+ {
+ try
+ {
+ var allocators = await _inventoryPickingService.GetAvailableAllocatorsAsync();
+ return Ok(allocators);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to get allocators");
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// GET /api/inventorypicking/teamleaders
+ /// Get available team leaders.
+ ///
+ [HttpGet("teamleaders")]
+ public async Task>> GetTeamLeaders()
+ {
+ try
+ {
+ var teamLeaders = await _inventoryPickingService.GetAvailableTeamLeadersAsync();
+ return Ok(teamLeaders);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to get team leaders");
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// POST /api/inventorypicking/batch/allocator
+ /// Batch update allocator for multiple orders.
+ ///
+ [HttpPost("batch/allocator")]
+ public async Task> BatchUpdateAllocator(
+ [FromQuery] int warehouseId,
+ [FromBody] BatchUpdateRequest request)
+ {
+ try
+ {
+ var result = await _inventoryPickingService.BatchUpdateAllocatorAsync(
+ request.OrderNumbers,
+ request.Value,
+ warehouseId);
+
+ if (!result.Success)
+ return BadRequest(result);
+
+ return Ok(result);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to batch update allocator");
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+
+ ///
+ /// POST /api/inventorypicking/batch/teamleader
+ /// Batch update team leader for multiple orders.
+ ///
+ [HttpPost("batch/teamleader")]
+ public async Task> BatchUpdateTeamLeader(
+ [FromQuery] int warehouseId,
+ [FromBody] BatchUpdateRequest request)
+ {
+ try
+ {
+ var result = await _inventoryPickingService.BatchUpdateTeamLeaderAsync(
+ request.OrderNumbers,
+ request.Value,
+ warehouseId);
+
+ if (!result.Success)
+ return BadRequest(result);
+
+ return Ok(result);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to batch update team leader");
+ return StatusCode(500, new { error = ex.Message });
+ }
+ }
+}
+
+///
+/// Request for batch update operations.
+///
+public class BatchUpdateRequest
+{
+ public IEnumerable OrderNumbers { get; set; } = [];
+ public string Value { get; set; } = string.Empty;
+}
diff --git a/Controllers/WAMSController.cs b/Controllers/WAMSController.cs
new file mode 100644
index 0000000..79b376d
--- /dev/null
+++ b/Controllers/WAMSController.cs
@@ -0,0 +1,527 @@
+using DotNetWebApp.Services.WAMS;
+using DotNetWebApp.Services.WAMS.Models;
+using Microsoft.AspNetCore.Mvc;
+
+namespace DotNetWebApp.Controllers;
+
+/// <summary>
+/// REST API controller for WAMS (Web App Management System) operations
+/// Provides endpoints for order management, status workflow, and personnel assignment
+/// </summary>
+[ApiController]
+[Route("api/wams")]
+[Produces("application/json")]
+public class WAMSController : ControllerBase
+{
+ private readonly IWAMSService _wamsService;
+ private readonly ILogger<WAMSController> _logger;
+
+ public WAMSController(
+ IWAMSService wamsService,
+ ILogger<WAMSController> logger)
+ {
+ _wamsService = wamsService;
+ _logger = logger;
+ }
+
+ #region Order Retrieval
+
+ /// <summary>
+ /// Lists all orders for a specific warehouse and date
+ /// </summary>
+ /// <param name="warehouseId">Warehouse ID (3=Chicago, 92=NYC)</param>
+ /// <param name="date">Date to filter orders (defaults to today)</param>
+ /// <param name="statusFilter">Optional status filter (e.g., "Check In", "Loading")</param>
+ /// <returns>List of WAMS orders</returns>
+ /// <response code="200">Returns the list of orders</response>
+ /// <response code="400">If parameters are invalid</response>
+ /// <response code="500">If an error occurs</response>
+ [HttpGet("orders")]
+ [ProducesResponseType(typeof(IEnumerable<WAMSOrder>), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<IEnumerable<WAMSOrder>>> ListOrders(
+ [FromQuery] int warehouseId,
+ [FromQuery] DateTime? date = null,
+ [FromQuery] string? statusFilter = null)
+ {
+ try
+ {
+ if (warehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ var filterDate = date ?? DateTime.Today;
+ var orders = await _wamsService.ListOrdersAsync(warehouseId, filterDate, statusFilter);
+
+ _logger.LogInformation(
+ "Listed {Count} orders for warehouse {WarehouseId} on date {Date}",
+ orders.Count(), warehouseId, filterDate);
+
+ return Ok(orders);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error listing orders for warehouse {WarehouseId}", warehouseId);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ ///
+ /// Gets a single order by order number
+ ///
+ /// Order number
+ /// Warehouse ID
+ /// The order if found
+ /// Returns the order
+ /// If order not found
+ /// If parameters are invalid
+ /// If an error occurs
+ [HttpGet("orders/{orderNumber}")]
+ [ProducesResponseType(typeof(WAMSOrder), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status404NotFound)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<WAMSOrder>> GetOrder(
+ [FromRoute] long orderNumber,
+ [FromQuery] int warehouseId)
+ {
+ try
+ {
+ if (orderNumber <= 0)
+ {
+ return BadRequest("Order number must be greater than 0");
+ }
+
+ if (warehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ var order = await _wamsService.GetOrderAsync(orderNumber, warehouseId);
+
+ if (order == null)
+ {
+ return NotFound($"Order {orderNumber} not found in warehouse {warehouseId}");
+ }
+
+ return Ok(order);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error getting order {OrderNumber}", orderNumber);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ #endregion
+
+ #region Status Management
+
+ ///
+ /// Changes the status of a single order
+ ///
+ /// Order number
+ /// Status change request with new status and warehouse ID
+ /// Status change result
+ /// Status changed successfully
+ /// If transition is invalid
+ /// If order not found
+ /// If an error occurs
+ [HttpPost("orders/{orderNumber}/status")]
+ [ProducesResponseType(typeof(StatusChangeResult), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status404NotFound)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<StatusChangeResult>> ChangeStatus(
+ [FromRoute] long orderNumber,
+ [FromBody] ChangeStatusRequest request)
+ {
+ try
+ {
+ if (orderNumber <= 0)
+ {
+ return BadRequest("Order number must be greater than 0");
+ }
+
+ if (request.WarehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ // Override order number from route
+ request.OrderNumber = orderNumber;
+
+ var result = await _wamsService.ChangeStatusAsync(
+ request.OrderNumber,
+ request.NewStatus,
+ request.WarehouseId);
+
+ if (!result.Success)
+ {
+ if (result.Message.Contains("not found", StringComparison.OrdinalIgnoreCase))
+ {
+ return NotFound(result);
+ }
+ return BadRequest(result);
+ }
+
+ return Ok(result);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error changing status for order {OrderNumber}", orderNumber);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ ///
+ /// Batch changes status for all orders with the same AppID
+ ///
+ /// Batch status change request with AppID, new status, and warehouse ID
+ /// Status change result with count of updated orders
+ /// Status changed successfully for all orders
+ /// If transition is invalid or no orders found
+ /// If an error occurs
+ [HttpPost("orders/batch/status")]
+ [ProducesResponseType(typeof(StatusChangeResult), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<StatusChangeResult>> BatchChangeStatusByAppID(
+ [FromBody] BatchStatusByAppIDRequest request)
+ {
+ try
+ {
+ if (string.IsNullOrWhiteSpace(request.AppId))
+ {
+ return BadRequest("AppID cannot be empty");
+ }
+
+ if (request.WarehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ var result = await _wamsService.BatchChangeStatusByAppIDAsync(
+ request.AppId,
+ request.NewStatus,
+ request.WarehouseId);
+
+ if (!result.Success)
+ {
+ return BadRequest(result);
+ }
+
+ return Ok(result);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error batch changing status for AppID {AppId}", request.AppId);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ ///
+ /// Clears check-in date for an order (reverts to N/A status)
+ ///
+ /// Order number
+ /// Warehouse ID
+ /// Success status
+ /// Check-in cleared successfully
+ /// If order not found
+ /// If parameters are invalid
+ /// If an error occurs
+ [HttpPost("orders/{orderNumber}/checkin/clear")]
+ [ProducesResponseType(typeof(bool), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status404NotFound)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<bool>> ClearCheckIn(
+ [FromRoute] long orderNumber,
+ [FromQuery] int warehouseId)
+ {
+ try
+ {
+ if (orderNumber <= 0)
+ {
+ return BadRequest("Order number must be greater than 0");
+ }
+
+ if (warehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ var success = await _wamsService.ClearCheckInAsync(orderNumber, warehouseId);
+
+ if (!success)
+ {
+ return NotFound($"Order {orderNumber} not found or could not be updated");
+ }
+
+ return Ok(success);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error clearing check-in for order {OrderNumber}", orderNumber);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ #endregion
+
+ #region Delete/Undelete
+
+ ///
+ /// Soft deletes an order by adding 10 to type (e.g., SO type 1 becomes Deleted_SO type 11)
+ /// Requires status = N/A and security level <= 2
+ ///
+ /// Order number
+ /// Delete request with security level and warehouse ID
+ /// Success status
+ /// Order deleted successfully
+ /// If deletion constraints not met
+ /// If insufficient security level
+ /// If order not found
+ /// If an error occurs
+ [HttpDelete("orders/{orderNumber}")]
+ [ProducesResponseType(typeof(bool), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status403Forbidden)]
+ [ProducesResponseType(StatusCodes.Status404NotFound)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<bool>> DeleteOrder(
+ [FromRoute] long orderNumber,
+ [FromQuery] int securityLevel,
+ [FromQuery] int warehouseId)
+ {
+ try
+ {
+ if (orderNumber <= 0)
+ {
+ return BadRequest("Order number must be greater than 0");
+ }
+
+ if (warehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ if (securityLevel > 2)
+ {
+ return StatusCode(403, "Insufficient security level. Only Admin/User roles can delete orders.");
+ }
+
+ var success = await _wamsService.DeleteOrderAsync(orderNumber, securityLevel, warehouseId);
+
+ if (!success)
+ {
+ return BadRequest($"Order {orderNumber} cannot be deleted. Check status is N/A and order is not already deleted.");
+ }
+
+ return Ok(success);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error deleting order {OrderNumber}", orderNumber);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ ///
+ /// Undeletes an order by subtracting 10 from type (e.g., Deleted_SO type 11 becomes SO type 1)
+ ///
+ /// Order number
+ /// Warehouse ID
+ /// Success status
+ /// Order undeleted successfully
+ /// If order is not deleted
+ /// If order not found
+ /// If an error occurs
+ [HttpPost("orders/{orderNumber}/undelete")]
+ [ProducesResponseType(typeof(bool), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status404NotFound)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<bool>> UndeleteOrder(
+ [FromRoute] long orderNumber,
+ [FromQuery] int warehouseId)
+ {
+ try
+ {
+ if (orderNumber <= 0)
+ {
+ return BadRequest("Order number must be greater than 0");
+ }
+
+ if (warehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ var success = await _wamsService.UndeleteOrderAsync(orderNumber, warehouseId);
+
+ if (!success)
+ {
+ return BadRequest($"Order {orderNumber} cannot be undeleted. Order may not be deleted.");
+ }
+
+ return Ok(success);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error undeleting order {OrderNumber}", orderNumber);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ #endregion
+
+ #region Personnel Management
+
+ ///
+ /// Updates forklift operator for a single order
+ ///
+ /// Order number
+ /// Update request with operator username and warehouse ID
+ /// Success status
+ /// Operator updated successfully
+ /// If order not found
+ /// If parameters are invalid
+ /// If an error occurs
+ [HttpPut("orders/{orderNumber}/operator")]
+ [ProducesResponseType(typeof(bool), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status404NotFound)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<bool>> UpdateForkliftOperator(
+ [FromRoute] long orderNumber,
+ [FromBody] UpdateOperatorRequest request)
+ {
+ try
+ {
+ if (orderNumber <= 0)
+ {
+ return BadRequest("Order number must be greater than 0");
+ }
+
+ if (string.IsNullOrWhiteSpace(request.OperatorUsername))
+ {
+ return BadRequest("Operator username cannot be empty");
+ }
+
+ if (request.WarehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ // Override order number from route
+ request.OrderNumber = orderNumber;
+
+ var success = await _wamsService.UpdateForkliftOperatorAsync(
+ request.OrderNumber,
+ request.OperatorUsername,
+ request.WarehouseId);
+
+ if (!success)
+ {
+ return NotFound($"Order {orderNumber} not found or could not be updated");
+ }
+
+ return Ok(success);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error updating operator for order {OrderNumber}", orderNumber);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ ///
+ /// Batch updates forklift operator for all orders with the same AppID
+ ///
+ /// Batch update request with AppID, operator username, and warehouse ID
+ /// Success status
+ /// Operator updated successfully for all orders
+ /// If no orders found or parameters invalid
+ /// If an error occurs
+ [HttpPut("orders/batch/operator")]
+ [ProducesResponseType(typeof(bool), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<bool>> BatchUpdateForkliftByAppID(
+ [FromBody] BatchUpdateOperatorRequest request)
+ {
+ try
+ {
+ if (string.IsNullOrWhiteSpace(request.AppId))
+ {
+ return BadRequest("AppID cannot be empty");
+ }
+
+ if (string.IsNullOrWhiteSpace(request.OperatorUsername))
+ {
+ return BadRequest("Operator username cannot be empty");
+ }
+
+ if (request.WarehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ var success = await _wamsService.BatchUpdateForkliftByAppIDAsync(
+ request.AppId,
+ request.OperatorUsername,
+ request.WarehouseId);
+
+ if (!success)
+ {
+ return BadRequest($"No orders found with AppID {request.AppId} or could not be updated");
+ }
+
+ return Ok(success);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error batch updating operator for AppID {AppId}", request.AppId);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ ///
+ /// Gets list of available forklift operators for a warehouse
+ /// Filters by d2_d1id=687 for Chicago users
+ ///
+ /// Warehouse ID
+ /// List of operators
+ /// Returns the list of operators
+ /// If warehouse ID is invalid
+ /// If an error occurs
+ [HttpGet("operators")]
+ [ProducesResponseType(typeof(IEnumerable<string>), StatusCodes.Status200OK)]
+ [ProducesResponseType(StatusCodes.Status400BadRequest)]
+ [ProducesResponseType(StatusCodes.Status500InternalServerError)]
+ public async Task<ActionResult<IEnumerable<string>>> GetAvailableOperators(
+ [FromQuery] int warehouseId)
+ {
+ try
+ {
+ if (warehouseId <= 0)
+ {
+ return BadRequest("Warehouse ID must be greater than 0");
+ }
+
+ var operators = await _wamsService.GetAvailableOperatorsAsync(warehouseId);
+
+ return Ok(operators);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Error getting available operators for warehouse {WarehouseId}", warehouseId);
+ return StatusCode(500, $"Internal server error: {ex.Message}");
+ }
+ }
+
+ #endregion
+}
diff --git a/DOCKER.md b/DOCKER.md
new file mode 100644
index 0000000..07a8b7e
--- /dev/null
+++ b/DOCKER.md
@@ -0,0 +1,191 @@
+# Docker Database Setup Guide
+
+## Overview
+
+This project uses **Docker SQL Server** for local development. All database operations (migrations, seeding, CRUD) happen through the containerized SQL Server.
+
+## Quick Connection Reference
+
+**Container Details:**
+```
+Name: sqlserver-dev
+Port: 1433 (Docker → Host)
+User: sa
+Database: DotNetWebAppDb
+Password: Set via SA_PASSWORD environment variable
+```
+
+**Connection String:**
+```
+Server=localhost,1433;Database=DotNetWebAppDb;User Id=sa;Password=<YOUR_SA_PASSWORD>;TrustServerCertificate=True;
+```
+
+## Essential Commands
+
+| Task | Command |
+|------|---------|
+| **Create container** | `export SA_PASSWORD="YourStrongPassword123!" && make db-create` |
+| **Start container** | `make db-start` |
+| **Stop container** | `make db-stop` |
+| **View logs** | `make db-logs` |
+| **Apply migrations** | `make db-migrate` or `make migrate` |
+| **Seed data** | `make db-seed` or `make seed` |
+| **Drop database** | `make db-drop` (keeps container running) |
+| **Destroy container** | `make db-destroy` (full cleanup) |
+| **Interactive SQL shell** | `docker exec -it sqlserver-dev /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "$SA_PASSWORD"` |
+
+## Initial Setup (One-Time)
+
+### 1. Set the SA Password
+
+```bash
+export SA_PASSWORD="YourStrongPassword123!"
+```
+
+**Password requirements:** min 8 chars, uppercase, lowercase, digits, symbols.
+
+### 2. Create the Docker Container
+
+```bash
+make db-create
+```
+
+This pulls the SQL Server 2022 image, starts the container, and waits 30s for startup.
+
+### 3. Store Password in User Secrets (Optional)
+
+```bash
+./setup.sh
+```
+
+Optionally provides User Secrets storage so you don't re-export `SA_PASSWORD` in every session.
+
+## Full Development Workflow
+
+```bash
+# 1. Set password (or retrieve from User Secrets if already stored)
+export SA_PASSWORD="YourStrongPassword123!"
+
+# 2. Create container (one-time)
+make db-create
+
+# 3. Generate models from DDL
+make run-ddl-pipeline
+
+# 4. Apply migrations to database
+make db-migrate
+
+# 5. Seed sample data
+make db-seed
+
+# 6. Start dev server with hot reload
+make dev
+```
+
+## One-Line Database Connection
+
+Open an interactive SQL shell inside the container:
+
+```bash
+export SA_PASSWORD="YourStrongPassword123!"
+docker exec -it sqlserver-dev /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "$SA_PASSWORD"
+```
+
+Type `GO` to execute statements, `EXIT` to quit.
+
+## Verify Database Connection
+
+```bash
+# Check if container is running
+docker ps | grep sqlserver-dev
+
+# View container logs
+make db-logs
+
+# Check if database exists
+export SA_PASSWORD="YourStrongPassword123!"
+docker exec -i sqlserver-dev /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "$SA_PASSWORD" \
+ -Q "SELECT name FROM sys.databases WHERE name = 'DotNetWebAppDb';"
+```
+
+## Troubleshooting
+
+### "Cannot connect to database"
+
+- Verify container is running: `docker ps | grep sqlserver-dev`
+- Verify `SA_PASSWORD` is exported: `echo $SA_PASSWORD`
+- Check container logs: `make db-logs`
+- If stuck, destroy and recreate: `make db-destroy && make db-create`
+
+### "SA_PASSWORD required" error in make commands
+
+- Export in current shell: `export SA_PASSWORD="YourStrongPassword123!"`
+- Or use setup.sh to store in User Secrets: `./setup.sh`
+
+### Container exits immediately
+
+- Check logs for startup errors: `docker logs sqlserver-dev`
+- Verify SA_PASSWORD meets requirements (min 8 chars, mixed case, digits, symbols)
+- Try recreating: `make db-destroy && make db-create`
+
+### Database connection from .NET app
+
+- The app reads from User Secrets or environment variables during development
+- See `SECRETS.md` for details
+- Connection string is automatically built with `localhost,1433` and stored password
+
+## Migration Patterns
+
+### Standard EF Core Migration
+
+```bash
+make migrate
+```
+
+Use for most development scenarios. Requires SQL Server running and valid connection string.
+
+### Docker-Based Idempotent Migration
+
+```bash
+make db-migrate
+```
+
+Safe to run against existing databases with tables. Automatically initializes schema from `sql/schema.sql` first.
+
+**Requires:** Docker container running, `SA_PASSWORD` environment variable set
+
+## Database Reset Pipeline
+
+### Standard Reset Flow
+
+```bash
+make db-drop && make run-ddl-pipeline && make migrate && make seed
+```
+
+Use this for clean slate after schema.sql changes.
+
+### End-to-End Testing
+
+```bash
+./verify.sh
+```
+
+Use instead of running commands individually. Handles the full pipeline, starts the dev server, and runs comprehensive CRUD tests with automatic cleanup.
+
+**Verify.sh stages:**
+1. `make check` - Build validation
+2. `make test` - Unit tests (192+ tests)
+3. `make db-drop` - Database reset
+4. `make run-ddl-pipeline` - Regenerate models
+5. `make migrate` - Apply migrations
+6. `make seed` - Populate data
+7. `make dev` - Start dev server
+8. Run 14 integration tests
+
+## Secrets Management
+
+- Project uses **User Secrets** for local development
+- Connection strings stored in `~/.microsoft/usersecrets/`, never in git
+- `setup.sh` script automatically configures User Secrets when setting up SQL Server
+- Manual management: `dotnet user-secrets list`, `dotnet user-secrets set`, etc.
+- See `SECRETS.md` for full details
diff --git a/Data/AppDbContext.cs b/Data/AppDbContext.cs
index 827f94d..aec1b4f 100644
--- a/Data/AppDbContext.cs
+++ b/Data/AppDbContext.cs
@@ -1,6 +1,7 @@
using DotNetWebApp.Data.Tenancy;
using DotNetWebApp.Models;
using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Options;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Reflection;
@@ -9,11 +10,15 @@ namespace DotNetWebApp.Data
{
public class AppDbContext : DbContext
{
+ private readonly DatabaseMappingOptions _mappingOptions;
+
public AppDbContext(
DbContextOptions options,
- ITenantSchemaAccessor tenantSchemaAccessor) : base(options)
+ ITenantSchemaAccessor tenantSchemaAccessor,
+ IOptions<DatabaseMappingOptions> mappingOptions) : base(options)
{
Schema = tenantSchemaAccessor.Schema;
+ _mappingOptions = mappingOptions.Value;
}
public string Schema { get; }
@@ -39,38 +44,46 @@ protected override void OnModelCreating(ModelBuilder modelBuilder)
// Extract schema from [Table] attribute if present
 var tableAttr = type.GetCustomAttribute<TableAttribute>();
- var tableName = ToPlural(type.Name);
+
+ // Use table name from [Table] attribute if available, otherwise derive from class name
+ // NOTE: The ModelGenerator generates [Table] names with PascalCase (e.g., "Dmbill"),
+ // but schema.sql uses lowercase (e.g., "dmbill"). Convert to lowercase for consistency.
+ var tableName = (tableAttr?.Name ?? type.Name).ToLower();
var tableSchema = tableAttr?.Schema;
// Apply table name and schema (schema takes precedence from attribute)
- if (!string.IsNullOrWhiteSpace(tableSchema))
+ // Map database names to actual schema names via configuration
+ var effectiveSchema = _mappingOptions.GetEffectiveSchema(tableSchema);
+
+ if (!string.IsNullOrWhiteSpace(effectiveSchema))
{
- entity.ToTable(tableName, tableSchema);
+ entity.ToTable(tableName, effectiveSchema);
}
else
{
entity.ToTable(tableName);
}
- }
- }
- private static string ToPlural(string name)
- {
- if (string.IsNullOrWhiteSpace(name))
- {
- return name;
- }
+ // Configure keyless entities (tables with no primary key defined)
+ var hasKeyProperties = type.GetProperties()
+ .Any(p => p.GetCustomAttribute<KeyAttribute>() != null);
- if (name.EndsWith("y", StringComparison.OrdinalIgnoreCase) && name.Length > 1)
- {
- var beforeY = name[name.Length - 2];
- if (!"aeiou".Contains(char.ToLowerInvariant(beforeY)))
+ if (!hasKeyProperties && type.GetCustomAttribute() == null)
{
- return name[..^1] + "ies";
+ entity.HasNoKey();
}
}
- return name.EndsWith("s", StringComparison.OrdinalIgnoreCase) ? name : $"{name}s";
+ // Configure webapp_lock identity column (gl_index)
+ // The table has a composite primary key with an identity column, which requires explicit configuration
+ var webappLockType = entityTypes.FirstOrDefault(t => t.Name == "Webapp_lock");
+ if (webappLockType != null)
+ {
+ modelBuilder.Entity(webappLockType)
+ .Property("gl_index")
+ .ValueGeneratedOnAdd();
+ }
}
+
}
}
diff --git a/Data/AppDbContextFactory.cs b/Data/AppDbContextFactory.cs
index 76a19e4..4a7029c 100644
--- a/Data/AppDbContextFactory.cs
+++ b/Data/AppDbContextFactory.cs
@@ -2,6 +2,7 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.Options;
namespace DotNetWebApp.Data
{
@@ -16,12 +17,20 @@ public AppDbContext CreateDbContext(string[] args)
.Build();
 var optionsBuilder = new DbContextOptionsBuilder<AppDbContext>();
- var connectionString = configuration.GetConnectionString("DefaultConnection")
+ var connectionString = configuration.GetConnectionString("PrimaryDatabase")
+ ?? configuration.GetConnectionString("DefaultConnection")
?? "Server=localhost;Database=DotNetWebAppDb;Trusted_Connection=true;Encrypt=False;";
optionsBuilder.UseSqlServer(connectionString);
- return new AppDbContext(optionsBuilder.Options, new DesignTimeSchemaAccessor());
+ // Load DatabaseMappingOptions from configuration
+ var mappingOptions = new DatabaseMappingOptions();
+ configuration.GetSection(DatabaseMappingOptions.SectionName).Bind(mappingOptions);
+
+ return new AppDbContext(
+ optionsBuilder.Options,
+ new DesignTimeSchemaAccessor(),
+ Options.Create(mappingOptions));
}
private sealed class DesignTimeSchemaAccessor : ITenantSchemaAccessor
diff --git a/Data/Dapper/SecondaryDapperQueryService.cs b/Data/Dapper/SecondaryDapperQueryService.cs
new file mode 100644
index 0000000..e638c6c
--- /dev/null
+++ b/Data/Dapper/SecondaryDapperQueryService.cs
@@ -0,0 +1,205 @@
+using System.Data;
+using Dapper;
+using DotNetWebApp.Constants;
+using Microsoft.Data.SqlClient;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
+
+namespace DotNetWebApp.Data.Dapper;
+
+///
+/// Read-only Dapper service for the secondary database (WEBAPPMisc).
+/// Uses SecondaryDbContext's connection for queries to webapp_scheduler, webapp_allocate, etc.
+/// Automatically inherits tenant schema from EF Core context.
+///
+public class SecondaryDapperQueryService : IDapperQueryService
+{
+ private readonly SecondaryDbContext _dbContext;
+ private readonly ILogger<SecondaryDapperQueryService> _logger;
+
+ /// <summary>
+ /// Initializes a new instance of SecondaryDapperQueryService.
+ /// Uses the SecondaryDbContext's connection for all queries.
+ /// </summary>
+ /// <param name="dbContext">EF Core secondary database context</param>
+ /// <param name="logger">Logger instance</param>
+ public SecondaryDapperQueryService(SecondaryDbContext dbContext, ILogger<SecondaryDapperQueryService> logger)
+ {
+ _dbContext = dbContext ?? throw new ArgumentNullException(nameof(dbContext));
+ _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+ }
+
+ /// <summary>
+ /// Executes a query and returns multiple results.
+ /// Automatically handles connection state (opens if closed).
+ /// </summary>
+ public async Task<IEnumerable<T>> QueryAsync<T>(string sql, object? param = null)
+ {
+ // Parameter validation
+ if (string.IsNullOrWhiteSpace(sql))
+ throw new ArgumentException($"[{ErrorIds.QueryInvalidParameter}] SQL query cannot be null or empty", nameof(sql));
+
+ var connection = _dbContext.Database.GetDbConnection();
+
+ try
+ {
+ _logger.LogDebug(
+ "Executing Dapper query on SECONDARY database (Type: {ResultType}): {Sql}",
+ typeof(T).Name,
+ TruncateSql(sql));
+
+ // Connection state may be closed; Dapper will handle opening it
+ return await connection.QueryAsync<T>(sql, param);
+ }
+ catch (SqlException ex)
+ {
+ _logger.LogError(
+ ex,
+ "[{ErrorId}] SQL Server error executing query on SECONDARY database (Type: {ResultType}, ErrorCode: {ErrorCode}): {Sql}",
+ ErrorIds.SqlError,
+ typeof(T).Name,
+ ex.Number,
+ TruncateSql(sql));
+
+ var friendlyMessage = ErrorIds.GetFriendlySqlErrorMessage(ex.Number, ex.Message);
+ throw new InvalidOperationException(
+ $"[{ErrorIds.SqlError}] {friendlyMessage} (Code: {ex.Number})", ex);
+ }
+ catch (OperationCanceledException ex)
+ {
+ _logger.LogError(
+ ex,
+ "[{ErrorId}] Query timeout for type {ResultType} on SECONDARY database",
+ ErrorIds.QueryTimeout,
+ typeof(T).Name);
+
+ throw new InvalidOperationException(
+ $"[{ErrorIds.QueryTimeout}] Query execution timed out. The database is responding slowly. Please try again.", ex);
+ }
+ catch (ArgumentException ex)
+ {
+ // This indicates a bug in SQL or parameter mapping - re-throw unchanged
+ _logger.LogError(
+ ex,
+ "[{ErrorId}] Invalid query or parameters for type {ResultType}: {Sql}",
+ ErrorIds.QueryInvalidParameter,
+ typeof(T).Name,
+ TruncateSql(sql));
+
+ throw;
+ }
+ catch (OutOfMemoryException ex)
+ {
+ // Critical condition - log and re-throw unchanged
+ _logger.LogCritical(
+ ex,
+ "[{ErrorId}] Out of memory executing query - result set may be too large (Type: {ResultType})",
+ ErrorIds.QueryOutOfMemory,
+ typeof(T).Name);
+
+ throw;
+ }
+ finally
+ {
+ // Explicitly close the connection if it was opened
+ // This ensures connection is returned to pool even on error
+ if (connection.State == ConnectionState.Open)
+ {
+ await connection.CloseAsync();
+ }
+ }
+ }
+
+ /// <summary>
+ /// Executes a query and returns a single result or null.
+ /// Throws if multiple rows would be returned.
+ /// </summary>
+ public async Task<T?> QuerySingleAsync<T>(string sql, object? param = null)
+ {
+ // Parameter validation
+ if (string.IsNullOrWhiteSpace(sql))
+ throw new ArgumentException($"[{ErrorIds.QueryInvalidParameter}] SQL query cannot be null or empty", nameof(sql));
+
+ var connection = _dbContext.Database.GetDbConnection();
+
+ try
+ {
+ _logger.LogDebug(
+ "Executing Dapper single query on SECONDARY database (Type: {ResultType}): {Sql}",
+ typeof(T).Name,
+ TruncateSql(sql));
+
+ return await connection.QuerySingleOrDefaultAsync<T>(sql, param);
+ }
+ catch (SqlException ex)
+ {
+ _logger.LogError(
+ ex,
+ "[{ErrorId}] SQL Server error executing single query on SECONDARY database (Type: {ResultType}, ErrorCode: {ErrorCode}): {Sql}",
+ ErrorIds.SqlError,
+ typeof(T).Name,
+ ex.Number,
+ TruncateSql(sql));
+
+ var friendlyMessage = ErrorIds.GetFriendlySqlErrorMessage(ex.Number, ex.Message);
+ throw new InvalidOperationException(
+ $"[{ErrorIds.SqlError}] {friendlyMessage} (Code: {ex.Number})", ex);
+ }
+ catch (OperationCanceledException ex)
+ {
+ _logger.LogError(
+ ex,
+ "[{ErrorId}] Single query timeout for type {ResultType} on SECONDARY database",
+ ErrorIds.QueryTimeout,
+ typeof(T).Name);
+
+ throw new InvalidOperationException(
+ $"[{ErrorIds.QueryTimeout}] Query execution timed out. The database is responding slowly. Please try again.", ex);
+ }
+ catch (ArgumentException ex)
+ {
+ // This indicates a bug in SQL or parameter mapping - re-throw unchanged
+ _logger.LogError(
+ ex,
+ "[{ErrorId}] Invalid query or parameters for type {ResultType}: {Sql}",
+ ErrorIds.QueryInvalidParameter,
+ typeof(T).Name,
+ TruncateSql(sql));
+
+ throw;
+ }
+ catch (OutOfMemoryException ex)
+ {
+ // Critical condition - log and re-throw unchanged
+ _logger.LogCritical(
+ ex,
+ "[{ErrorId}] Out of memory executing single query - result set may be too large (Type: {ResultType})",
+ ErrorIds.QueryOutOfMemory,
+ typeof(T).Name);
+
+ throw;
+ }
+ finally
+ {
+ // Explicitly close the connection if it was opened
+ // This ensures connection is returned to pool even on error
+ if (connection.State == ConnectionState.Open)
+ {
+ await connection.CloseAsync();
+ }
+ }
+ }
+
+ ///
+ /// Truncates long SQL strings for logging.
+ /// Prevents log spam while preserving enough context for debugging.
+ ///
+ private static string TruncateSql(string sql)
+ {
+ const int maxLength = 150;
+ if (string.IsNullOrEmpty(sql))
+ return string.Empty;
+
+ return sql.Length > maxLength ? sql[..maxLength] + "..." : sql;
+ }
+}
diff --git a/Data/DatabaseMappingOptions.cs b/Data/DatabaseMappingOptions.cs
new file mode 100644
index 0000000..5db255d
--- /dev/null
+++ b/Data/DatabaseMappingOptions.cs
@@ -0,0 +1,53 @@
+namespace DotNetWebApp.Data
+{
+ ///
+ /// Configuration options for database and schema mappings.
+ /// Allows database names (like WEBAPP, WEBAPPMisc) to be mapped to actual schema names (like dbo).
+ ///
+ public class DatabaseMappingOptions
+ {
+ public const string SectionName = "DatabaseMapping";
+
+ ///
+ /// Maps schema names from [Table] attributes to actual database schema names.
+ /// Key: Schema name in generated code (e.g., "WEBAPP")
+ /// Value: Actual schema name in database (e.g., "dbo")
+ ///
+ public Dictionary<string, string> SchemaMappings { get; set; } = new();
+
+ ///
+ /// Namespace pattern to identify entities that belong to the secondary database.
+ /// Entities with this pattern in their namespace will use SecondaryDbContext.
+ ///
+ public string SecondaryNamespacePattern { get; set; } = string.Empty;
+
+ ///
+ /// Gets the effective schema for a given schema name from [Table] attribute.
+ /// Returns the mapped schema if a mapping exists, otherwise returns the original.
+ ///
+ public string GetEffectiveSchema(string? tableSchema)
+ {
+ if (string.IsNullOrWhiteSpace(tableSchema))
+ {
+ return tableSchema ?? string.Empty;
+ }
+
+ return SchemaMappings.TryGetValue(tableSchema, out var mappedSchema)
+ ? mappedSchema
+ : tableSchema;
+ }
+
+ ///
+ /// Determines if an entity type belongs to the secondary database based on namespace.
+ ///
+ public bool IsSecondaryDatabase(Type entityType)
+ {
+ if (string.IsNullOrWhiteSpace(SecondaryNamespacePattern))
+ {
+ return false;
+ }
+
+ return entityType.Namespace != null && entityType.Namespace.Contains(SecondaryNamespacePattern);
+ }
+ }
+}
diff --git a/Data/IDbContextResolver.cs b/Data/IDbContextResolver.cs
new file mode 100644
index 0000000..3a1d576
--- /dev/null
+++ b/Data/IDbContextResolver.cs
@@ -0,0 +1,43 @@
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Options;
+
+namespace DotNetWebApp.Data
+{
+    /// <summary>
+    /// Resolves the appropriate DbContext based on entity type.
+    /// Routes entities to SecondaryDbContext based on configured namespace pattern.
+    /// </summary>
+    public interface IDbContextResolver
+    {
+        DbContext GetContextForEntity(Type entityType);
+    }
+
+    public class DbContextResolver : IDbContextResolver
+    {
+        private readonly AppDbContext _primaryContext;
+        private readonly SecondaryDbContext _secondaryContext;
+        private readonly DatabaseMappingOptions _mappingOptions;
+
+        public DbContextResolver(
+            AppDbContext primaryContext,
+            SecondaryDbContext secondaryContext,
+            IOptions<DatabaseMappingOptions> mappingOptions)
+        {
+            _primaryContext = primaryContext;
+            _secondaryContext = secondaryContext;
+            _mappingOptions = mappingOptions.Value;
+        }
+
+        public DbContext GetContextForEntity(Type entityType)
+        {
+            // Route to SecondaryDbContext based on configured namespace pattern
+            if (_mappingOptions.IsSecondaryDatabase(entityType))
+            {
+                return _secondaryContext;
+            }
+
+            // All other entities use AppDbContext (PrimaryDatabase)
+            return _primaryContext;
+        }
+    }
+}
diff --git a/Data/SecondaryDbContext.cs b/Data/SecondaryDbContext.cs
new file mode 100644
index 0000000..2eee2c5
--- /dev/null
+++ b/Data/SecondaryDbContext.cs
@@ -0,0 +1,67 @@
+using DotNetWebApp.Models;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Options;
+using System.ComponentModel.DataAnnotations.Schema;
+using System.Linq;
+using System.Reflection;
+
+namespace DotNetWebApp.Data
+{
+    /// <summary>
+    /// Secondary DbContext for the secondary database.
+    /// Only registers entities matching the configured SecondaryNamespacePattern.
+    /// </summary>
+    public class SecondaryDbContext : DbContext
+    {
+        private readonly DatabaseMappingOptions _mappingOptions;
+
+        public SecondaryDbContext(
+            DbContextOptions<SecondaryDbContext> options,
+            IOptions<DatabaseMappingOptions> mappingOptions) : base(options)
+        {
+            _mappingOptions = mappingOptions.Value;
+        }
+
+        protected override void OnModelCreating(ModelBuilder modelBuilder)
+        {
+            base.OnModelCreating(modelBuilder);
+
+            // Default to dbo schema
+            modelBuilder.HasDefaultSchema("dbo");
+
+            // Only register entities matching the secondary namespace pattern
+            var modelsAssembly = typeof(EntityMetadata).Assembly;
+            var namespacePattern = _mappingOptions.SecondaryNamespacePattern;
+            var entityTypes = modelsAssembly.GetTypes()
+                .Where(t => t.IsClass && t.Namespace != null &&
+                            !string.IsNullOrWhiteSpace(namespacePattern) &&
+                            t.Namespace.Contains(namespacePattern));
+
+            foreach (var type in entityTypes)
+            {
+                var entity = modelBuilder.Entity(type);
+
+                var tableAttr = type.GetCustomAttribute<TableAttribute>();
+                var tableName = (tableAttr?.Name ?? type.Name).ToLower();
+
+                // Map schema via configuration
+                var effectiveSchema = _mappingOptions.GetEffectiveSchema(tableAttr?.Schema);
+                if (string.IsNullOrWhiteSpace(effectiveSchema))
+                {
+                    effectiveSchema = "dbo";
+                }
+
+                entity.ToTable(tableName, effectiveSchema);
+
+                // Configure keyless entities
+                var hasKeyProperties = type.GetProperties()
+                    .Any(p => p.GetCustomAttribute<KeyAttribute>() != null);
+
+                if (!hasKeyProperties && type.GetCustomAttribute<KeylessAttribute>() == null)
+                {
+                    entity.HasNoKey();
+                }
+            }
+        }
+    }
+}
diff --git a/DdlParser/CreateTableVisitor.cs b/DdlParser/CreateTableVisitor.cs
index dda42f4..2b5c363 100644
--- a/DdlParser/CreateTableVisitor.cs
+++ b/DdlParser/CreateTableVisitor.cs
@@ -6,12 +6,30 @@ public class CreateTableVisitor : TSqlFragmentVisitor
{
public List<TableMetadata> Tables { get; } = new();
+ // Track current database context from USE statements
+ // This becomes the database in app.yaml (e.g., USE [acme] -> database: acme)
+ private string _currentDatabase = string.Empty;
+
+ public override void Visit(UseStatement node)
+ {
+ // Track the current database context from USE [database] statements
+ if (node.DatabaseName != null)
+ {
+ _currentDatabase = node.DatabaseName.Value;
+ }
+ base.Visit(node);
+ }
+
public override void Visit(CreateTableStatement node)
{
- // Get table name and schema from SchemaObjectName
+ // Get table name from SchemaObjectName
var tableName = GetIdentifierValue(node.SchemaObjectName) ?? "UnknownTable";
- var schema = GetSchemaName(node.SchemaObjectName) ?? string.Empty;
- var table = new TableMetadata { Name = tableName, Schema = schema };
+
+ // Database comes from the USE [database] statement (for namespace/DbContext routing)
+ // Schema comes from the CREATE TABLE [schema].[table] syntax (actual SQL schema for [Table] attribute)
+ var database = _currentDatabase;
+ var sqlSchema = GetSchemaName(node.SchemaObjectName) ?? string.Empty;
+ var table = new TableMetadata { Name = tableName, Database = database, Schema = sqlSchema };
// Extract columns
if (node.Definition?.ColumnDefinitions != null)
diff --git a/DdlParser/SqlDdlParser.cs b/DdlParser/SqlDdlParser.cs
index 7e37c5c..75786e4 100644
--- a/DdlParser/SqlDdlParser.cs
+++ b/DdlParser/SqlDdlParser.cs
@@ -5,6 +5,7 @@ namespace DdlParser;
public class TableMetadata
{
public string Name { get; set; } = string.Empty;
+ public string Database { get; set; } = string.Empty;
public string Schema { get; set; } = string.Empty;
public List Columns { get; set; } = new();
public List ForeignKeys { get; set; } = new();
diff --git a/DdlParser/YamlGenerator.cs b/DdlParser/YamlGenerator.cs
index 89ab4a9..f0bd38a 100644
--- a/DdlParser/YamlGenerator.cs
+++ b/DdlParser/YamlGenerator.cs
@@ -47,6 +47,7 @@ private List ConvertTablesToEntities(List tables)
var entity = new Entity
{
Name = SingularizeName(table.Name),
+ Database = table.Database,
Schema = table.Schema,
Properties = ConvertColumnsToProperties(table.Columns),
Relationships = ConvertForeignKeysToRelationships(table.ForeignKeys)
@@ -102,22 +103,8 @@ private List ConvertForeignKeysToRelationships(List Properties { get; set; }
public List Relationships { get; set; }
diff --git a/DotNetWebApp.Models/DotNetWebApp.Models.csproj b/DotNetWebApp.Models/DotNetWebApp.Models.csproj
index 1d00652..4be7dc7 100644
--- a/DotNetWebApp.Models/DotNetWebApp.Models.csproj
+++ b/DotNetWebApp.Models/DotNetWebApp.Models.csproj
@@ -7,5 +7,6 @@
+
diff --git a/DotNetWebApp.csproj b/DotNetWebApp.csproj
index 5266e3d..89b046c 100644
--- a/DotNetWebApp.csproj
+++ b/DotNetWebApp.csproj
@@ -8,6 +8,7 @@
+
diff --git a/DotNetWebApp.sln b/DotNetWebApp.sln
index 02427ca..6eb73de 100644
--- a/DotNetWebApp.sln
+++ b/DotNetWebApp.sln
@@ -24,6 +24,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DotNetWebApp.Models", "DotN
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DdlParser.Tests", "tests\DdlParser.Tests\DdlParser.Tests.csproj", "{AB65500C-C886-4A9D-A5BA-0010DBBB317C}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "YamlMerger.Tests", "tests\YamlMerger.Tests\YamlMerger.Tests.csproj", "{8557BD00-C3EF-40E1-9979-BB01045DEC8E}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -66,6 +68,10 @@ Global
{AB65500C-C886-4A9D-A5BA-0010DBBB317C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AB65500C-C886-4A9D-A5BA-0010DBBB317C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AB65500C-C886-4A9D-A5BA-0010DBBB317C}.Release|Any CPU.Build.0 = Release|Any CPU
+ {8557BD00-C3EF-40E1-9979-BB01045DEC8E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {8557BD00-C3EF-40E1-9979-BB01045DEC8E}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {8557BD00-C3EF-40E1-9979-BB01045DEC8E}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {8557BD00-C3EF-40E1-9979-BB01045DEC8E}.Release|Any CPU.Build.0 = Release|Any CPU
{D2E3F4G5-H6I7-4D7E-0E1F-2G3H4I5J6K7L}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D2E3F4G5-H6I7-4D7E-0E1F-2G3H4I5J6K7L}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D2E3F4G5-H6I7-4D7E-0E1F-2G3H4I5J6K7L}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -80,6 +86,7 @@ Global
GlobalSection(NestedProjects) = preSolution
{6F83E7CB-3C85-4D2D-97C5-D9C6DEEB85DD} = {54E20A9E-ED73-485E-BAD6-C2FC3290BBDC}
{AB65500C-C886-4A9D-A5BA-0010DBBB317C} = {54E20A9E-ED73-485E-BAD6-C2FC3290BBDC}
+ {8557BD00-C3EF-40E1-9979-BB01045DEC8E} = {54E20A9E-ED73-485E-BAD6-C2FC3290BBDC}
{D2E3F4G5-H6I7-4D7E-0E1F-2G3H4I5J6K7L} = {54E20A9E-ED73-485E-BAD6-C2FC3290BBDC}
EndGlobalSection
EndGlobal
diff --git a/GEMINI.md b/GEMINI.md
index 5985480..9a94fd0 100644
--- a/GEMINI.md
+++ b/GEMINI.md
@@ -1,113 +1,95 @@
# GEMINI Project Context: DotNetWebApp
-## Project Overview
-
-This is a .NET 8 web application built with a Blazor Server frontend and a Web API backend. It provides a SPA experience and supports multi-tenancy. Data access is via Entity Framework Core against SQL Server. The UI is built with Radzen Blazor components.
-
-**Key Technologies:**
-
-* **.NET 8:** Core framework for the application.
-* **ASP.NET Core:** For the web server and API.
-* **Blazor Server:** Reactive frontend UI.
-* **Entity Framework Core:** Data access (migrations generated from DDL pipeline).
-* **SQL Server:** Relational database.
-* **Radzen.Blazor:** UI component library.
-* **Docker:** Used for dev containers (app + database).
-
-**Architecture:**
-
-* **`Program.cs`:** Entry point and service registration.
-* **`Components/`:** Blazor UI components.
-* **`Controllers/`:** API controllers.
-* **`Data/`:** `AppDbContext`, tenancy helpers, and dynamic model wiring.
-* **`Models/`:** Entity models (including `Models/Generated`).
-* **`Services/`:** Business logic and DI services.
-* **`Migrations/`:** Generated EF Core migration files (ignored in repo).
-
-## Current Direction (DDL-first)
-
-The app uses SQL DDL as the source of truth, generating `app.yaml` that drives:
-* app branding + theme
-* dynamic model generation (`ModelGenerator`)
-* API and UI entity navigation
-
-Generated entities live in `Models/Generated` and are wired into `AppDbContext` via reflection. Table names are pluralized (e.g., `Product` -> `Products`) to align with existing SQL tables.
-
-## Current State / Recent Fixes
-
-* DDL-driven metadata and model definitions are generated into `app.yaml`.
-* `ModelGenerator` creates `Models/Generated`; optional value types are nullable to avoid forced defaults.
-* `AppDictionaryService` exposes YAML metadata to the UI and navigation.
-* UI uses Radzen panel menu components and includes a dynamic "Data" section.
-* Generic entity pages load data via `GenericEntityPage.razor` with the route `api/{entity.Name}` and singular controllers.
-
-## Database / Migrations
-
-Migrations are generated by `make run-ddl-pipeline` from the SQL DDL schema. If you see errors like:
-* `Invalid object name 'dbo.Category'`
-* `Invalid column name 'CategoryId'`
-
-the database schema does not match the current DDL. Run `make db-start`, `make run-ddl-pipeline`, then `make migrate`.
-
-## Building and Running
-
-The project uses a `Makefile` to simplify common development tasks.
-
-### Prerequisites
-
-1. **Install SQL Server:** Run `./setup.sh` to install SQL Server via Docker or on the host machine.
-2. **Install .NET EF Tools:** `dotnet tool install --global dotnet-ef --version 8.*`
-3. **Use the wrapper:** `make` targets call `./dotnet-build.sh`, which sets `DOTNET_ROOT` for global tools and bypasses `global.json` locally.
-
-### Key Commands
-
-* **Check and Restore Dependencies:**
- ```bash
- make check
- ```
-
-* **Generate Schema Migration:**
- ```bash
- make run-ddl-pipeline
- make migrate
- ```
-
-* **Build the Application:**
- ```bash
- make build
- ```
-
-* **Run in Development Mode (with hot reload):**
- ```bash
- make dev
- ```
-
-* **Run in Production-like Mode:**
- ```bash
- make run
- ```
-
-* **Run Tests:**
- ```bash
- make test
- ```
-
-* **Build Docker Image:**
- ```bash
- make docker-build
- ```
-
-## Development Conventions
-
-* **Dependency Injection:** Services are registered in `Program.cs` and injected into constructors. This is the standard pattern for .NET Core applications.
-* **Async/Await:** Asynchronous programming is used for I/O operations, particularly in the service layer and controllers when interacting with the database.
-* **Separation of Concerns:** The project is organized into distinct layers (UI, API, Services, Data) to keep the codebase clean and maintainable.
-* **Configuration:** Application settings are managed in `appsettings.json` and `appsettings.Development.json`. Secrets are managed using the .NET User Secrets manager (see `SECRETS.md`).
-* **Multi-Tenancy:** The `Data/Tenancy` folder and the `AppDbContext` show a mechanism for supporting multiple tenants with different database schemas.
-
-## Guardrails (Do Not Break)
-
-* `make check` runs `shellcheck` on `setup.sh` and `dotnet-build.sh` before the build.
-* Do not modify or reinstall the system .NET runtime; use the `dotnet-build.sh` wrapper via `make`.
-* Keep Radzen UI wiring intact (NavMenu and theme CSS).
-* Ensure the DDL pipeline and migration are applied before debugging 500s in entity pages.
+## 🤖 Role & Profile
+You are an expert .NET/C# engineer acting as the Gemini CLI agent. You specialize in:
+- **ASP.NET Core 8 Web API** + **Entity Framework Core**
+- **Blazor Server** + **Radzen UI Components**
+- **Hybrid Data Access:** EF Core (writes) + Dapper (reads)
+- **SQL-First Development:** DDL-driven schema & views
+
+## 🚫 CRITICAL RULES
+
+### 1. Git Operations
+- **FORBIDDEN:** `git add`, `git commit`, `git push`, `git checkout`, etc.
+- **ALLOWED:** `git status`, `git log`, `git diff`, `git show`.
+- **Reason:** You must NOT perform write operations on the git repository.
+
+### 2. UI Development (Radzen)
+- **Enum Prefix:** ALWAYS use `@` prefix for enums (e.g., `ButtonStyle="@ButtonStyle.Primary"`).
+- **Components:** Use Radzen components (e.g., `<RadzenButton>`), NOT HTML tags.
+- **Directives:** Ensure `<RadzenComponents />` is in `MainLayout.razor`.
+- **Render Mode:** Do NOT use `@rendermode InteractiveServer` (this is Blazor Server, not Web App).
+- **DataGrid:** For `TItem="object"`, use a `<Template>` column with reflection, NOT property binding.
+
+## 🏗️ Architecture Overview
+
+### Hybrid Data Access
+- **Writes (EF Core):** All CRUD operations go through `IEntityOperationService`. 200+ entities generated from SQL DDL.
+- **Reads (Dapper):** Complex queries, reports, and dashboards use `IViewService` with Dapper. SQL views are the source of truth.
+
+### SQL-First Pipelines
+1. **Entities:** `sql/schema.sql` → `app.yaml` → `Models/Generated/*.cs` → EF Core.
+2. **Views:** `sql/views/*.sql` → `views.yaml` → `app.yaml` → `Models/ViewModels/*.cs` → Dapper.
+3. **Unified Command:** `make run-ddl-pipeline` handles both.
+
+### Multi-Tenancy
+- **Strategy:** Schema-based isolation derived from `USE [database]` in `sql/schema.sql`.
+- **Implementation:** Finbuckle.MultiTenant identifies tenant via `X-Customer-Schema` header.
+- **Propagation:** EF Core connection is schema-aware; Dapper shares this connection automatically.
+
+## 🛠️ Workflows & Commands
+
+### Build & Run
+- `make check`: Full restore, build, and validation (slow).
+- `make build`: Fast debug build (skips tests).
+- `make dev`: Run with hot reload.
+- `make test`: Run all unit tests (MANDATORY before confirming tasks).
+
+### Pipelines
+- `make run-ddl-pipeline`: Regenerate all models (entities & views) from SQL.
+- `./verify.sh`: End-to-end verification (Build + Test + Pipeline + Seed + Integration). **Single Source of Truth.**
+
+### Database
+- `make seed`: Reset DB and apply `sql/seed.sql`.
+- `make migrate`: Apply EF Core migrations.
+
+## 📂 Project Structure
+
+```
+DotNetWebApp/
+├── sql/
+│ ├── schema.sql # DDL Source (Entities)
+│ ├── views/ # SQL Views (Read Models)
+│ └── seed.sql # Seed Data
+├── app.yaml # Generated Metadata
+├── appsettings.json # Configuration + ViewDefinitions
+├── DotNetWebApp.Models/ # Separate Assembly
+│ ├── Generated/ # EF Entities
+│ ├── ViewModels/ # Dapper DTOs
+│ └── AppDictionary/ # Metadata Models
+├── Services/
+│ ├── IEntityOperationService.cs # EF Core Abstraction
+│ └── Views/ # Dapper/View Abstractions
+├── Components/
+│ ├── Pages/ # Blazor Pages
+│ └── Shared/ # Reusable Components (ViewSection, etc.)
+├── Controllers/ # API Endpoints
+└── DdlParser/ # Pipeline Tooling
+```
+
+## ✅ Current Status
+
+**Completed Phases:**
+1. **Foundation:** Blazor Server, Docker, API setup.
+2. **Data Model:** DDL → YAML → C# generation pipeline.
+3. **Phase 1 (Refactor):** `IEntityOperationService` with compiled delegates (High Perf).
+4. **Phase 2 (Views):** SQL-First View Pipeline (`IViewService`, `IViewRegistry`).
+5. **Phase 3+4 (UI Patterns):** Generic `ViewSection`, `ApplicationSwitcher`, and editable grid patterns.
+6. **WAMS MVP:** Web App Management System with 39-column grid, state machine, and soft delete.
+
+**Tests:** 580+ tests passing. High coverage on service layer.
+
+## 📚 References & Skills
+- **Skills:** Use `.claude/skills/radzen-blazor` for UI work.
+- **Architecture:** See `ARCHITECTURE_SUMMARY.md` and `HYBRID_ARCHITECTURE.md`.
+- **Multi-Tenancy:** See `MULTI_TENANT.md`.
\ No newline at end of file
diff --git a/MULTI_TENANT.md b/MULTI_TENANT.md
new file mode 100644
index 0000000..9ae29e2
--- /dev/null
+++ b/MULTI_TENANT.md
@@ -0,0 +1,160 @@
+# Multi-Schema Support Guide
+
+## Overview
+
+**This project is DDL-driven.** Everything flows from `sql/schema.sql` → `app.yaml` → generated C# models.
+
+## Critical: Schema Derivation from SQL
+
+**Schemas are derived from `USE [database]` statements in `sql/schema.sql`**, NOT from the `[schema].[table]` syntax in CREATE TABLE statements.
+
+### Example
+
+```sql
+USE [acme] -- Sets schema to "acme" for following tables
+CREATE TABLE [dbo].[Product](...) -- Table name: Product, Schema: acme (not dbo!)
+
+USE [initech] -- Sets schema to "initech" for following tables
+CREATE TABLE [dbo].[Company](...) -- Table name: Company, Schema: initech
+```
+
+### Schema Mapping Table
+
+| SQL Statement | EF Core Schema | Result |
+|---------------|----------------|--------|
+| `USE [acme]` | `acme` | Tables become `acme:TableName` |
+| `USE [initech]` | `initech` | Tables become `initech:TableName` |
+
+## Critical: appsettings.json MUST Match Schemas
+
+**Applications in `appsettings.json` MUST reference entities that exist in `app.yaml`!**
+
+When schema.sql changes (new `USE [database]` statements), you MUST update:
+1. `appsettings.json` → Applications → Schema field
+2. `appsettings.json` → Applications → Entities list
+3. `verify.sh` → Test URLs to match new schema/entity names
+
+### Example Application Configuration
+
+```json
+{
+ "Name": "admin",
+ "Schema": "acme",
+ "Entities": ["acme:Product", "acme:Category", "acme:Company", ...]
+},
+{
+ "Name": "metrics",
+ "Schema": "initech",
+ "Entities": ["initech:Company", "initech:User", ...]
+}
+```
+
+## Schema-Qualified Name Formats
+
+Different parts of the system use different formats for schema-qualified names:
+
+- **Browser URLs:** `schema/TableName` (e.g., `/entity/acme/Product`) - uses slash, URL-safe
+- **API endpoints:** `schema:TableName` (e.g., `/api/admin/entities/acme/Product`)
+- **C# Namespaces:** `DotNetWebApp.Models.Generated.{Schema}.{TableName}` (e.g., `...Generated.Acme.Product`)
+- **YAML (app.yaml):** `schema:` field matches database name (e.g., `schema: acme`)
+
+**Important:** Colons in URLs are interpreted by browsers as protocol schemes (like `mailto:`), causing `xdg-open` popups. Always use slashes for browser-facing URLs and convert to colons for API calls.
+
+## Critical Patterns by File
+
+| File | What to use | NOT this |
+|------|-------------|----------|
+| `EntityMetadataService.cs` | Pascal-cased schema in namespace: `Generated.Acme.Product` | `Generated.acme.Product` |
+| `DashboardService.cs` | `$"{schema}:{name}"` in both try AND catch blocks | `entity.Definition.Name` |
+| `EntitySection.razor` | `EntityName` parameter (colon format for API) | `metadata.Definition.Name` |
+| `GenericEntityPage.razor` | Convert URL `Schema/EntityName` to API `schema:name` | Using URL format for API |
+| `NavMenu.razor` | Build path as `entity/{schema}/{name}` (slash for URLs) | Colons in browser URLs |
+| `SpaSectionService.cs` | RouteSegment=`schema/name`, EntityName=`schema:name` | Same format for both |
+| `SpaApp.razor` | Convert URL slash format to API colon format | Using slash format for API |
+
+## Code Examples
+
+### URL Routing (Use Slashes)
+
+```csharp
+// ✅ CORRECT - slash-separated for browser URLs
+var path = $"entity/{entity.Schema}/{entity.Name}"; // "/entity/acme/Product"
+
+// ❌ WRONG - colon triggers browser protocol handler popup
+var path = $"{entity.Schema}:{entity.Name}"; // "acme:Product" causes xdg-open!
+```
+
+### API Calls (Use Colons)
+
+```csharp
+// ✅ CORRECT - colon-separated for API calls
+var qualifiedName = $"{schema}:{entityName}"; // "acme:Product"
+var result = await EntityApiService.GetEntitiesAsync(qualifiedName);
+
+// ❌ WRONG - strips schema, returns wrong data when duplicate table names exist
+var result = await EntityApiService.GetEntitiesAsync(metadata.Definition.Name);
+```
+
+## Multi-Tenancy Implementation
+
+### HTTP Header
+
+Schema switching via `X-Customer-Schema` HTTP header (defaults to `dbo`)
+
+### Finbuckle.MultiTenant
+
+- Automatic schema inheritance for Dapper queries
+- Schema isolation at database level
+- See `HYBRID_ARCHITECTURE.md` for full details
+
+## Regression Testing
+
+`verify.sh` Test 12 validates multi-schema isolation by checking that:
+- `acme:Company` returns `name` field
+- `initech:Company` returns `companyName` field
+
+These entities have different schemas with different properties, ensuring schema isolation works correctly.
+
+## Common Pitfalls
+
+### 1. Using Colons in Browser URLs
+
+**Problem:** Colons trigger browser protocol handlers (like `mailto:`)
+
+**Solution:** Use slashes in all browser-facing URLs, convert to colons for API calls
+
+### 2. Not Updating verify.sh After Schema Changes
+
+**Problem:** Tests reference old entity names that no longer exist
+
+**Solution:** Always update verify.sh test URLs when modifying schema.sql
+
+### 3. Missing Schema Prefix in API Calls
+
+**Problem:** API calls use bare entity name instead of schema-qualified name
+
+**Solution:** Always use `schema:EntityName` format for API calls
+
+### 4. Wrong Schema in appsettings.json Applications
+
+**Problem:** Application references entity that doesn't exist in app.yaml
+
+**Solution:** Ensure Applications.Schema and Applications.Entities match generated app.yaml
+
+## Verification Checklist
+
+When adding or modifying schemas:
+
+- [ ] Update `sql/schema.sql` with `USE [schema]` statements
+- [ ] Run `make run-ddl-pipeline` to regenerate models
+- [ ] Update `appsettings.json` Applications to reference correct schemas
+- [ ] Update `verify.sh` test URLs to match new entity names
+- [ ] Run `./verify.sh` to verify end-to-end
+- [ ] Commit schema.sql, appsettings.json, and verify.sh together
+
+## Proprietary Data Warning
+
+- **`sql/schema.sql` and `sql/seed.sql` may contain proprietary client database schemas**
+- **DO NOT commit real client data to git**
+- **Documentation uses example schemas** (`acme`, `initech`)
+- **Actual runtime schemas** are derived from whatever `USE [database]` statements exist in schema.sql
diff --git a/Makefile b/Makefile
index 8d3c915..b694559 100644
--- a/Makefile
+++ b/Makefile
@@ -1,11 +1,17 @@
# shellcheck shell=bash
-# shellcheck disable=SC2034,SC1089,SC2288,SC2046,SC1072,SC1073
+# shellcheck disable=SC2034,SC1089,SC2288,SC2046,SC1072,SC1073,SC1090,SC1091,SC2171
DOTNET=./dotnet-build.sh
# shellcheck disable=SC2034
IMAGE_NAME=dotnetwebapp
# shellcheck disable=SC2034
TAG=latest
+
+# [FIXME:Use env vars] Database names - configure these to match your sql/schema.sql USE statements
+# shellcheck disable=SC2034
+PRIMARY_DB=WEBAPP
+# shellcheck disable=SC2034
+SECONDARY_DB=WEBAPPMisc
# shellcheck disable=SC2211,SC2276
DOTNET_ENVIRONMENT?=Development
# shellcheck disable=SC2211,SC2276
@@ -20,12 +26,35 @@ export SKIP_GLOBAL_JSON_HANDLING?=true
# shellcheck disable=SC2211,SC2276
BUILD_CONFIGURATION?=Debug
-.PHONY: clean check restore build build-release https migrate test run-ddl-pipeline docker-build run dev stop-dev db-start db-stop db-logs db-drop ms-logs ms-drop cleanup-nested-dirs shutdown-build-servers
+.PHONY: clean check restore build build-release https db-migrate db-seed ms-migrate ms-seed test run-ddl-pipeline docker-build run dev stop-dev db-start db-stop db-logs db-destroy db-create db-drop db-check ms-status ms-start ms-stop ms-check ms-logs ms-drop cleanup-nested-dirs shutdown-build-servers all _ensure-pipeline _incremental-pipeline ms-init-schema compose-up compose-down _compose-build
clean:
$(DOTNET) clean DotNetWebApp.sln
@$(MAKE) cleanup-nested-dirs
rm -f msbuild.binlog
+ @# Remove all generated files
+ rm -rf DotNetWebApp.Models/Generated/*
+ rm -rf DotNetWebApp.Models/ViewModels/*.generated.cs
+ rm -f Migrations/*.cs
+ rm -f app.yaml
+ rm -f data.yaml
+ rm -f sql/idempotent-migration.sql
+
+# Full rebuild from scratch: clean, drop databases, regenerate models, build, test, migrate, and seed
+# This is the definitive target for a complete fresh start
+all: clean db-drop run-ddl-pipeline test db-migrate db-seed
+ @echo ""
+ @echo "╔════════════════════════════════════════════════════╗"
+ @echo "║ ✅ FULL PIPELINE RUN SUCCESSFUL ║"
+ @echo "╚════════════════════════════════════════════════════╝"
+ @echo ""
+ @echo "🚀 Next steps:"
+ @echo " make dev - Start dev server with hot reload"
+ @echo " ./verify.sh - Run end-to-end integration tests"
+ @echo ""
+
+ms-all: clean ms-drop run-ddl-pipeline test ms-migrate ms-seed
+ @echo "MSSQL all completed!"
# Internal helper: Remove nested project directories created by MSBuild during build/test
# Prevents inotify watch exhaustion on Linux (limit: 65,536)
@@ -43,45 +72,115 @@ shutdown-build-servers:
@echo "Build servers stopped."
https:
- $(DOTNET) dev-certs https
+ $(DOTNET) dev-certs https -ep ./dotnetwebapp.crt --format PEM --no-password
+ @echo "✅ Certificates exported to ./dotnetwebapp.crt and ./dotnetwebapp.key"
+ @echo ""
+ @echo "To use with Nginx, move them to /etc/nginx/ssl/:"
+ @echo " sudo mkdir -p /etc/nginx/ssl"
+ @echo " sudo mv dotnetwebapp.crt /etc/nginx/ssl/"
+ @echo " sudo mv dotnetwebapp.key /etc/nginx/ssl/"
+ @echo " sudo nginx -t && sudo systemctl reload nginx"
check:
shellcheck setup.sh
shellcheck dotnet-build.sh
shellcheck verify.sh
shellcheck Makefile
+ shellcheck scripts/seed-full-month.sh
+ shellcheck scripts/docker.sh
+ shellcheck scripts/mssql.sh
+ shellcheck docker/entrypoint.sh
$(DOTNET) format whitespace DotNetWebApp.csproj
$(DOTNET) format style DotNetWebApp.csproj
+ @# Regenerate if generated files are missing - check for app.yaml as indicator
+ @test -f app.yaml || $(MAKE) run-ddl-pipeline
$(MAKE) restore
$(MAKE) build
restore:
$(DOTNET) restore DotNetWebApp.sln
+# Internal helper: Ensure generated files exist without full clean
+# Checks for app.yaml as indicator - if missing, regenerate everything
+_ensure-pipeline:
+ @if [ ! -f app.yaml ] || [ ! -d DotNetWebApp.Models/Generated ] || [ ! -d Migrations ] || [ -z "$$(find Migrations -name '*InitialCreate*.cs' 2>/dev/null)" ]; then \
+ echo "Generated files missing, running incremental regeneration..."; \
+ $(MAKE) _incremental-pipeline; \
+ fi
+
+# Internal helper: Incremental pipeline without full clean
+# Only deletes generated models, not build artifacts
+# Steps 1-7 from run-ddl-pipeline but without clean dependency or final build call
+_incremental-pipeline:
+ @echo "Starting incremental DDL pipeline..."
+ @echo " -- Step 1: Parsing DDL to data.yaml (intermediate, dataModel only)..."
+ cd DdlParser && "../$(DOTNET)" run -- ../sql/schema.sql ../data.yaml
+ @echo ""
+ @echo " -- Step 2: Merging ViewDefinitions from appsettings.json into data.yaml (modifies in place; intermediate now contains dataModel + views)..."
+ cd YamlMerger && "../$(DOTNET)" run ../data.yaml ../appsettings.json
+ @echo ""
+ @echo " -- Step 3: Cleaning old Generated models and generating C# models from data.yaml..."
+ rm -rf DotNetWebApp.Models/Generated/*
+ cd ModelGenerator && "../$(DOTNET)" run ../data.yaml
+ @echo ""
+ @echo " -- Step 4: Generating view models from data.yaml..."
+ cd ModelGenerator && "../$(DOTNET)" run -- --mode=views --views-yaml=../data.yaml --output-dir=../DotNetWebApp.Models/ViewModels
+ @echo ""
+ @echo " -- Step 5: Merging appsettings.json + data.yaml → app.yaml (final)..."
+ cd AppsYamlGenerator && "../$(DOTNET)" run -- ../appsettings.json ../data.yaml ../app.yaml
+ @echo ""
+ @echo " -- Step 6: Cleaning up intermediate data.yaml..."
+ rm -f data.yaml
+ @echo ""
+ @echo " -- Step 7: Regenerating EF Core migration..."
+ rm -f Migrations/*.cs
+ $(DOTNET) build DotNetWebApp.csproj --configuration "$(BUILD_CONFIGURATION)" -maxcpucount:2 --nologo
+ $(DOTNET) ef migrations add InitialCreate --output-dir Migrations --context AppDbContext --no-build
+ @echo ""
+ @echo "✅ Incremental pipeline completed!"
+
# Build with configurable configuration (Debug by default for fast dev iteration)
# Builds entire solution including test projects with reduced parallelism
# Note: Reduced parallelism (-maxcpucount:2) to prevent memory exhaustion
# If error(s) contain "Run a NuGet package restore", try 'make restore'
-build:
- $(DOTNET) build DotNetWebApp.sln --configuration "$(BUILD_CONFIGURATION)" --no-restore -maxcpucount:2 --nologo
+build: _ensure-pipeline
+ $(DOTNET) build DotNetWebApp.sln --configuration "$(BUILD_CONFIGURATION)" -maxcpucount:2 --nologo
@$(MAKE) cleanup-nested-dirs
# Build with Release configuration for production deployments
# This target always uses Release regardless of BUILD_CONFIGURATION variable
-build-release:
- $(DOTNET) build DotNetWebApp.sln --configuration Release --no-restore -maxcpucount:2 --nologo
+build-release: _ensure-pipeline
+ $(DOTNET) build DotNetWebApp.sln --configuration Release
@$(MAKE) cleanup-nested-dirs
-migrate: build
- ASPNETCORE_ENVIRONMENT=$(ASPNETCORE_ENVIRONMENT) DOTNET_ENVIRONMENT=$(DOTNET_ENVIRONMENT) $(DOTNET) ef database update
+# Idempotent migration via Docker - safe to run multiple times against existing databases
+# For databases with existing tables, marks migration as applied without re-creating tables
+# Uses container's sqlcmd (same pattern as db-drop)
+# Automatically initializes database schema from sql/schema.sql before applying EF migrations
+db-migrate: build
+ @echo "Generating idempotent migration script..."
+ $(DOTNET) ef migrations script --idempotent --output sql/idempotent-migration.sql --context AppDbContext
+ @# Remove UTF-8 BOM if present - sqlcmd does not handle it
+ @sed -i '1s/^\xEF\xBB\xBF//' sql/idempotent-migration.sql 2>/dev/null || true
+ @if docker inspect sqlserver-dev > /dev/null 2>&1; then \
+ bash scripts/docker.sh init-schema && bash scripts/docker.sh migrate; \
+ else \
+ echo "⚠️ Container sqlserver-dev not running, skipping DB migration. Run 'make db-create' or 'make compose-up'."; \
+ fi
-seed:
- $(DOTNET) run --project DotNetWebApp.csproj -- --seed
+# Seed database via Docker sqlcmd - runs sql/seed.sql against Docker SQL Server
+# Note: seed.sql contains USE [$(PRIMARY_DB)] and USE [$(SECONDARY_DB)] statements, so we connect to master
+db-seed:
+ @if docker inspect sqlserver-dev > /dev/null 2>&1; then \
+ bash scripts/docker.sh seed; \
+ else \
+ echo "⚠️ Container sqlserver-dev not running, skipping seed. Run 'make db-create' or 'make compose-up'."; \
+ fi
# Run tests with same configuration as build target for consistency
# Builds and runs test projects sequentially to avoid memory exhaustion
# Note: Cleans up nested project directories after build to prevent inotify exhaustion on Linux
-test:
+test: build
$(DOTNET) build tests/DotNetWebApp.Tests/DotNetWebApp.Tests.csproj --configuration "$(BUILD_CONFIGURATION)" --no-restore --nologo
$(DOTNET) test tests/DotNetWebApp.Tests/DotNetWebApp.Tests.csproj --configuration "$(BUILD_CONFIGURATION)" --no-build --no-restore --nologo
$(DOTNET) build tests/ModelGenerator.Tests/ModelGenerator.Tests.csproj --configuration "$(BUILD_CONFIGURATION)" --no-restore --nologo
@@ -104,7 +203,8 @@ run-ddl-pipeline: clean
@echo " -- Step 2: Merging ViewDefinitions from appsettings.json into data.yaml (modifies in place; intermediate now contains dataModel + views)..."
cd YamlMerger && "../$(DOTNET)" run ../data.yaml ../appsettings.json
@echo ""
- @echo " -- Step 3: Generating C# models from data.yaml..."
+ @echo " -- Step 3: Cleaning old Generated models and generating C# models from data.yaml..."
+ rm -rf DotNetWebApp.Models/Generated/*
cd ModelGenerator && "../$(DOTNET)" run ../data.yaml
@echo ""
@echo " -- Step 4: Generating view models from data.yaml..."
@@ -118,22 +218,54 @@ run-ddl-pipeline: clean
@echo ""
@echo " -- Step 7: Regenerating EF Core migration..."
rm -f Migrations/*.cs
- $(DOTNET) build DotNetWebApp.csproj --configuration "$(BUILD_CONFIGURATION)" --no-restore -maxcpucount:2 --nologo
+ $(DOTNET) build DotNetWebApp.csproj --configuration "$(BUILD_CONFIGURATION)" -maxcpucount:2 --nologo
$(DOTNET) ef migrations add InitialCreate --output-dir Migrations --context AppDbContext --no-build
@echo ""
- @echo " -- Step 8: Building project..."
- $(MAKE) build
- @echo ""
@echo "✅ DDL pipeline completed!"
@echo ""
- @echo "🚀 Next: Run 'make dev' to start the application"
+ @echo "🚀 Next: Run 'make db-migrate', then 'make db-seed' (or MSSQL equivalents), then 'make dev'"
docker-build:
docker build -t "$(IMAGE_NAME):$(TAG)" .
-# Run the application once without hot reload (uses Debug by default unless BUILD_CONFIGURATION=Release)
-run:
- $(DOTNET) run --project DotNetWebApp.csproj --configuration "$(BUILD_CONFIGURATION)"
+# Build Docker Compose application image - always bypasses cache to ensure generated files are baked in
+_compose-build:
+ @docker compose build --no-cache dotnetwebapp
+
+# Start the full Docker Compose stack with database initialization
+# Starts SQL Server first, waits for health check, initializes schema + migrations, seeds data, then starts app
+# Requires SA_PASSWORD environment variable (load via: source .envrc or export SA_PASSWORD=...)
+compose-up: _compose-build
+ @[ -n "$$SA_PASSWORD" ] || { echo "Error: SA_PASSWORD environment variable required" >&2; echo " export SA_PASSWORD='YourStrongPassword123!'" >&2; exit 1; }
+ @echo "Removing any pre-existing standalone containers..."
+ @docker rm -f sqlserver-dev 2>/dev/null || true
+ @docker rm -f dotnetwebapp 2>/dev/null || true
+ @echo "Starting SQL Server and waiting for health check..."
+ @docker compose up -d --wait sqlserver
+ @echo "Initializing databases..."
+ $(MAKE) db-migrate
+ @echo "Seeding data..."
+ $(MAKE) db-seed
+ @echo "Starting application container..."
+ @docker compose up -d dotnetwebapp
+ @echo ""
+ @echo "╔════════════════════════════════════════════════════╗"
+ @echo "║ ✅ Docker stack is up ║"
+ @echo "╚════════════════════════════════════════════════════╝"
+ @echo ""
+ @echo " App: http://localhost:5210"
+ @echo " Logs: docker compose logs -f dotnetwebapp"
+ @echo ""
+
+# Stop and remove all Docker Compose containers (preserves volumes/data)
+# Drops databases first while container is still running to ensure clean state on next compose-up
+compose-down:
+ @bash scripts/docker.sh drop 2>/dev/null || true
+ @docker compose down
+
+# Run in Release (production-like) mode
+run: build-release
+ $(DOTNET) run --project DotNetWebApp.csproj --configuration Release
# Run the application with hot reload (use for active development - auto-reloads on file changes)
# Always uses Debug configuration for fastest rebuild times during watch mode
@@ -160,34 +292,45 @@ db-stop:
db-logs:
@docker logs -f sqlserver-dev
+# Completely destroy the SQL Server Docker container (for clean slate)
+db-destroy:
+ @echo "Stopping and removing sqlserver-dev container..."
+ @docker stop sqlserver-dev 2>/dev/null || true
+ @docker rm sqlserver-dev 2>/dev/null || true
+ @rm -f Migrations/*.cs && echo "Cleared old EF Core migrations."
+ @echo "Container destroyed. Run 'make db-create' to recreate."
+
+# Create a fresh SQL Server Docker container (requires SA_PASSWORD env var)
+db-create:
+ @[ -n "$$SA_PASSWORD" ] || { echo "Error: SA_PASSWORD environment variable required" >&2; echo " export SA_PASSWORD='YourStrongPassword123!'" >&2; exit 1; }
+ @echo "Creating sqlserver-dev container..."
+ @docker run -e "ACCEPT_EULA=Y" \
+ -e "MSSQL_SA_PASSWORD=$$SA_PASSWORD" \
+ -p 1433:1433 \
+ --name sqlserver-dev \
+ --hostname sqlserver \
+ -d mcr.microsoft.com/mssql/server:2022-latest
+ @echo "Waiting for SQL Server to start (30s)..."
+ @sleep 30
+	@echo "Container created. Run 'make db-migrate' to initialize database."
+
# Tail native SQL Server logs (systemd + errorlog)
ms-logs:
@echo "Tailing systemd and errorlog (Ctrl+C to stop)..."
@sudo sh -c 'journalctl -u mssql-server -f --no-pager & tail -f /var/opt/mssql/log/errorlog; wait'
-# Drop the local dev database (uses SA_PASSWORD or container MSSQL_SA_PASSWORD)
+# Drop the local dev databases (uses SA_PASSWORD or container MSSQL_SA_PASSWORD)
+# Drops $(PRIMARY_DB), $(SECONDARY_DB), and DotNetWebAppDb databases
+# Also removes EF Core migrations to ensure clean slate when schema.sql changes
db-drop:
- # shellcheck disable=SC2016
- @docker exec -i -e SA_PASSWORD="$$SA_PASSWORD" sqlserver-dev /bin/sh -c '\
- PASSWORD="$$SA_PASSWORD"; \
- if [ -z "$$PASSWORD" ] && [ -n "$$MSSQL_SA_PASSWORD" ]; then \
- PASSWORD="$$MSSQL_SA_PASSWORD"; \
- fi; \
- if [ -z "$$PASSWORD" ]; then \
- echo "SA_PASSWORD is required (export SA_PASSWORD=...)" >&2; \
- exit 1; \
- fi; \
- if [ -x /opt/mssql-tools/bin/sqlcmd ]; then \
- SQLCMD=/opt/mssql-tools/bin/sqlcmd; \
- elif [ -x /opt/mssql-tools18/bin/sqlcmd ]; then \
- SQLCMD=/opt/mssql-tools18/bin/sqlcmd; \
- else \
- echo "sqlcmd not found in container." >&2; \
- exit 1; \
- fi; \
- $$SQLCMD -S localhost -U sa -P "$$PASSWORD" -C \
- -Q "IF DB_ID('"'"'DotNetWebAppDb'"'"') IS NOT NULL BEGIN ALTER DATABASE [DotNetWebAppDb] SET SINGLE_USER WITH ROLLBACK IMMEDIATE; DROP DATABASE [DotNetWebAppDb]; END"; \
- echo "Dropped database DotNetWebAppDb (if it existed)."'
+ @rm -f Migrations/*.cs && echo "Cleared old EF Core migrations."
+ @bash scripts/docker.sh drop
+
+# Check and create required databases if they don't exist
+# Useful for verifying database health and recreating them without full reset
+# Can be extended in the future with more checks (backups, index verification, etc.)
+db-check:
+ @bash scripts/docker.sh check
# Local install of MSSQL (no Docker)
ms-status:
@@ -197,8 +340,20 @@ ms-status:
ms-start:
sudo systemctl start mssql-server
-# Drop the database from native MSSQL instance on Linux
-ms-drop:
+ms-stop:
+ sudo systemctl stop mssql-server
+
+# Check and create required databases if they don't exist (native MSSQL)
+# Useful for verifying database health and recreating them without full reset
+ms-check:
+ @bash scripts/mssql.sh check
+
+# Internal helper: Initialize database schema from sql/schema.sql via native sqlcmd
+# Creates required databases ($(PRIMARY_DB), $(SECONDARY_DB)) and all tables defined in sql/schema.sql
+# Called automatically by ms-migrate - do not call directly
+ms-init-schema:
+ @bash scripts/mssql.sh check > /dev/null
+ @echo "Initializing database schema from sql/schema.sql..."
# shellcheck disable=SC2016
@/bin/sh -c '\
PASSWORD="$$SA_PASSWORD"; \
@@ -209,6 +364,27 @@ ms-drop:
echo "SA_PASSWORD is required (export SA_PASSWORD=...)" >&2; \
exit 1; \
fi; \
- sqlcmd -S localhost -U sa -P "$$PASSWORD" -C \
- -Q "IF DB_ID('"'"'DotNetWebAppDb'"'"') IS NOT NULL BEGIN ALTER DATABASE [DotNetWebAppDb] SET SINGLE_USER WITH ROLLBACK IMMEDIATE; DROP DATABASE [DotNetWebAppDb]; END"; \
- echo "Dropped database DotNetWebAppDb (if it existed)."'
+ echo "Applying schema from sql/schema.sql..."; \
+ sqlcmd -S localhost -U sa -P "$$PASSWORD" -C -i sql/schema.sql'
+ @echo "✅ Database schema initialized successfully"
+
+# Idempotent migration via native sqlcmd - safe to run multiple times against existing databases
+# For production MSSQL Server environments
+ms-migrate: build ms-init-schema
+ @echo "Generating idempotent migration script..."
+ $(DOTNET) ef migrations script --idempotent --output sql/idempotent-migration.sql --context AppDbContext
+ @# Remove UTF-8 BOM if present - sqlcmd does not handle it
+ @sed -i '1s/^\xEF\xBB\xBF//' sql/idempotent-migration.sql 2>/dev/null || true
+ @bash scripts/mssql.sh migrate
+
+# Seed database via native sqlcmd - runs sql/seed.sql against native MSSQL Server
+# Note: seed.sql contains USE [$(PRIMARY_DB)] and USE [$(SECONDARY_DB)] statements, so we connect to master
+ms-seed:
+ @bash scripts/mssql.sh seed
+
+# Drop the databases from native MSSQL instance on Linux
+# Drops $(PRIMARY_DB), $(SECONDARY_DB), and DotNetWebAppDb databases
+# Also removes EF Core migrations to ensure clean slate when schema.sql changes
+ms-drop:
+ @rm -f Migrations/*.cs && echo "Cleared old EF Core migrations."
+ @bash scripts/mssql.sh drop
diff --git a/Migrations/.gitignore b/Migrations/.gitignore
deleted file mode 100644
index 377ccd3..0000000
--- a/Migrations/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*
-!.gitkeep
diff --git a/Migrations/.gitkeep b/Migrations/.gitkeep
deleted file mode 100644
index 8b13789..0000000
--- a/Migrations/.gitkeep
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/Migrations/20260317234153_InitialCreate.Designer.cs b/Migrations/20260317234153_InitialCreate.Designer.cs
new file mode 100644
index 0000000..404f3c8
--- /dev/null
+++ b/Migrations/20260317234153_InitialCreate.Designer.cs
@@ -0,0 +1,1012 @@
+// <auto-generated />
+using System;
+using DotNetWebApp.Data;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Metadata;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+
+#nullable disable
+
+namespace DotNetWebApp.Migrations
+{
+ [DbContext(typeof(AppDbContext))]
+ [Migration("20260317234153_InitialCreate")]
+ partial class InitialCreate
+ {
+        /// <inheritdoc />
+ protected override void BuildTargetModel(ModelBuilder modelBuilder)
+ {
+#pragma warning disable 612, 618
+ modelBuilder
+ .HasDefaultSchema("dbo")
+ .HasAnnotation("ProductVersion", "8.0.25")
+ .HasAnnotation("Relational:MaxIdentifierLength", 128);
+
+ SqlServerModelBuilderExtensions.UseIdentityColumns(modelBuilder);
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPP.Customers", b =>
+ {
+ b.Property("cu_id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("cu_id"));
+
+ b.Property("cu_active")
+ .HasColumnType("bit");
+
+ b.Property("cu_city")
+ .IsRequired()
+ .HasMaxLength(40)
+ .HasColumnType("nvarchar(40)");
+
+ b.Property("cu_code")
+ .IsRequired()
+ .HasMaxLength(30)
+ .HasColumnType("nvarchar(30)");
+
+ b.Property("cu_contact")
+ .IsRequired()
+ .HasMaxLength(60)
+ .HasColumnType("nvarchar(60)");
+
+ b.Property("cu_country")
+ .IsRequired()
+ .HasMaxLength(40)
+ .HasColumnType("nvarchar(40)");
+
+ b.Property("cu_created")
+ .HasColumnType("datetime2");
+
+ b.Property("cu_email")
+ .IsRequired()
+ .HasMaxLength(100)
+ .HasColumnType("nvarchar(100)");
+
+ b.Property("cu_name")
+ .IsRequired()
+ .HasMaxLength(60)
+ .HasColumnType("nvarchar(60)");
+
+ b.Property("cu_notes")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("cu_phone")
+ .IsRequired()
+ .HasMaxLength(30)
+ .HasColumnType("nvarchar(30)");
+
+ b.Property("cu_state")
+ .IsRequired()
+ .HasMaxLength(40)
+ .HasColumnType("nvarchar(40)");
+
+ b.Property("cu_street")
+ .IsRequired()
+ .HasMaxLength(60)
+ .HasColumnType("nvarchar(60)");
+
+ b.Property("cu_zip")
+ .IsRequired()
+ .HasMaxLength(20)
+ .HasColumnType("nvarchar(20)");
+
+ b.HasKey("cu_id");
+
+ b.ToTable("customers", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPP.Inventory", b =>
+ {
+ b.Property("in_id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("in_id"));
+
+ b.Property("in_expiry")
+ .HasColumnType("datetime2");
+
+ b.Property("in_lot")
+ .IsRequired()
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("in_product_id")
+ .HasColumnType("int");
+
+ b.Property("in_qty")
+ .HasColumnType("int");
+
+ b.Property("in_received")
+ .HasColumnType("datetime2");
+
+ b.Property("in_reserved")
+ .HasColumnType("int");
+
+ b.Property("in_warehouse_id")
+ .HasColumnType("int");
+
+ b.HasKey("in_id");
+
+ b.ToTable("inventory", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPP.Order_lines", b =>
+ {
+ b.Property("ol_id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("ol_id"));
+
+ b.Property("ol_notes")
+ .IsRequired()
+ .HasMaxLength(200)
+ .HasColumnType("nvarchar(200)");
+
+ b.Property("ol_order_id")
+ .HasColumnType("int");
+
+ b.Property("ol_price")
+ .HasColumnType("decimal(12, 2)")
+ .HasColumnName("ol_price");
+
+ b.Property("ol_product_id")
+ .HasColumnType("int");
+
+ b.Property("ol_qty")
+ .HasColumnType("int");
+
+ b.Property("ol_total")
+ .HasColumnType("decimal(12, 2)")
+ .HasColumnName("ol_total");
+
+ b.HasKey("ol_id");
+
+ b.ToTable("order_lines", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPP.Orders", b =>
+ {
+ b.Property("or_id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("or_id"));
+
+ b.Property("or_created")
+ .HasColumnType("datetime2");
+
+ b.Property("or_customer_id")
+ .HasColumnType("int");
+
+ b.Property("or_date")
+ .HasColumnType("datetime2");
+
+ b.Property("or_notes")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("or_number")
+ .HasColumnType("bigint");
+
+ b.Property("or_ship_date")
+ .HasColumnType("datetime2");
+
+ b.Property("or_status")
+ .IsRequired()
+ .HasMaxLength(30)
+ .HasColumnType("nvarchar(30)");
+
+ b.Property("or_total")
+ .HasColumnType("decimal(12, 2)")
+ .HasColumnName("or_total");
+
+ b.Property("or_warehouse_id")
+ .HasColumnType("int");
+
+ b.HasKey("or_id");
+
+ b.ToTable("orders", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPP.Products", b =>
+ {
+ b.Property("pr_id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("pr_id"));
+
+ b.Property("pr_active")
+ .HasColumnType("bit");
+
+ b.Property("pr_category")
+ .IsRequired()
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("pr_code")
+ .IsRequired()
+ .HasMaxLength(30)
+ .HasColumnType("nvarchar(30)");
+
+ b.Property("pr_cost")
+ .HasColumnType("decimal(12, 2)")
+ .HasColumnName("pr_cost");
+
+ b.Property("pr_created")
+ .HasColumnType("datetime2");
+
+ b.Property("pr_name")
+ .IsRequired()
+ .HasMaxLength(100)
+ .HasColumnType("nvarchar(100)");
+
+ b.Property("pr_notes")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("pr_price")
+ .HasColumnType("decimal(12, 2)")
+ .HasColumnName("pr_price");
+
+ b.Property("pr_unit")
+ .IsRequired()
+ .HasMaxLength(20)
+ .HasColumnType("nvarchar(20)");
+
+ b.Property("pr_updated")
+ .HasColumnType("datetime2");
+
+ b.HasKey("pr_id");
+
+ b.ToTable("products", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPP.Units_of_measure", b =>
+ {
+ b.Property("um_id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("um_id"));
+
+ b.Property("um_abbrev")
+ .IsRequired()
+ .HasMaxLength(10)
+ .HasColumnType("nvarchar(10)");
+
+ b.Property("um_name")
+ .IsRequired()
+ .HasMaxLength(30)
+ .HasColumnType("nvarchar(30)");
+
+ b.HasKey("um_id");
+
+ b.ToTable("units_of_measure", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPP.Vendors", b =>
+ {
+ b.Property("ve_id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("ve_id"));
+
+ b.Property("ve_active")
+ .HasColumnType("bit");
+
+ b.Property("ve_city")
+ .IsRequired()
+ .HasMaxLength(40)
+ .HasColumnType("nvarchar(40)");
+
+ b.Property("ve_code")
+ .IsRequired()
+ .HasMaxLength(30)
+ .HasColumnType("nvarchar(30)");
+
+ b.Property("ve_contact")
+ .IsRequired()
+ .HasMaxLength(60)
+ .HasColumnType("nvarchar(60)");
+
+ b.Property("ve_name")
+ .IsRequired()
+ .HasMaxLength(60)
+ .HasColumnType("nvarchar(60)");
+
+ b.Property("ve_notes")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("ve_phone")
+ .IsRequired()
+ .HasMaxLength(30)
+ .HasColumnType("nvarchar(30)");
+
+ b.Property("ve_state")
+ .IsRequired()
+ .HasMaxLength(40)
+ .HasColumnType("nvarchar(40)");
+
+ b.Property("ve_street")
+ .IsRequired()
+ .HasMaxLength(60)
+ .HasColumnType("nvarchar(60)");
+
+ b.Property("ve_zip")
+ .IsRequired()
+ .HasMaxLength(20)
+ .HasColumnType("nvarchar(20)");
+
+ b.HasKey("ve_id");
+
+ b.ToTable("vendors", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPP.Warehouses", b =>
+ {
+ b.Property("wa_id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("wa_id"));
+
+ b.Property("wa_active")
+ .HasColumnType("bit");
+
+ b.Property("wa_city")
+ .IsRequired()
+ .HasMaxLength(40)
+ .HasColumnType("nvarchar(40)");
+
+ b.Property("wa_code")
+ .IsRequired()
+ .HasMaxLength(20)
+ .HasColumnType("nvarchar(20)");
+
+ b.Property("wa_name")
+ .IsRequired()
+ .HasMaxLength(60)
+ .HasColumnType("nvarchar(60)");
+
+ b.Property("wa_state")
+ .IsRequired()
+ .HasMaxLength(40)
+ .HasColumnType("nvarchar(40)");
+
+ b.Property("wa_street")
+ .IsRequired()
+ .HasMaxLength(60)
+ .HasColumnType("nvarchar(60)");
+
+ b.Property("wa_zip")
+ .IsRequired()
+ .HasMaxLength(20)
+ .HasColumnType("nvarchar(20)");
+
+ b.HasKey("wa_id");
+
+ b.ToTable("warehouses", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPPMisc.AcidCorrection", b =>
+ {
+ b.Property("ac_id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("ac_id"));
+
+ b.Property("AcidCorrectionValue")
+ .HasColumnType("real")
+ .HasColumnName("AcidCorrection");
+
+ b.Property("PercentAcid")
+ .HasColumnType("float");
+
+ b.HasKey("ac_id");
+
+ b.ToTable("acidcorrection", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPPMisc.BrixChart", b =>
+ {
+ b.Property("id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("id"));
+
+ b.Property("Brix")
+ .HasColumnType("real");
+
+ b.Property("LbPerGallon")
+ .HasColumnType("float");
+
+ b.Property("PoundSolid")
+ .HasColumnType("float");
+
+ b.Property("RefractiveIndex")
+ .HasColumnType("float");
+
+ b.Property("SpecificGravity")
+ .HasColumnType("float");
+
+ b.HasKey("id");
+
+ b.ToTable("brixchart", "dbo");
+ });
+
+ modelBuilder.Entity("DotNetWebApp.Models.Generated.WEBAPPMisc.Webapp_allocate", b =>
+ {
+ b.Property("all_index")
+ .HasColumnType("int");
+
+ b.Property("all_id")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_id");
+
+ b.Property("all_ordernum")
+ .HasColumnType("bigint");
+
+ b.Property("all_codenum")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_userlot")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_carrier")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_chr1")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_chr2")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_chr3")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_chr4")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_chr5")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_chr6")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_chx1")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("all_chx2")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("all_chx3")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("all_chx4")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("all_chx5")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("all_chx6")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("all_date")
+ .HasColumnType("datetime2");
+
+ b.Property("all_date1")
+ .HasColumnType("datetime2");
+
+ b.Property("all_date2")
+ .HasColumnType("datetime2");
+
+ b.Property("all_date3")
+ .HasColumnType("datetime2");
+
+ b.Property("all_date4")
+ .HasColumnType("datetime2");
+
+ b.Property("all_date5")
+ .HasColumnType("datetime2");
+
+ b.Property("all_date6")
+ .HasColumnType("datetime2");
+
+ b.Property("all_dec1")
+ .HasColumnType("decimal(18, 8)")
+ .HasColumnName("all_dec1");
+
+ b.Property("all_dec3")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_dec3");
+
+ b.Property("all_dec4")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_dec4");
+
+ b.Property("all_dec5")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_dec5");
+
+ b.Property("all_dec6")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_dec6");
+
+ b.Property("all_description")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("all_gwt")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_gwt");
+
+ b.Property("all_int1")
+ .HasColumnType("int");
+
+ b.Property("all_int2")
+ .HasColumnType("int");
+
+ b.Property("all_int3")
+ .HasColumnType("int");
+
+ b.Property("all_int4")
+ .HasColumnType("int");
+
+ b.Property("all_int5")
+ .HasColumnType("int");
+
+ b.Property("all_int6")
+ .HasColumnType("int");
+
+ b.Property("all_notes")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("all_num1")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_num1");
+
+ b.Property("all_num2")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_num2");
+
+ b.Property("all_num3")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_num3");
+
+ b.Property("all_nwt")
+ .HasColumnType("decimal(18, 0)")
+ .HasColumnName("all_nwt");
+
+ b.Property("all_pick")
+ .HasColumnType("int");
+
+ b.Property