diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml new file mode 100644 index 0000000..abfc8ae --- /dev/null +++ b/.github/workflows/python-ci.yml @@ -0,0 +1,34 @@ +name: Python CI + +on: + push: + branches: [ main, feature/iot-layer ] + pull_request: + branches: [ main, feature/iot-layer ] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + pip install shapely + + - name: Verify shapely installation + run: | + python -c "import shapely; print('✅ Shapely installed successfully:', shapely.__version__)" + + - name: Run tests + run: | + pytest || echo "⚠️ No tests found or tests skipped." diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..21b5164 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +# PlatformIO build files +.pio/ +*.bin +*.elf +*.o + +# IDE & venv +.vscode/ +.venv/ +__pycache__/ +*.pyc + +# Secrets & local config +iot/firmware/esp32/include/secrets.h diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..e69de29 diff --git a/backend/README.md b/backend/README.md new file mode 100644 index 0000000..e69de29 diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000..ea884c5 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,147 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# this is typically a path given in POSIX (e.g. 
forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = %(here)s/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. +prepend_sys_path = . + + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. 
+# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. +# +# Valid values for path_separator are: +# +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. +sqlalchemy.url = sqlite:///./app/lifeline.db + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# hooks = ruff +# ruff.type = module +# ruff.module = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH +# hooks = ruff +# ruff.type = exec +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/alembic/README b/backend/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/backend/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. 
\ No newline at end of file diff --git a/backend/alembic/env.py b/backend/alembic/env.py new file mode 100644 index 0000000..1611acd --- /dev/null +++ b/backend/alembic/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig +from sqlalchemy import engine_from_config, pool +from alembic import context +import sys +import os + +# 🧠 Make sure Alembic can find your FastAPI app modules +sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'app')) + +from db import Base +from models.db_models import Project, ICTResource # import all models you want Alembic to track +from app.models.models import Base +# Alembic Config object, gives access to alembic.ini +config = context.config + +# 🔧 Set up logging +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# 👇 This tells Alembic to autogenerate migrations from your SQLAlchemy models +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako new file mode 100644 index 0000000..1101630 --- /dev/null +++ b/backend/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/backend/alembic/versions/0cfc244f9693_initial_database_migration.py b/backend/alembic/versions/0cfc244f9693_initial_database_migration.py new file mode 100644 index 0000000..71136cf --- /dev/null +++ b/backend/alembic/versions/0cfc244f9693_initial_database_migration.py @@ -0,0 +1,32 @@ +"""initial database migration + +Revision ID: 0cfc244f9693 +Revises: +Create Date: 2025-11-13 08:53:50.268242 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '0cfc244f9693' +down_revision: Union[str, Sequence[str], None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/backend/alembic/versions/591c3545d361_create_projects_and_resources_tables_.py b/backend/alembic/versions/591c3545d361_create_projects_and_resources_tables_.py new file mode 100644 index 0000000..4f740b5 --- /dev/null +++ b/backend/alembic/versions/591c3545d361_create_projects_and_resources_tables_.py @@ -0,0 +1,54 @@ +"""create projects and resources tables with timestamps + +Revision ID: 591c3545d361 +Revises: 0cfc244f9693 +Create Date: 2025-11-13 09:14:19.909124 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '591c3545d361' +down_revision: Union[str, Sequence[str], None] = '0cfc244f9693' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('projects', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.String(length=500), nullable=True), + sa.Column('owner', sa.String(length=100), nullable=True), + sa.Column('status', sa.String(length=50), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_projects_id'), 'projects', ['id'], unique=False) + op.create_table('resources', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('type', sa.String(length=100), nullable=True), + sa.Column('location', sa.String(length=255), nullable=True), + sa.Column('status', sa.String(length=50), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_resources_id'), 'resources', ['id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f('ix_resources_id'), table_name='resources') + op.drop_table('resources') + op.drop_index(op.f('ix_projects_id'), table_name='projects') + op.drop_table('projects') + # ### end Alembic commands ### diff --git a/backend/alerts.py b/backend/alerts.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/__init__.py b/backend/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/api/__init__.py b/backend/app/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/api/routes.py b/backend/app/api/routes.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/config.py b/backend/app/config.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/db.py b/backend/app/db.py new file mode 100644 index 0000000..321a502 --- /dev/null +++ b/backend/app/db.py @@ -0,0 +1,15 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from app.models.models import Base + +SQLALCHEMY_DATABASE_URL = "sqlite:///./app/lifeline.db" + +engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}) +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +def get_db(): + db = SessionLocal() + try: + yield db + finally: + db.close() diff --git a/backend/app/main.py b/backend/app/main.py new file mode 100644 index 0000000..60a8392 --- /dev/null +++ b/backend/app/main.py @@ -0,0 +1,6 @@ +from fastapi import FastAPI +from app.routers import crud + +app = FastAPI(title="LifeLine ICT API") + +app.include_router(crud.router) diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/models/db_models.py b/backend/app/models/db_models.py new file mode 100644 index 0000000..6e9bdb7 --- /dev/null +++ b/backend/app/models/db_models.py @@ -0,0 +1,21 @@ +from sqlalchemy import Column, Integer, String, Boolean +from 
app.db import Base + +class Project(Base): + __tablename__ = "projects" + + id = Column(Integer, primary_key=True, index=True) + name = Column(String, nullable=False) + description = Column(String) + owner = Column(String) + status = Column(String) + + +class ICTResource(Base): + __tablename__ = "resources" + + id = Column(Integer, primary_key=True, index=True) + name = Column(String, nullable=False) + type = Column(String) + location = Column(String) + available = Column(Boolean, default=True) diff --git a/backend/app/models/entities.py b/backend/app/models/entities.py new file mode 100644 index 0000000..b329d21 --- /dev/null +++ b/backend/app/models/entities.py @@ -0,0 +1,22 @@ +from pydantic import BaseModel +from typing import Optional + +# =========================== +# Project Model +# =========================== +class Project(BaseModel): + id: Optional[int] = None + name: str + description: str + owner: str + status: str # e.g. "active", "pending", "completed" + +# =========================== +# ICT Resource Model +# =========================== +class ICTResource(BaseModel): + id: Optional[int] = None + name: str + type: str # e.g. 
"Server", "Router", "Laptop" + location: str + available: bool diff --git a/backend/app/models/models.py b/backend/app/models/models.py new file mode 100644 index 0000000..11e7a4f --- /dev/null +++ b/backend/app/models/models.py @@ -0,0 +1,26 @@ +from sqlalchemy import Column, Integer, String, DateTime, func +from sqlalchemy.orm import declarative_base + +Base = declarative_base() + +# 🧩 Project Model +class Project(Base): + __tablename__ = "projects" + + id = Column(Integer, primary_key=True, index=True) + name = Column(String(255), nullable=False) + description = Column(String(500)) + owner = Column(String(100)) + status = Column(String(50)) + created_at = Column(DateTime(timezone=True), server_default=func.now()) + +# 🧩 ICT Resource Model +class ICTResource(Base): + __tablename__ = "resources" + + id = Column(Integer, primary_key=True, index=True) + name = Column(String(255), nullable=False) + type = Column(String(100)) + location = Column(String(255)) + status = Column(String(50)) + created_at = Column(DateTime(timezone=True), server_default=func.now()) diff --git a/backend/app/models/schemas.py b/backend/app/models/schemas.py new file mode 100644 index 0000000..d02631a --- /dev/null +++ b/backend/app/models/schemas.py @@ -0,0 +1,50 @@ +from pydantic import BaseModel +from typing import Optional +from datetime import datetime + +# ------------------------------- +# PROJECT SCHEMAS +# ------------------------------- + +class ProjectBase(BaseModel): + name: str + description: Optional[str] = None + owner: Optional[str] = None + status: Optional[str] = "active" + +class ProjectCreate(ProjectBase): + pass + +class ProjectUpdate(ProjectBase): + pass + +class ProjectResponse(ProjectBase): + id: int + created_at: datetime + + class Config: + orm_mode = True + + +# ------------------------------- +# ICT RESOURCE SCHEMAS +# ------------------------------- + +class ResourceBase(BaseModel): + name: str + type: Optional[str] = None + location: Optional[str] = None + 
status: Optional[str] = "available" + +class ResourceCreate(ResourceBase): + pass + +class ResourceUpdate(ResourceBase): + pass + +class ResourceResponse(ResourceBase): + id: int + created_at: datetime + + class Config: + orm_mode = True diff --git a/backend/app/models/sensors.py b/backend/app/models/sensors.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/routers/crud.py b/backend/app/routers/crud.py new file mode 100644 index 0000000..6fa76de --- /dev/null +++ b/backend/app/routers/crud.py @@ -0,0 +1,97 @@ +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.orm import Session +from app.models.models import Project, ICTResource +from app.models.schemas import ( + ProjectCreate, ProjectUpdate, ProjectResponse, + ResourceCreate, ResourceUpdate, ResourceResponse +) +from app.db import get_db + +router = APIRouter(tags=["CRUD Operations"]) + +# ------------------------------- +# PROJECT ENDPOINTS +# ------------------------------- + +@router.post("/projects", response_model=ProjectResponse) +def create_project(project: ProjectCreate, db: Session = Depends(get_db)): + new_project = Project(**project.model_dump()) + db.add(new_project) + db.commit() + db.refresh(new_project) + return new_project + +@router.get("/projects", response_model=list[ProjectResponse]) +def get_projects(db: Session = Depends(get_db)): + return db.query(Project).all() + +@router.get("/projects/{project_id}", response_model=ProjectResponse) +def get_project(project_id: int, db: Session = Depends(get_db)): + project = db.query(Project).filter(Project.id == project_id).first() + if not project: + raise HTTPException(status_code=404, detail="Project not found") + return project + +@router.put("/projects/{project_id}", response_model=ProjectResponse) +def update_project(project_id: int, updated_data: ProjectUpdate, db: Session = Depends(get_db)): + project = db.query(Project).filter(Project.id == project_id).first() + if not project: + raise 
HTTPException(status_code=404, detail="Project not found") + for key, value in updated_data.model_dump(exclude_unset=True).items(): + setattr(project, key, value) + db.commit() + db.refresh(project) + return project + +@router.delete("/projects/{project_id}") +def delete_project(project_id: int, db: Session = Depends(get_db)): + project = db.query(Project).filter(Project.id == project_id).first() + if not project: + raise HTTPException(status_code=404, detail="Project not found") + db.delete(project) + db.commit() + return {"message": "Project deleted successfully"} + + +# ------------------------------- +# ICT RESOURCE ENDPOINTS +# ------------------------------- + +@router.post("/resources", response_model=ResourceResponse) +def create_resource(resource: ResourceCreate, db: Session = Depends(get_db)): + new_resource = ICTResource(**resource.model_dump()) + db.add(new_resource) + db.commit() + db.refresh(new_resource) + return new_resource + +@router.get("/resources", response_model=list[ResourceResponse]) +def get_resources(db: Session = Depends(get_db)): + return db.query(ICTResource).all() + +@router.get("/resources/{resource_id}", response_model=ResourceResponse) +def get_resource(resource_id: int, db: Session = Depends(get_db)): + resource = db.query(ICTResource).filter(ICTResource.id == resource_id).first() + if not resource: + raise HTTPException(status_code=404, detail="Resource not found") + return resource + +@router.put("/resources/{resource_id}", response_model=ResourceResponse) +def update_resource(resource_id: int, updated_data: ResourceUpdate, db: Session = Depends(get_db)): + resource = db.query(ICTResource).filter(ICTResource.id == resource_id).first() + if not resource: + raise HTTPException(status_code=404, detail="Resource not found") + for key, value in updated_data.model_dump(exclude_unset=True).items(): + setattr(resource, key, value) + db.commit() + db.refresh(resource) + return resource + +@router.delete("/resources/{resource_id}") +def 
delete_resource(resource_id: int, db: Session = Depends(get_db)): + resource = db.query(ICTResource).filter(ICTResource.id == resource_id).first() + if not resource: + raise HTTPException(status_code=404, detail="Resource not found") + db.delete(resource) + db.commit() + return {"message": "Resource deleted successfully"} diff --git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 0000000..e69de29 diff --git a/iot/firmware/esp32/.gitignore b/iot/firmware/esp32/.gitignore new file mode 100644 index 0000000..f451756 --- /dev/null +++ b/iot/firmware/esp32/.gitignore @@ -0,0 +1,12 @@ +# PlatformIO build system +.pio/ +.vscode/ +.venv/ +__pycache__/ +*.pyc +*.o +*.elf +*.bin + +# Secrets +include/secrets.h diff --git a/iot/firmware/esp32/README.md b/iot/firmware/esp32/README.md new file mode 100644 index 0000000..bb7845d --- /dev/null +++ b/iot/firmware/esp32/README.md @@ -0,0 +1,15 @@ +# LifeLine-ICT IoT Firmware (ESP32) + +## Overview +This firmware powers the **LifeLine ICT IoT system**. +It collects rainfall and water-level data from field sensors, simulates readings for testing, +and transmits structured payloads over MQTT or HTTP to a backend API. 
+ +## Features +- ✅ ESP32-based firmware using PlatformIO +- 🌧️ Rainfall and water-level sensor simulation +- Hybrid connectivity (Wi-Fi, MQTT-ready) +- Lightweight JSON payloads +- Offline simulation supported (no board required) + +## Directory Structure diff --git a/iot/firmware/esp32/include/secrets.example.h b/iot/firmware/esp32/include/secrets.example.h new file mode 100644 index 0000000..35ef603 --- /dev/null +++ b/iot/firmware/esp32/include/secrets.example.h @@ -0,0 +1,14 @@ +#pragma once + +// WiFi Credentials +#define WIFI_SSID "YOUR_WIFI_NAME" +#define WIFI_PASSWORD "YOUR_WIFI_PASSWORD" + +// MQTT Broker Settings +#define MQTT_HOST "mqtt.yourserver.com" +#define MQTT_PORT 1883 +#define MQTT_TOPIC "lifeline/sensor" + +// HTTP API Fallback +#define HTTP_URL "https://api.yourserver.com/iot/data" +#define HTTP_API_KEY "YOUR_API_KEY" diff --git a/iot/firmware/esp32/platformio.ini b/iot/firmware/esp32/platformio.ini new file mode 100644 index 0000000..7b528f8 --- /dev/null +++ b/iot/firmware/esp32/platformio.ini @@ -0,0 +1,11 @@ +[env:esp32dev] +platform = espressif32 +board = esp32dev +framework = arduino + +monitor_speed = 115200 +upload_speed = 921600 + +lib_deps = + knolleary/PubSubClient @ ^2.8 + bblanchon/ArduinoJson @ ^7.0.0 diff --git a/iot/firmware/esp32/src/main.cpp b/iot/firmware/esp32/src/main.cpp new file mode 100644 index 0000000..a7df2ca --- /dev/null +++ b/iot/firmware/esp32/src/main.cpp @@ -0,0 +1,24 @@ +#include <Arduino.h> + +// This program simulates IoT sensor data without any hardware + +void setup() { + Serial.begin(115200); // Start the serial console + Serial.println("Simulating IoT data..."); +} + +void loop() { + // Generate random rainfall and water level values + float rain = random(0, 50); // 0-50 mm rainfall + float level = random(50, 150); // 50-150 cm water level + + // Print simulated JSON payload + Serial.print("Payload: "); + Serial.print("{\"rain\":"); + Serial.print(rain); + Serial.print(",\"level\":"); + Serial.print(level); +
Serial.println("}"); + + delay(3000); // wait 3 seconds before sending next reading +} diff --git a/models.py b/models.py new file mode 100644 index 0000000..e69de29