Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ output/*
mosquitto/data/*
postgres/data/*
config.json
config.toml
spa

.claude/
Expand Down
80 changes: 33 additions & 47 deletions POSTGRES.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,25 +13,22 @@ MeshInfo now supports PostgreSQL as an alternative storage backend alongside JSO

## Configuration

Add the following to your `config.json`:

```json
{
"storage": {
"read_from": "json",
"write_to": ["json", "postgres"],
"postgres": {
"enabled": false,
"host": "postgres",
"port": 5432,
"database": "meshinfo",
"username": "postgres",
"password": "password",
"min_pool_size": 5,
"max_pool_size": 20
}
}
}
Add the following to your `config.toml`:

```toml
[storage]
read_from = "json"
write_to = ["json", "postgres"]

[storage.postgres]
enabled = false
host = "postgres"
port = 5432
database = "meshinfo"
username = "postgres"
password = "password"
min_pool_size = 5
max_pool_size = 20
```

### Configuration Options
Expand Down Expand Up @@ -68,15 +65,10 @@ To migrate existing JSON data to PostgreSQL:

### Step 1: Enable PostgreSQL

Update `config.json`:
```json
{
"storage": {
"postgres": {
"enabled": true
}
}
}
Update `config.toml`:
```toml
[storage.postgres]
enabled = true
```

### Step 2: Start PostgreSQL
Expand All @@ -102,17 +94,14 @@ The script will:

### Step 4: Enable Dual-Write

Update `config.json` to write to both backends:
```json
{
"storage": {
"read_from": "json",
"write_to": ["json", "postgres"],
"postgres": {
"enabled": true
}
}
}
Update `config.toml` to write to both backends:
```toml
[storage]
read_from = "json"
write_to = ["json", "postgres"]

[storage.postgres]
enabled = true
```

### Step 5: Verify Data Consistency
Expand All @@ -124,13 +113,10 @@ Update `config.json` to write to both backends:
### Step 6: Switch to PostgreSQL Reads

Once confident in data consistency, switch to direct PostgreSQL queries:
```json
{
"storage": {
"read_from": "postgres",
"write_to": ["json", "postgres"]
}
}
```toml
[storage]
read_from = "postgres"
write_to = ["json", "postgres"]
```

**Important**: When `read_from = "postgres"`, the API queries PostgreSQL directly without loading data into memory. This provides:
Expand Down Expand Up @@ -222,7 +208,7 @@ Monitor these logs to ensure healthy operation.

If PostgreSQL connection fails:
1. Check that PostgreSQL container is running
2. Verify connection settings in config.json
2. Verify connection settings in config.toml
3. Check network connectivity
4. Review PostgreSQL logs

Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ cd meshinfo

##### Edit Configuration

1. Copy and then edit the `config.json.sample` to `config.json`.
1. Copy `config.toml.sample` to `config.toml`, then edit it (or copy `config.json.sample` to `config.json` for the legacy JSON format).
2. Copy `Caddyfile.sample` to `Caddyfile` then edit the `Caddyfile` and be sure it is setup for your hostname (FQDN if requiring Let's Encrypt cert to be generated) and your email address for the TLS line.

- Caddy will request a cert of the FQDN, be sure to specify any subdomain. For example: `https://meshinfo.domain.com`.
Expand Down
80 changes: 56 additions & 24 deletions config.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
import datetime
import json
import logging
import os
import tomllib
import uuid
from copy import deepcopy
from typing import Any
Expand Down Expand Up @@ -414,19 +416,19 @@ def check(result: str | None) -> None:
"\n"
"To fix this:\n"
" 1. Add a PostgreSQL service to your docker-compose.yml (see docker-compose.yml.sample)\n"
" 2. Enable PostgreSQL in config.json:\n"
' "storage": {\n'
' "read_from": "postgres",\n'
' "write_to": ["postgres"],\n'
' "postgres": {\n'
' "enabled": true,\n'
' "host": "postgres",\n'
' "port": 5432,\n'
' "database": "meshinfo",\n'
' "username": "postgres",\n'
' "password": "your_password"\n'
" }\n"
" }\n"
" 2. Enable PostgreSQL in config.toml:\n"
" [storage]\n"
' read_from = "postgres"\n'
' write_to = ["postgres"]\n'
"\n"
" [storage.postgres]\n"
" enabled = true\n"
' host = "postgres"\n'
" port = 5432\n"
' database = "meshinfo"\n'
' username = "postgres"\n'
' password = "your_password"\n'
"\n"
" 3. If migrating from JSON, run: docker exec -it meshinfo-meshinfo-1 python3 scripts/migrate_json_to_postgres.py\n"
" 4. Restart MeshInfo\n"
)
Expand Down Expand Up @@ -493,11 +495,23 @@ class Config:
@classmethod
def load(cls) -> dict:
"""
Load config.json, merge with defaults, validate, and return
the final config dict.
Load config.toml (or config.json as fallback), merge with defaults,
validate, and return the final config dict.
"""
# Load user config
user_config = cls._load_from_file("config.json")
# Load user config: prefer TOML, fall back to JSON
if os.path.isfile("config.toml"):
user_config = cls._load_toml("config.toml")
elif os.path.isfile("config.json"):
logger.warning(
"Loading config.json (JSON format is deprecated). "
"Please migrate to config.toml. See config.toml.sample for the format."
)
user_config = cls._load_json("config.json")
else:
raise ConfigValidationError(
"No config file found. "
"Copy config.toml.sample to config.toml and edit it for your deployment."
)

# Merge: defaults first, user overrides on top
config = _deep_merge(DEFAULT_CONFIG, user_config)
Expand All @@ -515,7 +529,7 @@ def load(cls) -> dict:

# Version info (optional, best-effort)
try:
version_info = cls._load_from_file("version-info.json")
version_info = cls._load_json("version-info.json")
if version_info is not None:
config["server"]["version_info"] = version_info
except (ConfigValidationError, FileNotFoundError, json.JSONDecodeError):
Expand All @@ -532,15 +546,30 @@ def load(cls) -> dict:
return config

@classmethod
def _load_from_file(cls, path: str) -> dict:
"""Load and parse a JSON file, with a clear error on failure."""
def _load_toml(cls, path: str) -> dict:
    """Load and parse a TOML config file, with a clear error on failure.

    Args:
        path: Filesystem path to the TOML config file.

    Returns:
        The parsed configuration as a dict.

    Raises:
        ConfigValidationError: If the file is missing or contains invalid TOML.
    """
    try:
        # tomllib requires a binary-mode file handle.
        with open(path, "rb") as f:
            return tomllib.load(f)
    except FileNotFoundError as e:
        # Chain the original exception so tracebacks show the root cause.
        raise ConfigValidationError(
            f"Config file '{path}' not found. "
            "Copy config.toml.sample to config.toml and edit it for your deployment."
        ) from e
    except tomllib.TOMLDecodeError as e:
        raise ConfigValidationError(
            f"Config file '{path}' contains invalid TOML: {e}"
        ) from e

@classmethod
def _load_json(cls, path: str) -> dict:
"""Load and parse a JSON file (legacy format)."""
try:
with open(path, "r", encoding="utf-8") as f:
return json.load(f)
except FileNotFoundError:
raise ConfigValidationError(
f"Config file '{path}' not found. "
f"Copy config.json.sample to config.json and edit it for your deployment."
f"Config file '{path}' not found."
)
except json.JSONDecodeError as e:
raise ConfigValidationError(
Expand All @@ -549,8 +578,11 @@ def _load_from_file(cls, path: str) -> dict:

@classmethod
def load_from_file(cls, path: str) -> dict:
    """Load and parse the config file at *path*.

    The parser is chosen from the file extension: a ``.toml`` suffix
    selects the TOML loader, anything else falls back to the legacy
    JSON loader.
    """
    from pathlib import Path
    is_toml = Path(path).suffix == ".toml"
    return cls._load_toml(path) if is_toml else cls._load_json(path)

@classmethod
def cleanse(cls, config: dict) -> dict:
Expand Down
Loading