Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
131 changes: 131 additions & 0 deletions 08-power-quality-monitor/dashboard/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

secrets.toml
17 changes: 17 additions & 0 deletions 08-power-quality-monitor/dashboard/Pipfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[packages]
altair = "*"
pandas = "*"
streamlit = "*"
snowflake-connector-python = "==2.7.6"
matplotlib = "*"
streamlit-option-menu = "*"

[dev-packages]

[requires]
python_version = "3.9"
1,045 changes: 1,045 additions & 0 deletions 08-power-quality-monitor/dashboard/Pipfile.lock

Large diffs are not rendered by default.

110 changes: 110 additions & 0 deletions 08-power-quality-monitor/dashboard/Power_Monitor.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
import streamlit as st
import pandas as pd
import numpy as np
import snowflake.connector

# Hours in a (non-leap) year; used to project annual figures from one power reading.
hours_in_year = 8760
# Emission factor applied to projected kWh to estimate CO2 output.
# NOTE(review): presumably kg CO2 per kWh (results are labeled "Kg of CO2"
# below) — confirm the source/region this factor comes from.
elec_emission_factor = 0.85

# Initialize connection.
@st.cache_resource
def init_connection():
    """Open a Snowflake connection from st.secrets["snowflake"].

    Cached by Streamlit so the same connection object is reused across reruns.
    """
    credentials = st.secrets["snowflake"]
    return snowflake.connector.connect(**credentials)

conn = init_connection()

# Display column labels for the DataFrames built from query results below.
# Order must match the column order of the rows each query returns:
# power_cols matches the aliased SELECT in get_data(); device_cols is applied
# to `select * from devices_vw` rows — NOTE(review): assumes the view returns
# columns in exactly this order; confirm against the view definition.
power_cols = ['Serial Number', 'Device ID', 'Created', 'Apparent Power', 'Frequency', 'Last Current', 'Last Power', 'Power Factor', 'Reactive Power']
device_cols = ['Device ID', 'Serial Number', 'Maximum Current', 'Maximum Power', 'Maximum Voltage', 'Maximum Power Factor', 'Maximum Reactive Power']

# Perform query of Snowflake database.
@st.cache_data(ttl=600)
def run_query(query):
    """Execute *query* on the shared Snowflake connection and return all rows.

    Streamlit caches the result for 600 seconds per unique query string.
    """
    print(f'Running query: {query}')

    with conn.cursor() as cursor:
        cursor.execute(query)
        rows = cursor.fetchall()
    return rows

# queries to run every time we load the page
devices_query = run_query("select * from devices_vw;")

# Get data from POWER_VW based on options.
def get_data():
    """Fetch the most recent power readings from POWER_VW.

    Reads the module-level sidebar widgets: ``sort`` (selected device serial
    number; empty string means "no filter") and ``num_records`` (row limit).
    Rows come back newest-first (ORDER BY CREATED DESC).

    Returns the raw row tuples from :func:`run_query`.
    """
    # Shared SELECT clause — previously duplicated (with inconsistent
    # where/WHERE casing) in both branches.
    select = (
        'select Serial_Number AS "Serial Number", Device as "Device ID", Created, '
        'APPARENT_POWER as "Apparent Power", Frequency, LAST_CURRENT as "Last Current", '
        'LAST_POWER as "Last Power", POWER_FACTOR as "Power Factor", '
        'REACTIVE_POWER as "Reactive Power" from POWER_VW'
    )
    if len(sort) > 0:
        # Escape single quotes so the literal stays valid SQL.
        # NOTE(review): a bound parameter via the connector would be safer
        # than string interpolation — consider cur.execute(sql, (sort,)).
        sort_clean = sort.replace("'", "''")
        where = f" where SERIAL_NUMBER = '{sort_clean}' AND CREATED IS NOT NULL"
    else:
        where = " WHERE CREATED IS NOT NULL"
    query = f'{select}{where} order by CREATED desc limit {num_records};'
    return run_query(query)

# Sidebar: user-facing dashboard options.
with st.sidebar:
    # Get list of devices and serial numbers for the Sort by Device option.
    device_rows = devices_query
    # Sort rows by their first column (Device ID, per device_cols).
    device_rows = sorted(device_rows, key=lambda x: x[0])

    # Get keys from device_rows.
    device_names = [row[1] for row in device_rows]  # second column: serial numbers
    device_keys = [row[0] for row in device_rows]   # first column: device IDs

    # Bare string literal: rendered as markdown by Streamlit's "magic" feature.
    """
    ### Options
    """
    # Number of most-recent POWER_VW rows to fetch (min 10, max 1000, default 100).
    num_records = st.slider('Records to fetch?', 10, 1000, 100)
    # `sort` holds the selected serial number, consumed by get_data().
    # NOTE(review): key=device_keys[0] sets the widget's session-state key to
    # the first device ID — looks unintentional; confirm a fixed string key
    # wasn't meant here.
    sort = st.selectbox('Device',options=device_names, key=device_keys[0])
    show_map = st.checkbox('Show map?', False)
    show_charts = st.checkbox('Show charts?', True)
    show_table_data = st.checkbox('Show table data?', False)


"""
# Blues Power Monitoring Demo
"""
devices_df = pd.DataFrame(device_rows, columns=device_cols)

st.dataframe(devices_df, hide_index=True, use_container_width=True)

if show_map:
    # Section heading: bare string rendered as markdown by Streamlit magic.
    """
    ### Power Monitor Device Locations
    """
    device_locations_query = run_query("select * from device_locations_vw;")
    # NOTE(review): assumes DEVICE_LOCATIONS_VW returns columns in exactly this
    # order — confirm against the view definition.
    locations_cols = ['Device ID', 'Serial Number', 'latitude', 'longitude']

    # Keep only the coordinate columns; st.map expects latitude/longitude.
    power_locations = pd.DataFrame(device_locations_query, columns=locations_cols)[["latitude", "longitude"]]

    st.map(power_locations)

if show_table_data or show_charts:
    # find the index of the selected device
    device_index = device_names.index(sort)
    st.write(f'### Data for {sort} ({device_keys[device_index]})')

    data = get_data()
    power_df = pd.DataFrame(data, columns=power_cols)

    # Project annual energy use (kWh) from the newest reading (row 0) and the
    # previous reading (row 1); rows are newest-first per get_data()'s
    # ORDER BY CREATED DESC. 'Last Power' is in watts (see metric label below).
    # NOTE(review): raises IndexError if fewer than two rows come back for the
    # selected device — confirm POWER_VW always has at least two readings.
    kwh_per_year = round((power_df['Last Power'][0] / 1000) * hours_in_year, 2)
    last_kwh_per_year = round((power_df['Last Power'][1] / 1000) * hours_in_year, 2)

    # Estimated annual CO2 emissions from the projected energy use.
    co2_per_year = round(kwh_per_year * elec_emission_factor, 2)
    last_co2_per_year = round(last_kwh_per_year * elec_emission_factor, 2)

    col1, col2 = st.columns(2)

    # delta_color="inverse": an increase renders red (bad), a decrease green.
    col1.metric(label="Projected Annual Emissions 🌲", value=f"{co2_per_year} Kg of CO2", delta=f"{round(co2_per_year-last_co2_per_year, 2)} Kg of CO2", delta_color="inverse")
    # NOTE(review): this delta is not rounded, unlike every other delta — confirm intent.
    col2.metric(label="Projected Annual Power Use 🔌", value=f"{kwh_per_year} KwH", delta=f"{(kwh_per_year-last_kwh_per_year)} KwH", delta_color="inverse")

    st.divider()

    col1, col2, col3 = st.columns(3)

    # Latest raw readings (row 0) with deltas against the previous reading (row 1).
    col1.metric(label="Apparent Power", value=power_df['Apparent Power'][0], delta=(power_df['Apparent Power'][0]-power_df['Apparent Power'][1]), delta_color="inverse")
    col2.metric(label="Last Power", value=f"{power_df['Last Power'][0]} Watts", delta=f"{(power_df['Last Power'][0]-power_df['Last Power'][1])} Watts", delta_color="inverse")
    col3.metric(label="Reactive Power", value=f"{power_df['Reactive Power'][0]} VAR", delta=f"{round(power_df['Reactive Power'][0]-power_df['Reactive Power'][1] ,2)} VAR", delta_color="inverse")

    if show_table_data:
        st.dataframe(power_df, hide_index=True)

    if show_charts:
        # Time-series area charts of the fetched readings, keyed on Created.
        st.area_chart(data=power_df[['Apparent Power', 'Last Power','Created']], x='Created', y=['Apparent Power', 'Last Power'])
        st.area_chart(data=power_df[['Reactive Power','Created']], x='Created', y=['Reactive Power'])
18 changes: 18 additions & 0 deletions 08-power-quality-monitor/dashboard/pages/About.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import streamlit as st

# "About" page: static project description rendered as markdown.
# Fixes user-facing grammar: "a structured data tables" -> "structured data
# tables", and "with a view for X, Y events" -> "with views for X and Y events".
st.markdown(
    """
# Blues Power Monitoring Demo!

This demo pulls data from Snowflake that was routed from [this Notehub project](https://notehub.io/project/app:eb43a9ae-0b78-4508-93c2-d39dc511fb70).

The application in question is a Notecard and Notecarrier-F-based device. The Swan-powered host
application takes readings from a connected Dr. Wattson device that monitors
power through a connected supply and sends those readings to the Notecard.

Raw JSON is routed to Snowflake using the Snowflake SQL API and transformed into
structured data tables using views, with views for `power.qo` and `_session.qo`
events.

"""
)
5 changes: 5 additions & 0 deletions 08-power-quality-monitor/dashboard/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
altair
pandas
streamlit==1.23.0
snowflake-connector-python==2.7.6
matplotlib
streamlit-option-menu
Loading