-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.py
More file actions
223 lines (184 loc) · 8.93 KB
/
app.py
File metadata and controls
223 lines (184 loc) · 8.93 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
import streamlit as st
import pandas as pd
import psycopg2
import altair as alt
import os
# To install required libraries:
# pip install streamlit pandas psycopg2-binary altair
def get_data_from_db():
    """
    Connect to the PostgreSQL database and fetch all rows from the
    ``tastytrades`` table.

    Connection settings keep their original hard-coded defaults but can be
    overridden via the ``TASTY_DB_NAME`` / ``TASTY_DB_USER`` /
    ``TASTY_DB_HOST`` environment variables.

    Returns:
        pd.DataFrame: the full ``tastytrades`` table, or an empty DataFrame
        if the connection or query fails (an error is shown in the
        Streamlit UI in that case).
    """
    # --- Configuration (env-overridable; defaults preserved) ---
    db_name = os.environ.get("TASTY_DB_NAME", "tastytrades")
    db_user = os.environ.get("TASTY_DB_USER", "daeron")
    db_host = os.environ.get("TASTY_DB_HOST", "localhost")

    # Initialize up front so the finally block does not need the fragile
    # `'conn' in locals()` check.
    conn = None
    try:
        # Connect to PostgreSQL without a password (relies on local
        # peer/trust authentication).
        conn = psycopg2.connect(
            dbname=db_name,
            user=db_user,
            host=db_host
        )
        print("Database connected without a password")
        # Use pandas to read the SQL query results directly into a DataFrame.
        # NOTE(review): pandas emits a UserWarning when handed a raw DBAPI
        # connection instead of a SQLAlchemy engine; the query still works.
        df = pd.read_sql("SELECT * FROM tastytrades", conn)
        return df
    except Exception as e:
        st.error(f"❌ Error: Could not connect to the database or fetch data. {e}")
        return pd.DataFrame()  # Return an empty DataFrame on error
    finally:
        if conn is not None:
            conn.close()
            print("Database connection closed")
# --- Streamlit App UI ---
st.set_page_config(page_title="Tastytrade Transactions", layout="wide")
st.title("Tastytrade Transaction History")
st.markdown("This app displays the transaction data loaded from your CSV into the PostgreSQL database.")

# Columns the rest of the app depends on.
required_columns = ['underlying_symbol', 'trade_date', 'pl_total']
df_trades = pd.DataFrame()

# Maps the raw CSV header names onto the names the app works with.
column_mapping = {
    'Date': 'trade_date',
    'Underlying Symbol': 'underlying_symbol',
    'Total': 'pl_total'
}

# --- Sidebar Filters ---
st.sidebar.header("Data Source")
user_csv = st.sidebar.file_uploader("Upload your own CSV file", type=['csv'])

if user_csv is None:
    # No upload: fall back to the PostgreSQL table.
    df_trades = get_data_from_db()
    if not df_trades.empty:
        st.success(f"✅ Loaded {len(df_trades)} rows from the database.")
else:
    try:
        # Load the upload and normalise its headers in one chain.
        df_trades = pd.read_csv(user_csv).rename(columns=column_mapping)
        st.success("✅ CSV file uploaded and loaded successfully.")
    except Exception as e:
        st.error(f"❌ Error: Could not load the uploaded file. {e}")
        df_trades = pd.DataFrame()
# Data processing
if not df_trades.empty:
    # Ensure date columns are in the correct datetime format (bad values
    # become NaT instead of raising).
    if 'trade_date' in df_trades.columns:
        df_trades['trade_date'] = pd.to_datetime(df_trades['trade_date'], errors='coerce')
    if 'expiration_date' in df_trades.columns:
        df_trades['expiration_date'] = pd.to_datetime(df_trades['expiration_date'], errors='coerce')
    # Convert pl_total to a numeric type, coercing errors
    if 'pl_total' in df_trades.columns:
        df_trades['pl_total'] = pd.to_numeric(df_trades['pl_total'], errors='coerce')
    # Drop rows where critical data is missing. Only pass columns that
    # actually exist: dropna(subset=...) raises KeyError for absent
    # columns, which would crash the app before the friendly
    # "missing columns" message below could be shown.
    subset_present = [col for col in required_columns if col in df_trades.columns]
    if subset_present:
        df_trades.dropna(subset=subset_present, inplace=True)

    # Check if the fetched DataFrame has all the required columns after cleaning
    if not df_trades.empty and all(col in df_trades.columns for col in required_columns):
        # --- Sidebar Filters ---
        st.sidebar.header("Filter Transactions")
        # Get unique underlying symbols for the multiselect
        symbols = sorted(df_trades['underlying_symbol'].unique())
        selected_symbols = st.sidebar.multiselect(
            "Select Underlying Symbol(s)",
            options=symbols,
            default=symbols
        )
        # Date range selection. While the user is still picking the second
        # date, st.date_input returns a single-element tuple, so a bare
        # two-name unpack would raise ValueError; unpack defensively and
        # fall back to the full range until the selection is complete.
        min_date = df_trades['trade_date'].min().date()
        max_date = df_trades['trade_date'].max().date()
        date_range = st.sidebar.date_input(
            "Select Date Range",
            value=(min_date, max_date),
            min_value=min_date,
            max_value=max_date
        )
        if isinstance(date_range, (list, tuple)) and len(date_range) == 2:
            start_date, end_date = date_range
        else:
            start_date, end_date = min_date, max_date

        # Filter the DataFrame based on selections
        filtered_df = df_trades[
            (df_trades['underlying_symbol'].isin(selected_symbols)) &
            (df_trades['trade_date'].dt.date >= start_date) &
            (df_trades['trade_date'].dt.date <= end_date)
        ]

        if not filtered_df.empty:
            # --- Summary Statistics ---
            st.subheader("Summary Statistics")
            col1, col2, col3, col4 = st.columns(4)
            total_pl = filtered_df['pl_total'].sum()
            total_trades = len(filtered_df)
            avg_pl_per_trade = filtered_df['pl_total'].mean()
            winning_trades = len(filtered_df[filtered_df['pl_total'] > 0])
            win_rate = (winning_trades / total_trades) * 100 if total_trades > 0 else 0
            with col1:
                st.metric(label="Total P/L", value=f"${total_pl:.2f}")
            with col2:
                st.metric(label="Total Trades", value=total_trades)
            with col3:
                st.metric(label="Avg P/L per Trade", value=f"${avg_pl_per_trade:.2f}")
            with col4:
                st.metric(label="Win Rate", value=f"{win_rate:.2f}%")

            # --- Cumulative P/L Over Time Chart ---
            st.subheader("Cumulative P/L Over Time")
            cumulative_pl_df = filtered_df.copy()
            # Sort by date so the running total accumulates chronologically.
            cumulative_pl_df.sort_values('trade_date', inplace=True)
            cumulative_pl_df['cumulative_pl'] = cumulative_pl_df['pl_total'].cumsum()
            chart_cumulative_pl = alt.Chart(cumulative_pl_df).mark_line().encode(
                x=alt.X('trade_date', title="Date"),
                y=alt.Y('cumulative_pl', title="Cumulative P/L"),
                tooltip=[alt.Tooltip('trade_date', title='Date'), alt.Tooltip('cumulative_pl', title='Cumulative P/L', format='$.2f')]
            ).properties(
                title='Cumulative P/L Over Selected Date Range'
            )
            st.altair_chart(chart_cumulative_pl, use_container_width=True)

            # --- P/L per Day of the Week Chart ---
            st.subheader("Daily Profit & Loss (P/L)")
            # Left-join onto a full weekday frame so days with no trades
            # still appear as zero-height bars.
            # NOTE(review): only Mon-Fri are charted; rows dated on a
            # weekend (e.g. crypto trades) are excluded from this chart.
            day_order = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday']
            all_days_df = pd.DataFrame({'day_of_week': day_order})
            # Group by day of week and sum P/L
            daily_pl = filtered_df.groupby(filtered_df['trade_date'].dt.day_name())['pl_total'].sum().reset_index()
            daily_pl.columns = ['day_of_week', 'pl_total']
            daily_pl_complete = pd.merge(all_days_df, daily_pl, on='day_of_week', how='left').fillna(0)
            # Ensure the correct ordering of days for the chart
            daily_pl_complete['day_of_week'] = pd.Categorical(daily_pl_complete['day_of_week'], categories=day_order, ordered=True)
            chart_pl = alt.Chart(daily_pl_complete).mark_bar().encode(
                x=alt.X('day_of_week', sort=day_order, title="Day of the Week"),
                y=alt.Y('pl_total', title="Total P/L"),
                color=alt.condition(
                    alt.datum.pl_total > 0,
                    alt.value("green"),  # The positive color
                    alt.value("red")     # The negative color
                ),
                tooltip=['day_of_week', 'pl_total']
            ).properties(
                title='Total P/L per Day of the Week'
            )
            st.altair_chart(chart_pl, use_container_width=True)

            # --- Trades per Day Chart ---
            st.subheader("Trades Over Time")
            # Group by date and count trades
            trades_over_time = filtered_df.groupby('trade_date').size().reset_index(name='number_of_trades')
            chart_trades = alt.Chart(trades_over_time).mark_line().encode(
                x=alt.X('trade_date', title="Date"),
                y=alt.Y('number_of_trades', title="Number of Trades"),
                tooltip=[alt.Tooltip('trade_date', title='Date'), 'number_of_trades']
            ).properties(
                title='Number of Trades over Selected Date Range'
            )
            st.altair_chart(chart_trades, use_container_width=True)
        else:
            st.warning("⚠️ No data available for the selected filters.")
    else:
        st.error("❌ The uploaded CSV file is missing one or more required columns. Please check your file.")
        st.info(f"Required columns: {required_columns}")
else:
    st.warning("Please upload a CSV file or ensure your database is running.")