Skip to content

Commit 213f076

Browse files
committed
Refactor enrichment to use google-genai SDK and global location for Gemini 3 Flash
1 parent 3542910 commit 213f076

12 files changed

Lines changed: 1099 additions & 146 deletions

File tree

.gcloudignore

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
# This file specifies files that are *not* uploaded to Google Cloud
2+
# using gcloud. It follows the same syntax as .gitignore, with the addition of
3+
# "#!include" directives (which insert the entries of the given .gitignore-style
4+
# file at that point).
5+
#
6+
# For more information, run:
7+
# $ gcloud topic gcloudignore
8+
#
9+
.gcloudignore
10+
# If you would like to upload your .git directory, .gitignore file or files
11+
# from your .gitignore file, remove the corresponding line
12+
# below:
13+
.git
14+
.gitignore
15+
16+
node_modules
17+
#!include:.gitignore
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
[
2+
{"name": "scan_date", "type": "DATE", "mode": "REQUIRED"},
3+
{"name": "scan_timestamp", "type": "TIMESTAMP", "mode": "REQUIRED"},
4+
{"name": "ticker", "type": "STRING", "mode": "REQUIRED"},
5+
{"name": "direction", "type": "STRING"},
6+
{"name": "overnight_score", "type": "INTEGER"},
7+
{"name": "price_change_pct", "type": "FLOAT"},
8+
{"name": "underlying_price", "type": "FLOAT"},
9+
{"name": "day_volume", "type": "INTEGER"},
10+
{"name": "call_dollar_volume", "type": "FLOAT"},
11+
{"name": "put_dollar_volume", "type": "FLOAT"},
12+
{"name": "total_options_dollar_volume", "type": "FLOAT"},
13+
{"name": "call_vol_oi_ratio", "type": "FLOAT"},
14+
{"name": "put_vol_oi_ratio", "type": "FLOAT"},
15+
{"name": "call_active_strikes", "type": "INTEGER"},
16+
{"name": "put_active_strikes", "type": "INTEGER"},
17+
{"name": "call_uoa_depth", "type": "FLOAT"},
18+
{"name": "put_uoa_depth", "type": "FLOAT"},
19+
{"name": "signals", "type": "STRING", "mode": "REPEATED"},
20+
{"name": "recommended_contract", "type": "STRING"},
21+
{"name": "recommended_strike", "type": "FLOAT"},
22+
{"name": "recommended_expiration", "type": "DATE"},
23+
{"name": "recommended_dte", "type": "INTEGER"},
24+
{"name": "recommended_mid_price", "type": "FLOAT"},
25+
{"name": "recommended_spread_pct", "type": "FLOAT"},
26+
{"name": "contract_score", "type": "FLOAT"},
27+
{"name": "inserted_at", "type": "TIMESTAMP"}
28+
]

overnight-edge-enrichment/PROMPT-SITE-POLISH-SEO-PAGES.md

Lines changed: 331 additions & 0 deletions
Large diffs are not rendered by default.

src/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
# src/__init__.py
22

3-
from . import enrichment, ingestion, serving, utils
3+
# from . import enrichment, ingestion, serving, utils

src/enrichment/core/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
# src/enrichment/core/__init__.py
22

3-
from . import pipelines
3+
# from . import pipelines

src/enrichment/core/clients/polygon_client.py

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import logging
22
import time
3+
from datetime import date
34
import requests
45
from requests.adapters import HTTPAdapter
56
from tenacity import retry, stop_after_attempt, wait_exponential
@@ -140,3 +141,80 @@ def fetch_option_contract_snapshot(
140141
"Option contract snapshot failed for %s: %s", contract_symbol, e
141142
)
142143
return None
144+
145+
def fetch_all_tickers_snapshot(self) -> list[dict]:
    """Return the full-market snapshot for every US stock ticker.

    One call to ``/v2/snapshot/locale/us/markets/stocks/tickers``.
    Best-effort: any failure is logged and an empty list is returned
    so callers never have to handle an exception here.
    """
    endpoint = f"{self.BASE}/v2/snapshot/locale/us/markets/stocks/tickers"
    try:
        payload = self._get(endpoint)
        tickers = payload.get("tickers")
        return tickers or []
    except Exception as exc:
        logging.error("All-tickers snapshot failed: %s", exc)
        return []
157+
158+
def fetch_underlying_price(self, ticker: str) -> float | None:
    """Best-effort latest price for *ticker* (used to backfill options rows).

    Tries, in order: ``lastTrade.p`` (latest trade), ``day.c`` (today's
    close so far), ``prevDay.c`` (previous session close), and finally
    the generic ``_extract_underlying_price`` fallback.

    Returns the first available price as a float, or ``None`` when the
    snapshot call fails or no price field is populated.
    """
    try:
        snap = self.fetch_stock_snapshot(ticker)
        if not snap:
            return None
        t = snap.get("ticker") or {}
        # Ordered fallback chain: (container key, price field).
        for container, field in (
            ("lastTrade", "p"),
            ("day", "c"),
            ("prevDay", "c"),
        ):
            value = (t.get(container) or {}).get(field)
            if value is not None:
                return float(value)
        # Last resort: generic extractor shared with the options path.
        return self._extract_underlying_price(t)
    except Exception as exc:
        # Deliberately best-effort — callers treat None as "price
        # unknown" — but log at debug level so failures are not
        # completely invisible (the original swallowed them silently).
        logging.debug("fetch_underlying_price(%s) failed: %s", ticker, exc)
        return None
183+
184+
def fetch_options_chain(self, ticker: str, max_days: int = 90) -> list[dict]:
    """Snapshot every active option contract on *ticker* (paginated).

    Walks ``/v3/snapshot/options/{ticker}`` following ``next_url``
    cursors. Contracts expiring outside ``[today, today + max_days]``
    are skipped, as are contracts whose expiration date fails to parse.
    If any mapped contract is missing ``underlying_price``, a single
    stock-snapshot lookup backfills it across the whole chain.
    """
    from datetime import timedelta

    today = date.today()
    latest_allowed = today + timedelta(days=max_days)

    contracts: list[dict] = []
    page_url = f"{self.BASE}/v3/snapshot/options/{ticker}"
    page_params: dict = {"limit": 250}

    while True:
        page = self._get(page_url, params=page_params)

        for result in page.get("results") or []:
            expiration = (result.get("details") or {}).get("expiration_date")
            try:
                if expiration and not (
                    today <= date.fromisoformat(expiration) <= latest_allowed
                ):
                    continue
            except Exception:
                # Unparseable expiration date: drop the contract.
                continue
            contracts.append(self._map_options_result(result))

        next_url = page.get("next_url")
        if not next_url:
            break
        # next_url already embeds the pagination cursor, so the
        # original query params must not be sent again.
        page_url, page_params = next_url, {}

    # Backfill underlying price where the snapshot omitted it.
    if contracts and any(c.get("underlying_price") is None for c in contracts):
        price = self.fetch_underlying_price(ticker)
        if isinstance(price, (int, float)):
            for c in contracts:
                if c.get("underlying_price") is None:
                    c["underlying_price"] = float(price)

    return contracts

src/enrichment/core/config.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,10 @@
2727
CHAIN_TABLE = OPTIONS_CHAIN_TABLE_ID
2828
CAND_TABLE = OPTIONS_CANDIDATES_TABLE_ID
2929

30+
# Overnight Scanner
31+
OVERNIGHT_SIGNALS_TABLE = f"{PROJECT_ID}.{BIGQUERY_DATASET}.overnight_signals"
32+
OVERNIGHT_UNIVERSE_FILE = os.getenv("UNIVERSE_FILE", "overnight-universe.txt")
33+
3034
# --- Score Aggregator: Regime-Aware Weighting ---
3135

3236
# 1. EVENT REGIME: Catalyst Driven
Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,15 @@
11
# src/enrichment/core/pipelines/__init__.py
22

3-
from . import (
4-
business_summarizer,
5-
financials_analyzer,
6-
fundamentals_analyzer,
7-
mda_analyzer,
8-
news_analyzer,
9-
options_analyzer,
10-
options_candidate_selector,
11-
options_feature_engineering,
12-
score_aggregator,
13-
technicals_analyzer,
14-
transcript_analyzer,
15-
)
3+
# from . import (
4+
# business_summarizer,
5+
# financials_analyzer,
6+
# fundamentals_analyzer,
7+
# mda_analyzer,
8+
# news_analyzer,
9+
# options_analyzer,
10+
# options_candidate_selector,
11+
# options_feature_engineering,
12+
# score_aggregator,
13+
# technicals_analyzer,
14+
# transcript_analyzer,
15+
# )

0 commit comments

Comments
 (0)