Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
master-v1.2.1
master-v1.2.2
2 changes: 1 addition & 1 deletion flows/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
FLOWS pipeline package
"""
from .photometry import timed_photometry as photometry
from .catalogs import download_catalog
from .catalogs import download_catalog, delete_catalog
from .visibility import visibility
from .version import get_version
from .load_image import load_image
Expand Down
23 changes: 20 additions & 3 deletions flows/catalogs.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from astroquery.simbad import Simbad
from bottleneck import anynan
from tendrils.utils import load_config, query_ztf_id
from tendrils import api

from .aadc_db import AADC_DB

Expand Down Expand Up @@ -257,7 +258,7 @@ def query_skymapper(coo_centre, radius=24 * u.arcmin):

# --------------------------------------------------------------------------------------------------

def query_local_refcat2(coo_centre, radius=24 * u.arcmin):
def query_local_refcat2(coo_centre, radius=24 * u.arcmin, limiting_magnitude=19):
"""
Uses refcat.c from https://archive.stsci.edu/hlsp/atlas-refcat2 to do a cone-search around the position.
NOTE: In going from mast casjobs to local refcat.c, we have lost the unique object identifier, 'objid'
Expand Down Expand Up @@ -289,6 +290,7 @@ def query_local_refcat2(coo_centre, radius=24 * u.arcmin):
refcat_output = subprocess.run(["refcat",
str(coo_centre.ra.deg), str(coo_centre.dec.deg),
"-rad", str(Angle(radius).deg),
"-mlim", str(limiting_magnitude),
"-var", "ra,dec,pmra,pmdec,gaia,bp,rp,dupvar,g,r,i,z,j,h,k",
"-nohdr"],
encoding="utf-8", capture_output=True, check=True)
Expand Down Expand Up @@ -331,7 +333,7 @@ def query_local_refcat2(coo_centre, radius=24 * u.arcmin):
logger.debug("Found %d results", len(results))
return results

def query_all(coo_centre, radius=24 * u.arcmin, dist_cutoff=2 * u.arcsec):
def query_all(coo_centre, radius=24 * u.arcmin, dist_cutoff=2 * u.arcsec, limiting_magnitude=19):
"""
Query all catalogs (REFCAT2, APASS, SDSS and SkyMapper) and return merged catalog.

Expand All @@ -354,7 +356,7 @@ def query_all(coo_centre, radius=24 * u.arcmin, dist_cutoff=2 * u.arcsec):
"""

# Query the REFCAT2 catalog using CasJobs around the target position:
results = query_local_refcat2(coo_centre, radius=radius)
results = query_local_refcat2(coo_centre, radius=radius, limiting_magnitude=limiting_magnitude)
AT_results = Table(results)
refcat = SkyCoord(ra=AT_results['ra'], dec=AT_results['decl'], unit=u.deg, frame='icrs')

Expand Down Expand Up @@ -567,3 +569,18 @@ def download_catalog(target=None, radius=24 * u.arcmin, radius_ztf=3 * u.arcsec,
except: # noqa: E722, pragma: no cover
db.conn.rollback()
raise


def delete_catalog(target):
    """
    Delete the reference-star catalog of a single target from the flows database
    and mark the target as having no catalog downloaded (also clears its ZTF ID).

    Parameters:
        target (int): Numeric target ID whose catalog should be removed.

    Raises:
        ValueError: If ``target`` is not a numeric target ID.
    """
    # Validate before doing any work: the previous version only checked the
    # target type after fetching the catalog, and silently did nothing
    # (no delete, no commit, no error) when given a non-numeric target.
    if target is None or not isinstance(target, (int, float)):
        raise ValueError("delete_catalog requires a numeric target ID, got %r" % (target,))

    cat = api.get_catalog(target)
    starids = [int(starid) for starid in cat['references']['starid']]
    logger.debug("Deleting %d reference stars for target=%s", len(starids), target)

    with AADC_DB() as db:
        # Use parameterized queries instead of interpolating values into the
        # SQL string. psycopg2 adapts a Python list to a PostgreSQL array,
        # so "= ANY(%s)" replaces the hand-built "IN (...)" list — which was
        # also invalid SQL ("IN ()") whenever the catalog had no references.
        if starids:
            db.cursor.execute("DELETE FROM flows.refcat2 WHERE starid = ANY(%s);", [starids])
        db.cursor.execute(
            "UPDATE flows.targets SET catalog_downloaded=FALSE,ztf_id=NULL WHERE targetid=%s;",
            [int(target)])
        db.conn.commit()
7 changes: 7 additions & 0 deletions flows/fileio.py
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,13 @@ def get_filter(self):
def load_references(self, catalog: Optional[Dict] = None) -> refclean.References:
use_filter = self.get_filter()
references = api.get_catalog(self.target.name)['references'] if catalog is None else catalog['references']

filter = (references["J_mag"] > 10) & (references["H_mag"] > 10) & (references["g_mag"] is not None) \
& (references["H_mag"] is not None) & (references["gaia_variability"] != 1) \
& (references["g_mag"] - references["r_mag"] < 10) & (references["starid"] != 107441912858406185) \
& (references["starid"] != 107441912838383932)

references = references[filter]
references.sort(use_filter)
# Check that there actually are reference stars in that filter:
if allnan(references[use_filter]):
Expand Down
14 changes: 12 additions & 2 deletions run_catalogs.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import argparse
import logging
from tendrils import api
from flows import download_catalog
from flows import download_catalog, delete_catalog


def parse():
Expand All @@ -16,7 +16,8 @@ def parse():
parser.add_argument('-q', '--quiet', help='Only report warnings and errors.', action='store_true')
parser.add_argument('-c', '--commit', help='Commit downloaded catalog(s) to flows database.', action='store_true')
parser.add_argument('-p', '--print', help='Print single catalog which already exists in flows database.', action='store_true')
parser.add_argument('-t', '--target', type=str, help='Optionally specify just one target for downloading/committing/printing.', nargs='?', default=None)
parser.add_argument('-t', '--target', type=str, help='Optionally specify just one target for downloading/committing/printing/deleting.', nargs='?', default=None)
parser.add_argument('-D', '--delete', help='Delete single catalog which already exists in flows database.', action='store_true')
return parser.parse_args()

def set_logging_level(args):
Expand All @@ -42,6 +43,15 @@ def main():
logger.addHandler(console)
logger.setLevel(logging_level)

if args.delete:
if args.target is not None:
logger.info("Deleting catalog for target=%s...", args.target)
delete_catalog(int(args.target))
logger.info("Done deleting catalog for target=%s...", args.target)
else:
logger.warning("Need to specify target to delete")
exit(0)

targets = api.get_catalog_missing()
if args.target is not None:
if args.print is False:
Expand Down
Loading