1+ #####
2+ # About:
# This script is intended to be used to UNMUTE ALL currently muted datasets within a specified warehouse
4+ # Instructions:
5+ # 1. Run this script, input your API Key ID, Token (generated in Settings -> API within MC UI)
6+ # 2. If applicable, copy/paste the UUID of the warehouse you would like to target to unmute datasets
7+ # Note: the script must be run for one warehouse at a time, run multiple times for multiple warehouses
8+ # 3. Review the list of datasets to be unmuted in the .csv file provided by the prompt
9+ # RECOMMENDATION: Keep this CSV file as a means to audit which datasets were unmuted by this script
10+ # 4. Proceed to unmute the list of datasets
11+ #####
12+
13+ from pycarlo .core import Client , Query , Session
14+ import csv
15+ import json
16+ from typing import Optional
17+ from datetime import datetime
18+
def getWarehouses(mcdId, mcdToken):
    """Return the UUID of the warehouse to target for unmuting.

    Queries the Monte Carlo API for all warehouses on the account. If
    exactly one warehouse exists it is selected automatically; if several
    exist, the user is prompted to paste the UUID of the one to target.

    Args:
        mcdId: Monte Carlo API key ID (Settings -> API in the MC UI).
        mcdToken: Monte Carlo API token.

    Returns:
        The selected warehouse UUID as a string.

    Raises:
        SystemExit: if the account has no warehouses.
    """
    client = Client(session=Session(mcd_id=mcdId, mcd_token=mcdToken))
    warehousesQuery = """
    query getUser {
      getUser {
        account {
          warehouses {
            name
            connectionType
            uuid
          }
        }
      }
    }
    """

    warehouses = client(warehousesQuery).get_user.account.warehouses

    # Fix: the original silently returned None when the account had no
    # warehouses, causing a confusing failure later in the pipeline.
    if not warehouses:
        print("No warehouses found for this account. Exiting.")
        quit()

    if len(warehouses) == 1:
        print(f"Found one warehouse - Name: {warehouses[0].name} - UUID: {warehouses[0].uuid}")
        return warehouses[0].uuid

    print("Found multiple warehouses... ")
    for wh in warehouses:
        # pycarlo exposes the camelCase GraphQL field connectionType as
        # the snake_case attribute connection_type.
        print("Name: " + wh.name + ", Connection Type: " + wh.connection_type + ", UUID: " + wh.uuid)
    return input("Please copy/paste the full UUID of the warehouse you would like to target: ")
46+
def get_dataset_query(dwId, first: Optional[int] = 1000, after: Optional[str] = None) -> Query:
    """Build one page of a getDatasets query for the given warehouse.

    Selects the dataset name, its UUID and its mute flag on each edge,
    plus the cursor fields needed to continue pagination.
    """
    query = Query()
    kwargs = {"first": first, "dw_id": dwId}
    if after:
        # Only pass the cursor when resuming a previous page.
        kwargs["after"] = after
    datasets = query.get_datasets(**kwargs)
    datasets.edges.node.__fields__("dataset", "uuid", "is_muted")
    datasets.page_info.__fields__(end_cursor=True)
    datasets.page_info.__fields__("has_next_page")
    return query
54+
def getDatasetUuidDict(mcdId, mcdToken, dwId):
    """Collect every currently muted dataset in the warehouse.

    Pages through getDatasets and returns a mapping of lower-cased
    dataset name -> dataset UUID, restricted to datasets whose
    is_muted flag is set.
    """
    client = Client(session=Session(mcd_id=mcdId, mcd_token=mcdToken))
    muted = {}
    cursor = None
    has_more = True
    while has_more:
        page = client(get_dataset_query(dwId=dwId, after=cursor)).get_datasets
        muted.update(
            (edge.node.dataset.lower(), edge.node.uuid)
            for edge in page.edges
            if edge.node.is_muted
        )
        has_more = page.page_info.has_next_page
        cursor = page.page_info.end_cursor
    return muted
69+
def get_date():
    """Return the current local time as a filesystem-safe timestamp string.

    Fix: the original format used colons (%H:%M:%S); callers embed this
    value in a CSV filename, and colons are illegal in Windows filenames.
    Hyphens are used for the time separators instead.
    """
    return datetime.today().strftime('%Y-%m-%d_%H-%M-%S')
72+
def userReview(uuid_dict, dw_id):
    """Write the datasets to be unmuted to a CSV audit file and confirm.

    Exits the program if there is nothing to unmute or the user declines
    to proceed; otherwise returns None and the caller continues.

    Args:
        uuid_dict: mapping of dataset name -> dataset UUID to be unmuted.
        dw_id: warehouse UUID (used only in the exit message).
    """
    if not uuid_dict:
        print(f"No muted datasets found in selected warehouse id {dw_id}. Exiting")
        quit()

    # Fix: filename previously said "datasets_to_mute" although this
    # script UNmutes datasets -- misleading for the audit trail.
    fname = f"datasets_to_unmute_{get_date()}.csv"
    # newline='' per the csv module docs; without it the writer emits
    # blank rows on Windows.
    with open(fname, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(['dataset', 'uuid'])
        writer.writerows(uuid_dict.items())

    # Renamed local from `userReview`, which shadowed this function's own name.
    answer = input(f'Datasets to unmute written to file {fname} for your review. OK to proceed? (y/n) ').lower()
    if answer != 'y':
        print("Acknowledged do not proceed. Exiting.")
        quit()
92+
def generateVarsInput(uuid_list):
    """Wrap a list of dataset identifiers in the toggleMuteDatasets payload.

    mute is always False because this script only ever unmutes.
    """
    return {"input": {"datasets": uuid_list, "mute": False}}
101+
def unmute_datasets(mcdId, dwId, mcdToken, uuidDict):
    """Unmute the given datasets via the toggleMuteDatasets mutation.

    Sends the datasets in batches to keep each request under the API's
    per-call limit.

    Args:
        mcdId: Monte Carlo API key ID.
        dwId: warehouse UUID the datasets belong to.
        mcdToken: Monte Carlo API token.
        uuidDict: mapping of dataset name -> dataset UUID to unmute.
    """
    # Was a magic number buried in the loop counter check.
    # NOTE(review): 99 looks like an off-by-one for an intended batch of
    # 100 -- confirm the actual API cap before changing the value.
    BATCH_SIZE = 99
    client = Client(session=Session(mcd_id=mcdId, mcd_token=mcdToken))
    unmute_datasets_query = """
    mutation toggleMuteDatasets($input: ToggleMuteDatasetsInput!) {
      toggleMuteDatasets(input: $input) {
        muted {
          uuid
          isMuted
        }
      }
    }
    """

    batch = []
    total = 0
    for uuid in uuidDict.values():
        entry = dict(dsId=uuid, dwId=dwId)
        print(entry)
        batch.append(entry)
        total += 1
        # Flush a full batch, then start accumulating the next one.
        if len(batch) == BATCH_SIZE:
            print(client(unmute_datasets_query, variables=generateVarsInput(batch)))
            batch = []
    # Flush any remaining partial batch.
    if batch:
        print(client(unmute_datasets_query, variables=generateVarsInput(batch)))
    print("Successfully Unmuted " + str(total) + " Datasets")
133+
def main():
    """Interactive entry point: authenticate, pick a warehouse, unmute."""
    # -------------------INPUT VARIABLES---------------------
    mcd_id = input("MCD ID: ")
    mcd_token = input("MCD Token: ")
    # -------------------------------------------------------
    print("Getting warehouses...")
    dw_id = getWarehouses(mcd_id, mcd_token)
    print("Getting datasets...")
    uuid_dict = getDatasetUuidDict(mcd_id, mcd_token, dw_id)
    userReview(uuid_dict, dw_id)
    unmute_datasets(mcd_id, dw_id, mcd_token, uuid_dict)

if __name__ == '__main__':
    main()