Description
Technical specifics
What: Scientists on PEARL frequently need to run a script that outputs a .csv file for ingestion by other teams. We should therefore create a script that can be run regularly (every day or week), that interacts with our systems to run the analysis, and that downloads the resulting file to a given output location.
Why: To benefit users and allow stakeholders to ingest the data more easily.
Who (will benefit): Teams and individuals who will be able to use these outputs, and the individual who runs this manually at the moment.
How (are we doing this now): We are not.
Technical caveats: We will need to log in using a username and password stored either in a separate file alongside this script or in the environment of the running execution, and then start a simple job via the API. The status of the job needs to be monitored until completion, after which the script needs to download the file into a given output directory.
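A minimal sketch of that wrapper, assuming a hypothetical REST API (the endpoint paths, payload fields, and job states below are placeholders, not the real interface), with credentials taken from the environment:

# Hypothetical wrapper: log in, submit a job, poll until it finishes, download the CSV.
# All endpoint paths, payload fields and job states below are assumptions for illustration.
import os
import time
from pathlib import Path

import requests

API_ROOT = os.environ.get("API_ROOT", "https://example.invalid/api")  # placeholder URL
OUTPUT_DIR = Path(os.environ.get("OUTPUT_DIR", "./output"))


def run_job_and_download():
    session = requests.Session()

    # Log in with credentials taken from the environment (or a file next to the script).
    resp = session.post(
        f"{API_ROOT}/login",
        json={"username": os.environ["SCRIPT_USER"], "password": os.environ["SCRIPT_PASSWORD"]},
    )
    resp.raise_for_status()
    session.headers["Authorization"] = f"Bearer {resp.json()['token']}"  # assumed token field

    # Start the simple job that runs the analysis script.
    resp = session.post(f"{API_ROOT}/jobs", json={"script": "pearl_moderator_monitor"})
    resp.raise_for_status()
    job_id = resp.json()["id"]

    # Poll the job status until it completes (or fails).
    while True:
        state = session.get(f"{API_ROOT}/jobs/{job_id}").json()["state"]
        if state in ("SUCCESS", "ERROR"):
            break
        time.sleep(60)

    if state != "SUCCESS":
        raise RuntimeError(f"Job {job_id} finished in state {state}")

    # Download the resulting .csv into the given output directory.
    OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
    with session.get(f"{API_ROOT}/jobs/{job_id}/output.csv", stream=True) as resp:
        resp.raise_for_status()
        with open(OUTPUT_DIR / "output.csv", "wb") as fh:
            for chunk in resp.iter_content(chunk_size=8192):
                fh.write(chunk)


if __name__ == "__main__":
    run_job_and_download()

Scheduled with cron or a similar scheduler, this would cover the "every day or week" requirement.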
The analysis script currently looks like this:
# import mantid algorithms, numpy and matplotlib
from mantid.simpleapi import *
import matplotlib.pyplot as plt
import numpy as np
#Cycles2Run=['15_2', '15_3', '15_4', '16_1', '16_3', '16_4', '16_5', '17_1', '17_2', '17_3', '17_4', '18_1', '18_2','18_3', '18_4',
#'19_1', '19_2', '19_3', '19_4', '20_2', '20_3', '21_1', '22_5', '23_1', '23_2', '23_3', '23_4', '23_5', '24_1', '24_2']
Cycles2Run=['25_4']
#Cycles2Run=['24_5']
Path2Save = r'E:\Data\Moderator'
Path2Data = r'X:\data'
# Dictionary of cycle run numbers for PEARL
CycleDict = {
"start_15_2": 90482,"end_15_2": 91528,
"start_15_3": 91530,"end_15_3": 92430,
"start_15_4": 92434,"end_15_4": 93402,
"start_16_1": 93404,"end_16_1": 94515,
"start_16_3": 94519,"end_16_3": 95629,
"start_16_4": 95634,"end_16_4": 97530,
"start_16_5": 97534,"end_16_5": 98469,
"start_17_1": 98472,"end_17_1": 99474,
"start_17_2": 99480,"end_17_2": 100574,
"start_17_3": 100583,"end_17_3": 101505,
"start_17_4": 101508,"end_17_4": 102939,
"start_18_1": 102947,"end_18_1": 105079,
"start_18_2": 105081,"end_18_2": 106253,
"start_18_3": 106257,"end_18_3": 107151,
"start_18_4": 107154,"end_18_4": 108579,
"start_19_1": 108592,"end_19_1": 109798,
"start_19_2": 109800,"end_19_2": 111030,
"start_19_3": 111056,"end_19_3": 112080,
"start_19_4": 112083,"end_19_4": 113280,
"start_20_2": 113286,"end_20_2": 114296,
"start_20_3": 114303,"end_20_3": 115227,
"start_21_1": 115231,"end_21_1": 116442,
"start_22_5": 116463,"end_22_5": 116478,
"start_23_1": 116489,"end_23_1": 116646,
"start_23_2": 116650,"end_23_2": 117456,
"start_23_3": 117462,"end_23_3": 117960,
"start_23_4": 117986,"end_23_4": 118728,
"start_23_5": 118731,"end_23_5": 119340,
"start_24_1": 119342,"end_24_1": 120011,
"start_24_2": 120013,"end_24_2": 120800,
"start_24_3": 120804,"end_24_3": 121559,
"start_24_4": 121561,"end_24_4": 122322,
"start_24_5": 122325,"end_24_5": 123150,
"start_25_1": 123154,"end_25_1": 124134,
"start_25_2": 124138,"end_25_2": 124140,
"start_25_3": 124142,"end_25_3": 124946,
"start_25_4": 124987,"end_25_4": 124526,
}
#124988,"end_25_4": 125324
for cycle in Cycles2Run:
    reject=[]
    peak_centres=[]
    peak_centres_error=[]
    peak_intensity=[]
    peak_intensity_error=[]
    uAmps=[]
    RunNo=[]
    index=0
    start=CycleDict['start_'+cycle]
    end=CycleDict['end_'+cycle]
    for i in range(start,end+1):
        if i == 95382:
            continue
        Load(Filename=Path2Data+'\\cycle_'+cycle+'\\PEARL00'+ str(i)+'.nxs', OutputWorkspace=str(i))
        ws = mtd[str(i)]
        run = ws.getRun()
        pcharge = run.getProtonCharge()
        if pcharge <1.0:
            reject.append(str(i))
            DeleteWorkspace(str(i))
            continue
        NormaliseByCurrent(InputWorkspace=str(i), OutputWorkspace=str(i))
        ExtractSingleSpectrum(InputWorkspace=str(i), WorkspaceIndex=index,
                              OutputWorkspace=str(i)+ '_' + str(index))
        CropWorkspace(InputWorkspace=str(i)+ '_' + str(index), Xmin=1100,
                      Xmax=19990, OutputWorkspace=str(i)+ '_' + str(index))
        DeleteWorkspace(str(i))
        # Some constraints included to prevent divergence
        fit_output = Fit(Function='name=Gaussian,Height=19.2327,'
                                  'PeakCentre=4843.8,Sigma=1532.64,'
                                  'constraints=(4600<PeakCentre<5200,1100<Sigma<1900);'
                                  'name=FlatBackground,A0=16.6099,ties=(A0=16.6099)',
                         InputWorkspace=str(i)+ '_' + str(index),
                         MaxIterations=1000, CreateOutput=True,
                         Output=str(i)+ '_' + str(index) + '_fit',
                         OutputCompositeMembers=True,
                         StartX=3800, EndX=6850, Normalise=True)
        paramTable = fit_output.OutputParameters
        # This catches some spectra where the alignment mirror
        # was accidentally in place
        # if paramTable.column(1)[0] < 10.0:
        #     DeleteWorkspace(str(i)+'_0_fit_Parameters')
        #     DeleteWorkspace(str(i)+'_0_fit_Workspace')
        #     DeleteWorkspace(str(i)+'_0')
        #     DeleteWorkspace(str(i)+'_0_fit_NormalisedCovarianceMatrix')
        #     reject.append(str(i))
        #     continue
        # This catches some fits where the fit constraints are ignored,
        # allowing the peak to fall far outside the nominal range
        if paramTable.column(1)[1] < 4600.0 or paramTable.column(1)[1] > 5200.0:
            DeleteWorkspace(str(i)+'_0_fit_Parameters')
            DeleteWorkspace(str(i)+'_0_fit_Workspace')
            DeleteWorkspace(str(i)+'_0')
            DeleteWorkspace(str(i)+'_0_fit_NormalisedCovarianceMatrix')
            reject.append(str(i))
            continue
        else:
            uAmps.append(pcharge)
            peak_centres.append(paramTable.column(1)[1])
            peak_centres_error.append(paramTable.column(2)[1])
            peak_intensity.append(paramTable.column(1)[0])
            peak_intensity_error.append(paramTable.column(2)[0])
            RunNo.append(str(i))
        DeleteWorkspace(str(i)+'_0')
        DeleteWorkspace(str(i)+'_0_fit_Parameters')
        DeleteWorkspace(str(i)+'_0_fit_Workspace')
        DeleteWorkspace(str(i)+'_0_fit_NormalisedCovarianceMatrix')
    combined_data=np.column_stack((RunNo, uAmps, peak_intensity,
                                   peak_intensity_error, peak_centres, peak_centres_error))
    np.savetxt(Path2Save+'\\peak_centres_'+cycle+'.csv',
               combined_data, delimiter=", ", fmt='% s',)
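For downstream ingestion, note that the CSV written above has no header row. A consumer could read it back along these lines (a sketch only; the column names are inferred from the np.column_stack order above, and pandas is just an assumed choice of tooling):

import pandas as pd

# Column order matches np.column_stack((RunNo, uAmps, peak_intensity,
# peak_intensity_error, peak_centres, peak_centres_error)) in the script above.
columns = ["RunNo", "uAmps", "peak_intensity",
           "peak_intensity_error", "peak_centres", "peak_centres_error"]

# skipinitialspace handles the extra spaces produced by delimiter=", " and fmt='% s'
df = pd.read_csv("peak_centres_25_4.csv", names=columns, skipinitialspace=True)
print(df.head())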