diff --git a/org.dawnsci.squishtests/suite_autoprocessing/Read Me.txt b/org.dawnsci.squishtests/suite_autoprocessing/Read Me.txt
new file mode 100644
index 0000000..6b5224b
--- /dev/null
+++ b/org.dawnsci.squishtests/suite_autoprocessing/Read Me.txt
@@ -0,0 +1,31 @@
+###################################################################################################
+# #
+# Copyright (c) 2017 Diamond Light Source Ltd. #
+# #
+# All rights reserved. This program and the accompanying materials are made available under the #
+# terms of the Eclipse Public License v1.0 which accompanies this distribution, and is available #
+# at http://www.eclipse.org/legal/epl-v10.html #
+# #
+###################################################################################################
+# #
+# To perform a validation, run the script called 'validator.sh'; by default it will run the #
+# validation from its current location. #
+# #
+# To override these defaults, read the header of the validator.sh script file for more #
+# information. #
+# #
+# Success will result in the following line being printed to the console: #
+# #
+# Success! The files are the same! #
+# #
+# and an exit code of 0 (zero) will be returned. If there is a problem, a number of console error #
+# strings and exit codes have been programmed in; again, read the header of the validator.sh #
+# script file for more information. #
+# #
+###################################################################################################
+# #
+# Last updated 2017-03-30 #
+# #
+# Author: Tim Snow (tim.snow@diamond.ac.uk) #
+# #
+###################################################################################################
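The Read Me above only promises a success string and an exit code of 0. A minimal usage sketch, assuming the suite has been checked out with its default_dataset directory intact and that the Diamond module system is available on the host:

    cd org.dawnsci.squishtests/suite_autoprocessing
    ./validator.sh
    status=$?
    if [ "$status" -eq 0 ]
    then
        echo "Autoprocessing validation passed"
    else
        echo "Autoprocessing validation failed with exit code $status"
    fi

Capturing the exit code into a variable before testing it keeps the value from being clobbered by the test itself.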
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061-Pilatus2M_SAXS.h5 b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061-Pilatus2M_SAXS.h5
new file mode 100644
index 0000000..b92ba08
Binary files /dev/null and b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061-Pilatus2M_SAXS.h5 differ
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061-Pilatus2M_WAXS.h5 b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061-Pilatus2M_WAXS.h5
new file mode 100644
index 0000000..25f48c1
Binary files /dev/null and b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061-Pilatus2M_WAXS.h5 differ
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061.nxs b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061.nxs
new file mode 100644
index 0000000..474f31e
Binary files /dev/null and b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061.nxs differ
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_calibration.nxs b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_calibration.nxs
new file mode 100644
index 0000000..83631e6
Binary files /dev/null and b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_calibration.nxs differ
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_iqReduction.json b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_iqReduction.json
new file mode 100644
index 0000000..b127f1b
--- /dev/null
+++ b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_iqReduction.json
@@ -0,0 +1 @@
+{"runDirectory": "/dls/i22/data/2017/sw17105-1", "name": "TestRun", "filePath": "path_to_file", "dataDimensions": [-1, -2], "processingPath": "/dls/i22/data/2017/sw17105-1/xml/templates/saxs_iQ_reduction_pipeline.nxs", "outputFilePath": "path_to_output", "deleteProcessingFile": false, "datasetPath": "/entry1/detector", "numberOfCores" : 1, "xmx" : 1024}
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_mask.nxs b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_mask.nxs
new file mode 100644
index 0000000..e89768f
Binary files /dev/null and b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_mask.nxs differ
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_pipeline.nxs b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_pipeline.nxs
new file mode 100644
index 0000000..1d68d10
Binary files /dev/null and b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_pipeline.nxs differ
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_processed.nxs b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_processed.nxs
new file mode 100644
index 0000000..2b6427f
Binary files /dev/null and b/org.dawnsci.squishtests/suite_autoprocessing/default_dataset/i22-363061_processed.nxs differ
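The i22-363061_iqReduction.json above shows the shape of configuration the headless processing application consumes; validator.sh later writes a file of the same shape on the fly. As a rough sketch of feeding such a file to the headless client by hand (the dawn binary path and -application name are copied from validator.sh below; the /scratch/validation paths are placeholders, and the "path_to_file" and "path_to_output" values in the JSON would need replacing with real paths first):

    /dls_sw/apps/DawnDiamond/2.4/builds/release-linux64/dawn -noSplash \
        -configuration /scratch/validation/.eclipse \
        -application org.dawnsci.commandserver.processing.processing \
        -data @none -path /scratch/validation/i22-363061_iqReduction.json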
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/h5FileComparer.sh b/org.dawnsci.squishtests/suite_autoprocessing/h5FileComparer.sh
new file mode 100755
index 0000000..86997ac
--- /dev/null
+++ b/org.dawnsci.squishtests/suite_autoprocessing/h5FileComparer.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+
+
+###################################################################################################
+# #
+# Copyright (c) 2017 Diamond Light Source Ltd. #
+# #
+# All rights reserved. This program and the accompanying materials are made available under the #
+# terms of the Eclipse Public License v1.0 which accompanies this distribution, and is available #
+# at http://www.eclipse.org/legal/epl-v10.html #
+# #
+###################################################################################################
+# #
+# This bash script is designed to take the output of data reduced by a development version of #
+# DAWN and compare it to data obtained from a stable version of DAWN, to deduce whether the #
+# processing pipeline (and therefore autoprocessing) has been broken accidentally. #
+# #
+###################################################################################################
+# #
+# Last updated 2017-03-30 #
+# #
+# Author: Tim Snow (tim.snow@diamond.ac.uk) #
+# #
+###################################################################################################
+
+# Script usage:
+#
+# h5FileComparer.sh fileOne fileTwo
+#
+# Script inputs are:
+#
+# fileOne - First file for comparing
+# fileTwo - Second file for comparing
+#
+# Script exit codes are:
+#
+# 0 - Success! The files were the same!
+# 1 - Failure! The files weren't the same...
+# 2 - Failure! The first and second files couldn't be opened!
+# 3 - Failure! The first file couldn't be opened!
+# 4 - Failure! The second file couldn't be opened!
+# 255 - Failure! You broke the script. Well done?
+
+
+# First do our 'imports' or module loadings...
+module load dawn > /dev/null 2>&1
+
+# This script will take two arguments, which are files, and compare them.
+# It will inform the user either way as to what happened.
+
+# Get the file inputs from the console
+fileOne="$1"
+fileTwo="$2"
+
+# Run h5ls to see if the files exist
+fileOneCheck="$(h5ls "$fileOne" 2>&1)"
+fileTwoCheck="$(h5ls "$fileTwo" 2>&1)"
+
+# Do the check
+if [ "$fileOneCheck" == "$fileOne: unable to open file" ] && [ "$fileTwoCheck" == "$fileTwo: unable to open file" ]
+then
+    echo "Can't open either of the files given in the two arguments"
+    exit 2
+
+elif [ "$fileOneCheck" == "$fileOne: unable to open file" ]
+then
+    echo "Can't open the file given in the first argument"
+    exit 3
+
+elif [ "$fileTwoCheck" == "$fileTwo: unable to open file" ]
+then
+    echo "Can't open the file given in the second argument"
+    exit 4
+fi
+
+# Then set up some strings so that the comparison responses are uniform
+successString="Success"
+failureString="Failure"
+
+# Compare the files, getting back the standard string
+fileCompare=$(cmp --silent <(h5ls -d "$fileOne"/entry/result/data) <(h5ls -d "$fileTwo"/entry/result/data) && echo "$successString" || echo "$failureString")
+
+# Do the appropriate action if the files are or are not equal and handling for crazy results too
+if [ "$fileCompare" == "$successString" ]
+then
+    echo "Success! The files are the same!"
+    exit 0
+elif [ "$fileCompare" == "$failureString" ]
+then
+    echo "Failure! The files are not the same!"
+    exit 1
+else
+    echo "Failure! Somehow you've broken this script"
+    exit 255
+fi
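h5FileComparer.sh documents its usage and exit codes in its header above. A short sketch of calling it directly and acting on the exit code, reusing the bundled default_dataset reference file and the validationResults.nxs path that validator.sh produces in tmp:

    ./h5FileComparer.sh default_dataset/i22-363061_processed.nxs tmp/validationResults.nxs
    case $? in
        0) echo "Reductions match" ;;
        1) echo "Reductions differ" ;;
        2|3|4) echo "One or both files could not be opened" ;;
        *) echo "Unexpected comparer failure" ;;
    esac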
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/pipelineEditor.py b/org.dawnsci.squishtests/suite_autoprocessing/pipelineEditor.py
new file mode 100644
index 0000000..0b08b6c
--- /dev/null
+++ b/org.dawnsci.squishtests/suite_autoprocessing/pipelineEditor.py
@@ -0,0 +1,131 @@
+###################################################################################################
+# #
+# Copyright (c) 2017 Diamond Light Source Ltd. #
+# #
+# All rights reserved. This program and the accompanying materials are made available under the #
+# terms of the Eclipse Public License v1.0 which accompanies this distribution, and is available #
+# at http://www.eclipse.org/legal/epl-v10.html #
+# #
+###################################################################################################
+# #
+# This Python script is designed to copy and modify a DAWN pipeline file so that it contains the #
+# correct calibration and mask file locations, ready for the headless DAWN client to reduce an #
+# input diffraction image for comparison against a 'model' reduction file produced by a known #
+# good version of DAWN. #
+# #
+###################################################################################################
+# #
+# Last updated 2017-03-30 #
+# #
+# Author: Tim Snow (tim.snow@diamond.ac.uk) #
+# #
+###################################################################################################
+
+
+# Script usage:
+#
+# pipelineEditor.py workingDirectory inputPipelineFilePath pathToCalibrationFile pathToMaskFile
+#
+# Script inputs are:
+#
+# workingDirectory - Full path to the current working directory, must have write access!
+# inputPipelineFilePath - Full path to the model pipeline file
+# pathToCalibrationFile - Full path to the diffraction image calibration file
+# pathToMaskFile - Full path to the diffraction image mask file
+#
+# Script exit codes are:
+#
+# 0 - Success!
+# 1 - Wrong number of inputs
+# 2 - The working directory doesn't exist
+# 3 - The given pipeline file path doesn't work
+# 4 - The given calibration file path doesn't work
+# 5 - The given mask file path doesn't work
+# 6 - No tmp directory in the working directory and couldn't make one either
+# 7 - Couldn't copy the model pipeline file to tmp
+# 8 - Couldn't edit the copied pipeline file
+
+
+# Starting with some imports!
+import h5py
+from shutil import copy
+from os import mkdir, sep
+from sys import argv, exit
+from os.path import exists, isdir
+
+# Check our inputs, if they match assign them and if not send out a warning!
+if (len(argv) == 5):
+    workingDirectory = str(argv[1])
+    inputPipelineFilePath = str(argv[2])
+    pathToCalibrationFile = str(argv[3])
+    pathToMaskFile = str(argv[4])
+else:
+    print "\nThis script will only run if given four arguments which should be:\n\n- The working directory\n- A full path to a NeXus pipeline file\n- A full path to a NeXus calibration file\n- A full path to a NeXus mask file\n\nPlease try again.\n"
+    exit(1)
+
+# Now exhaustively check the given arguments
+if (isdir(workingDirectory) == False):
+    print "The given path to the working directory leads nowhere!"
+    exit(2)
+
+if (exists(inputPipelineFilePath) == False):
+    print "The given path to the pipeline file leads nowhere!"
+    exit(3)
+
+if (exists(pathToCalibrationFile) == False):
+    print "The given path to the calibration file leads nowhere!"
+    exit(4)
+
+if (exists(pathToMaskFile) == False):
+    print "The given path to the mask file leads nowhere!"
+    exit(5)
+
+# Check that we have a tmp directory and, if necessary, set one up
+tmpDirectory = workingDirectory + "/tmp"
+
+if (isdir(tmpDirectory) == False):
+    try:
+        mkdir(tmpDirectory)
+    except:
+        print "There wasn't a tmp directory in the working directory and one couldn't be created"
+        exit(6)
+
+# Copy the pipeline file so that we can customise it
+pipelineFilePath = inputPipelineFilePath.split(sep)
+outputPipelineFilePath = tmpDirectory + "/validationPipeline.nxs"
+
+try:
+    copy(inputPipelineFilePath, outputPipelineFilePath)
+except:
+    print "There was a problem copying the pipeline file"
+    exit(7)
+
+# Then try to edit the variables in the pipeline so that they work with our current setup
+try:
+    neXusFileReference = h5py.File(outputPipelineFilePath)
+
+    # The hard coded, internal, NeXus paths
+    neXusCalibrationPath = "/entry/process/4/data"
+    neXusMaskPath = "/entry/process/5/data"
+
+    # Making room
+    del(neXusFileReference[neXusCalibrationPath])
+    del(neXusFileReference[neXusMaskPath])
+
+
+    # For our new variables!
+    neXusFileReference[neXusCalibrationPath] = u'{"filePath":"' + pathToCalibrationFile + '"}'
+    neXusFileReference[neXusMaskPath] = u'{"filePath":"' + pathToMaskFile + '"}'
+
+    neXusFileReference.close()
+except:
+    print "There was a problem opening the copied pipeline file"
+    exit(8)
+
+# A bit of user feedback
+print "\nSuccessfully copied the pipeline to tmp and edited the internal file paths to:\n"
+print "    Calibration file: " + pathToCalibrationFile
+print "    Mask file: " + pathToMaskFile + "\n"
+
+# Success!
+exit(0)
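pipelineEditor.py is normally driven by validator.sh, but its header also documents a standalone invocation. A hedged sketch of running it by hand against the bundled default dataset, then listing the rewritten calibration and mask entries with h5ls (the h5ls step is an extra illustration, not part of the suite; the working directory must be writable):

    module load python/ana > /dev/null 2>&1
    python pipelineEditor.py "$PWD" \
        "$PWD/default_dataset/i22-363061_pipeline.nxs" \
        "$PWD/default_dataset/i22-363061_calibration.nxs" \
        "$PWD/default_dataset/i22-363061_mask.nxs"
    h5ls -d tmp/validationPipeline.nxs/entry/process/4/data
    h5ls -d tmp/validationPipeline.nxs/entry/process/5/data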
diff --git a/org.dawnsci.squishtests/suite_autoprocessing/validator.sh b/org.dawnsci.squishtests/suite_autoprocessing/validator.sh
new file mode 100755
index 0000000..7dd1039
--- /dev/null
+++ b/org.dawnsci.squishtests/suite_autoprocessing/validator.sh
@@ -0,0 +1,166 @@
+#!/bin/bash
+
+
+###################################################################################################
+# #
+# Copyright (c) 2017 Diamond Light Source Ltd. #
+# #
+# All rights reserved. This program and the accompanying materials are made available under the #
+# terms of the Eclipse Public License v1.0 which accompanies this distribution, and is available #
+# at http://www.eclipse.org/legal/epl-v10.html #
+# #
+###################################################################################################
+# #
+# This bash script is designed to create the pipeline and JSON files for DAWN headless #
+# processing, using either their default locations or locations supplied as input arguments. #
+# DAWN is then invoked and the pipeline is run. The result of this reduction is then compared #
+# against a known working state to see if the processing in DAWN has been broken. #
+# #
+###################################################################################################
+# #
+# Last updated 2017-04-24 #
+# #
+# Author: Tim Snow (tim.snow@diamond.ac.uk) #
+# #
+###################################################################################################
+
+# Script usage:
+#
+# validator.sh
+# (Defaults to internal values)
+#
+# validator.sh workingDirectory neXusDataFile neXusPipelineFile jsonFile neXusCalibrationFile neXusMaskFile neXusResultsFile
+# (Overrides internal values)
+#
+# Script inputs are:
+#
+# workingDirectory - The directory that the script shall work from, must be writable by the script
+# neXusDataFile - The raw diffraction data for reducing
+# neXusPipelineFile - The model pipeline that will be modified and used for the reduction
+# jsonFile - The JSON file that accompanies the pipeline file
+# neXusCalibrationFile - The calibration file for the diffraction data
+# neXusMaskFile - The mask file for the diffraction data
+# neXusResultsFile - The model result file, which the freshly reduced data will be compared against
+#
+# Script exit codes are:
+#
+# 0 - Success! The files were the same!
+# 1 - Failure! Couldn't make the tmp folder in the working directory
+# 2 - Failure! Couldn't set up the processing pipeline in the tmp directory
+# 3 - Failure! Couldn't set up the JSON file in the tmp directory
+# 4 - Failure! Couldn't perform the data reduction in DAWN
+# 5 - Failure! File comparison failed
+# 6 - Failure! Couldn't delete tmp from working directory
+
+
+# We have to fetch the current script location BEFORE loading the modules or we lose the path!
+workingDirectory="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+# Next we do our 'imports' or module loadings...
+module load dawn/nightly > /dev/null 2>&1
+module load python/ana > /dev/null 2>&1
+
+# Then a bit of space
+echo ""
+
+# If we've been given enough variables, let's listen to the invoker and override our default locations
+if [ $# -eq 7 ]
+then
+    workingDirectory="$1"
+    tmpDirectory="$workingDirectory/tmp"
+    jsonFile="$4"
+    neXusCalibrationFile="$5"
+    neXusDataFile="$2"
+    neXusMaskFile="$6"
+    neXusPipelineFile="$3"
+    neXusResultsFile="$7"
+    cd "$workingDirectory"
+    echo "Hard coded default locations overridden"
+else
+    # If not let's be a bit smart, as the folder might have moved, but overall use our defaults
+    tmpDirectory="$workingDirectory/tmp"
+    jsonFile="$workingDirectory/tmp/validation.json"
+    neXusCalibrationFile="$workingDirectory/default_dataset/i22-363061_calibration.nxs"
+    neXusDataFile="$workingDirectory/default_dataset/i22-363061.nxs"
+    neXusMaskFile="$workingDirectory/default_dataset/i22-363061_mask.nxs"
+    neXusPipelineFile="$workingDirectory/default_dataset/i22-363061_pipeline.nxs"
+    neXusResultsFile="$workingDirectory/default_dataset/i22-363061_processed.nxs"
+    echo "Using default locations as not enough arguments given"
+fi
+
+# First, try to set up a temporary directory
+if [ ! -d "$tmpDirectory" ]
+then
+    mkdir "$tmpDirectory" > /dev/null 2>&1
+    exitCode="$?"
+
+    if [ $exitCode -ne 0 ]
+    then
+        echo ""
+        echo "Couldn't create a tmp directory in the working directory, exit code: $exitCode, see mkdir manpage. Exiting here."
+        echo ""
+        exit 1
+    fi
+fi
+
+# Next, try to make our new NeXus pipeline file, complete with customised paths for the calibration and mask files
+python $workingDirectory/pipelineEditor.py $workingDirectory $neXusPipelineFile $neXusCalibrationFile $neXusMaskFile
+exitCode="$?"
+
+if [ $exitCode -ne 0 ]
+then
+    echo ""
+    echo "Couldn't set up the processing pipeline in the tmp directory, exit code: $exitCode, see pipelineEditor.py for details. Exiting here."
+    echo ""
+    exit 2
+fi
+
+# Try to set up the processing JSON file, as required, and stick it in our tmp directory
+echo '{"runDirectory": "'"$workingDirectory"'/tmp", "name": "Validation Test", "filePath": "'"$neXusDataFile"'", "dataDimensions": [-1, -2], "processingPath": "'"$tmpDirectory"'/validationPipeline.nxs", "outputFilePath": "'"$workingDirectory"'/tmp/validationResults.nxs", "deleteProcessingFile": false, "datasetPath": "/entry1/detector", "numberOfCores" : 1, "xmx" : 1024}' > $jsonFile
+exitCode="$?"
+
+if [ $exitCode -ne 0 ]
+then
+    echo ""
+    echo "Couldn't set up the processing JSON in the tmp directory, exit code: $exitCode, see echo manpage for details. Exiting here."
+    echo ""
+    exit 3
+fi
+
+# Then perform the data reduction using DAWN
+/dls_sw/apps/DawnDiamond/2.4/builds/release-linux64/dawn -noSplash -configuration $workingDirectory/tmp/.eclipse -application org.dawnsci.commandserver.processing.processing -data @none -path $jsonFile > $workingDirectory/tmp/log.txt 2>&1
+exitCode="$?"
+
+if [ $exitCode -ne 0 ]
+then
+    echo ""
+    echo "Couldn't perform the data reduction using DAWN, exit code: $exitCode, see the DAWN manual for details. Exiting here."
+    echo ""
+    exit 4
+fi
+
+$workingDirectory/h5FileComparer.sh "$neXusResultsFile" "$workingDirectory/tmp/validationResults.nxs"
+exitCode="$?"
+
+if [ $exitCode -ne 0 ]
+then
+    echo ""
+    echo "The file comparison failed, exit code: $exitCode, see h5FileComparer.sh for details. Exiting here."
+    echo ""
+    exit 5
+fi
+
+rm -R $tmpDirectory
+exitCode="$?"
+
+if [ $exitCode -ne 0 ]
+then
+    echo ""
+    echo "Couldn't delete the tmp directory in the working directory, exit code: $exitCode, see rm manpage for details. Exiting here."
+    echo ""
+    exit 6
+fi
+
+# If we've gotten this far, success! Give some space then exit.
+echo ""
+exit 0
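The usage block in validator.sh above lists both invocation forms. As a sketch of the seven-argument override form, with the bundled default_dataset files reused purely as placeholder paths (argument order is workingDirectory, data, pipeline, JSON, calibration, mask, results):

    ./validator.sh "$PWD" \
        "$PWD/default_dataset/i22-363061.nxs" \
        "$PWD/default_dataset/i22-363061_pipeline.nxs" \
        "$PWD/tmp/validation.json" \
        "$PWD/default_dataset/i22-363061_calibration.nxs" \
        "$PWD/default_dataset/i22-363061_mask.nxs" \
        "$PWD/default_dataset/i22-363061_processed.nxs" \
        && echo "Autoprocessing validation passed"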