From: cervenansky Date: Wed, 24 Jul 2013 09:25:56 +0000 (+0200) Subject: Merge branch 'PacsConnection' of ssh://git.creatis.insa-lyon.fr/clitk into PacsConnection X-Git-Tag: v1.4.0~2^2~10 X-Git-Url: https://git.creatis.insa-lyon.fr/pubgit/?a=commitdiff_plain;h=f45412cabff9aab013b0259377ab9f80e4987c02;hp=60aac9e89d0de6bb5dc26fa15f3bf04141ec7158;p=clitk.git Merge branch 'PacsConnection' of ssh://git.creatis.insa-lyon.fr/clitk into PacsConnection --- diff --git a/README.md b/README.md new file mode 100644 index 0000000..8f03a23 --- /dev/null +++ b/README.md @@ -0,0 +1,4 @@ + + +VV, a 4D image viewer, see : http://vv.creatis.insa-lyon.fr + diff --git a/cluster_tools/gate_common.sh b/cluster_tools/gate_common.sh new file mode 100644 index 0000000..ccad419 --- /dev/null +++ b/cluster_tools/gate_common.sh @@ -0,0 +1,115 @@ + +set -u + +# print error message and exit immediately +programname="$(basename ${0})" +function error { + echo "${programname} **ERROR** $1" + exit 1 +} + +# ensure a valid proxy is present for at least one hour +# if no proxy is found, try to create a new one +# return 0 if a valid proxy is found or created +# else return voms-proxy-init error code +function ensure_proxy { + voms-proxy-info --exists -valid 1:0 > /dev/null && return 0 + voms-proxy-init --voms biomed -valid 24:00 || exit 1 +} + +# print prompt to ensure that the user want to continue further +# the user has to answer with 'y' to continue +function check_user { + prompt="${1:-is that correct?}" + read -p "${prompt} [y/n] " answer + test "${answer}" == "y" && return 0 + test "${answer}" == "n" && return 1 + check_user "${prompt}" +} + +# checks if the lfn file exists +function file_exists { + lfnfile="${1:?"provide lfn to file"}" + lfc-ls ${lfnfile} 2>&1 > /dev/null +} + + +# upload file to grid storage element +# source can be a relative or an absolute path to the source file +# dest must be the lfn to the target **file** (not the directory) +# if dest already exists, it prompts the user for overwritting +function upload_file { + sourcefile=${1:?"provide source file"} + destlfn=${2:?"provide destination file lfn"} + sourcefile="$(readlink -f "${sourcefile}")" # convert to absolute path + test -f "${sourcefile}" || error "can't find ${sourcefile}" + echo "uploading ${sourcefile} to ${destlfn}" + if file_exists "${destlfn}"; then + check_user "${destlfn} already exists. overwrite it?" || return 2 + lcg-del -a "lfn:${destlfn}" || error "lcg-del error" + fi + echo "lets roll" + + local pending_ses=${SES} + local registered=false + while [ "x${pending_ses}" != "x" ] + do + #select a se from list + local S=`echo ${pending_ses} | awk '{print $1}'` + #update list of SEs + local new_list="" + for i in `echo ${pending_ses}` + do + if [ "$i" != "${S}" ] + then + new_list="${new_list} ${i}" + fi + done + pending_ses=${new_list} + local TEMP=`mktemp lcg-XXXXX` + if [ "${registered}" = "false" ] + then + echo -n "Registering release to ${S}..." + lcg-cr -v -d ${S} -l "lfn:${destlfn}" "file:${sourcefile}" &>${TEMP} + if [ $? != 0 ] + then + echo -e "\t [\033[31m FAILED \033[0m]" + cat ${TEMP} + \rm ${TEMP} + else + echo -e "\t [\033[32m OK \033[0m]" + registered=true + \rm ${TEMP} + fi + else + echo -n "Replicating release to ${S}..." + lcg-rep -v -d ${S} "lfn:${destlfn}" &>${TEMP} + if [ $? 
!= 0 ]
+            then
+                echo -e "\t [\033[31m FAILED \033[0m]"
+                cat ${TEMP}
+                \rm ${TEMP}
+            else
+                echo -e "\t [\033[32m OK \033[0m]"
+                \rm ${TEMP}
+            fi
+        fi
+    done
+}
+
+
+# common paths used
+lfnbase="/grid/biomed/creatis/fgate/"
+lfnrelease="/grid/biomed/creatis/vip/data/groups/GateLab/releases/"
+lfnworkflow="${lfnbase}workflow/"
+lfngasw="${lfnbase}gasw/"
+lfnscript="${lfnbase}bin/"
+
+# list of SEs used for storage. Don't modify this list unless you know
+# what you're doing. Replicating the release in a bad place (e.g. on a
+# remote continent) can dramatically slow down the transfers
+SES="ccsrm02.in2p3.fr sbgse1.in2p3.fr marsedpm.in2p3.fr"
+
+# define the prefix for uploaded files
+# defaults to the local machine username
+prefix="${USER:?"USER must be set"}_"
diff --git a/cluster_tools/gate_job_cluster.job b/cluster_tools/gate_job_cluster.job
new file mode 100644
index 0000000..1242dfd
--- /dev/null
+++ b/cluster_tools/gate_job_cluster.job
@@ -0,0 +1,102 @@
+#!/bin/bash
+#
+# MACRODIR
+# MACROFILE
+# RELEASEDIR
+# OUTPUTDIR
+# INDEX
+# INDEXMAX
+# PARAM
+#
+#PBS -r n
+#PBS -l walltime=100:00:00
+#PBS -j oe
+
+#env
+#pwd
+#exit 1
+
+
+function error {
+echo "ERROR: $1"
+exit 1
+}
+
+function warning {
+echo "WARNING: $1"
+}
+
+test -f ${HOME}/.bashrc && echo "Sourcing bashrc" && source ${HOME}/.bashrc
+set -u
+
+echo "Checking"
+uname -a
+echo "MACRODIR=${MACRODIR:?"unknown MACRODIR"}"
+echo "MACROFILE=${MACROFILE:?"unknown MACROFILE"}"
+echo "RELEASEDIR=${RELEASEDIR:?"unknown RELEASEDIR"}"
+echo "OUTPUTDIR=${OUTPUTDIR:?"unknown OUTPUTDIR"}"
+echo "PBS_JOBID=${PBS_JOBID}"
+echo "INDEX=${INDEX}"
+echo "INDEXMAX=${INDEXMAX}"
+echo "PARAM=${PARAM}"
+
+if test "$RELEASEDIR" = "NONE"
+then
+    echo Using $(which Gate)
+    ldd $(which Gate)
+else
+    test -d "${RELEASEDIR}" || error "can't find release"
+    md5sum ${RELEASEDIR}/Gate
+    test -f ${RELEASEDIR}/libGate.so && md5sum ${RELEASEDIR}/libGate.so
+
+    echo "Finding libraries"
+    ROOTLIBS="${RELEASEDIR}/libCore.so:${RELEASEDIR}/libCint.so:${RELEASEDIR}/libRIO.so:${RELEASEDIR}/libNet.so:${RELEASEDIR}/libHist.so:${RELEASEDIR}/libGraf.so:${RELEASEDIR}/libGraf3d.so:${RELEASEDIR}/libGpad.so:${RELEASEDIR}/libTree.so:${RELEASEDIR}/libRint.so:${RELEASEDIR}/libPostscript.so:${RELEASEDIR}/libMatrix.so:${RELEASEDIR}/libPhysics.so:${RELEASEDIR}/libMathCore.so:${RELEASEDIR}/libThread.so:"
+    echo "ROOTLIBS=${ROOTLIBS}"
+    G4LIBS="$(for library in $(find "${RELEASEDIR}" -maxdepth 1 -name 'libG4*.so'); do echo -n "${library}:"; done)"
+    echo "G4LIBS=${G4LIBS}"
+    CLHEPLIBS="$(for library in $(find "${RELEASEDIR}" -maxdepth 1 -name 'libCLHEP*.so'); do echo -n "${library}:"; done)"
+    echo "CLHEPLIBS=${CLHEPLIBS}"
+    GATELIBS=""
+    test -f ${RELEASEDIR}/libGate.so && GATELIBS="${RELEASEDIR}/libGate.so:"
+    echo "GATELIBS=${GATELIBS}"
+fi
+test -d "${MACRODIR}" && test -d "${MACRODIR}/mac" || error "invalid macro"
+
+
+echo "Copying inputs"
+LOCALMACRODIR=$(mktemp -d)
+trap "mv output ${OUTPUTDIR}/output.${PBS_JOBID%%.*} ; rm -r ${LOCALMACRODIR} ; exit 1" 1 2 3 15
+cd ${LOCALMACRODIR}
+cp -r -L "${MACRODIR}"/{data,mac} .
+mkdir output + +# Enforce one thread +ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 + +echo "Lauching macro" +date +if test "$RELEASEDIR" = "NONE" +then + Gate ${PARAM} ${MACROFILE} || error "gate failed" +else + LD_PRELOAD="${ROOTLIBS}${G4LIBS}${CLHEPLIBS}${GATELIBS}" \ + G4LEVELGAMMADATA="${RELEASEDIR}/PhotonEvaporation2.1" \ + G4RADIOACTIVEDATA="${RELEASEDIR}/RadioactiveDecay3.3" \ + G4LEDATA="${RELEASEDIR}/G4EMLOW6.19" \ + G4NEUTRONHPDATA="${RELEASEDIR}/G4NDL3.14" \ + G4ABLADATA="${RELEASEDIR}/G4ABLA3.0" \ + G4REALSURFACEDATA="${RELEASEDIR}/RealSurface1.0" \ + G4NEUTRONXSDATA="${RELEASEDIR}/G4NEUTRONXS1.0" \ + G4PIIDATA="${RELEASEDIR}/G4PII1.2" \ + /usr/bin/time --format="real %es\nuser %Us\nsys %Ss\nmaxmem %Mk" \ + ${RELEASEDIR}/Gate ${PARAM} ${MACROFILE} \ + || error "gate failed" +fi + +echo "Copying data back" +mv output "${OUTPUTDIR}/output.${PBS_JOBID%%.*}" + +echo "Cleanup" +rm -r ${LOCALMACRODIR} + +echo "Success!!!" diff --git a/cluster_tools/gate_make_release.sh b/cluster_tools/gate_make_release.sh new file mode 100755 index 0000000..363d583 --- /dev/null +++ b/cluster_tools/gate_make_release.sh @@ -0,0 +1,97 @@ +#!/bin/bash + +function usage { + echo "ERROR: $1" + echo "$(basename $0) [name]" + exit 1 +} + +GATENAME="${1:-Gate}" +which "${GATENAME}" > /dev/null || usage "cant locate ${GATENAME} binary" +GATEBIN="$(which ${GATENAME})" +echo "${GATENAME} executable is ${GATEBIN}" + +test -d "${G4NEUTRONHPDATA}" || usage "can't locate neutron data. please set G4NEUTRONHPDATA" +echo "neutron data is ${G4NEUTRONHPDATA}" +test -d "${G4LEVELGAMMADATA}" || usage "can't locate gamma data. please set G4LEVELGAMMADATA" +echo "gamma data is ${G4LEVELGAMMADATA}" +test -d "${G4RADIOACTIVEDATA}" || usage "can't locate radioactivity data. please set G4RADIOACTIVEDATA" +echo "radioactivity data is ${G4RADIOACTIVEDATA}" +test -d "${G4ABLADATA}" || usage "can't locate abla data. please set G4ABLADATA" +echo "abla data is ${G4ABLADATA}" +test -d "${G4LEDATA}" || usage "can't locate em data. please set G4LEDATA" +echo "em data is ${G4LEDATA}" +test -d "${G4REALSURFACEDATA}" || usage "can't locate surface data. please set G4REALSURFACEDATA" +echo "surface data is ${G4REALSURFACEDATA}" +test -d "${G4NEUTRONXSDATA}" || usage "can't locate neutron xs data. please set G4NEUTRONXSDATA" +echo "neutron xs data is ${G4NEUTRONXSDATA}" +test -d "${G4PIIDATA}" || usage "can't locate pii data. 
please set G4PIIDATA" +echo "pii data is ${G4PIIDATA}" + +echo "Cleaning previous build" +rm -fr $(basename ${G4NEUTRONHPDATA}) +rm -fr $(basename ${G4LEVELGAMMADATA}) +rm -fr $(basename ${G4RADIOACTIVEDATA}) +rm -fr $(basename ${G4ABLADATA}) +rm -fr $(basename ${G4LEDATA}) +rm -fr $(basename ${G4REALSURFACEDATA}) +rm -fr test_libs gate_shared_libs.tar.gz gate_release.tar.gz + +echo "Copying libraries" +function get_deps { + ldd $1 | while read library; do + libfile="$(echo ${library} | awk -F' ' '/=> \// {print $3}')" + test $libfile || continue # didn't macht regex + test -f "test_libs/$(basename ${libfile})" && continue # already exists + echo "${libfile}" + cp "${libfile}" "test_libs/$(basename ${libfile})" + get_deps "${libfile}" + done +} + +mkdir test_libs +get_deps "${GATEBIN}" + +echo "Removing unused libraries" +rm -f test_libs/libdl.so* +rm -f test_libs/libm.so* +rm -f test_libs/libstdc++.so* +rm -f test_libs/libgcc_s.so* +rm -f test_libs/libpthread.so* +rm -f test_libs/libc.so* + +echo "Zipping libraries" +( + cd test_libs + tar -czvf ../gate_shared_libs.tar.gz ** +) || usage "can't create libraries tar" + +echo "Copying binary" +cp "${GATEBIN}" . + +echo "Copying data" +cp -r "${G4NEUTRONHPDATA}" . +cp -r "${G4LEVELGAMMADATA}" . +cp -r "${G4RADIOACTIVEDATA}" . +cp -r "${G4ABLADATA}" . +cp -r "${G4LEDATA}" . +cp -r "${G4REALSURFACEDATA}" . +cp -r "${G4NEUTRONXSDATA}" . +cp -r "${G4PIIDATA}" . + +echo "Making release" +tar -czvf gate_release.tar.gz \ + ${GATENAME} gate_shared_libs.tar.gz \ + $(basename ${G4NEUTRONHPDATA}) \ + $(basename ${G4LEVELGAMMADATA}) \ + $(basename ${G4RADIOACTIVEDATA}) \ + $(basename ${G4ABLADATA}) \ + $(basename ${G4LEDATA}) \ + $(basename ${G4REALSURFACEDATA}) \ + $(basename ${G4NEUTRONXSDATA}) \ + $(basename ${G4PIIDATA}) \ + || usage "can't create release zip" + +chmod -w gate_release.tar.gz + + diff --git a/cluster_tools/gate_power_merge.sh b/cluster_tools/gate_power_merge.sh new file mode 100755 index 0000000..fc0e6aa --- /dev/null +++ b/cluster_tools/gate_power_merge.sh @@ -0,0 +1,453 @@ +#!/usr/bin/env bash + +set -u + +function error { +echo "ERROR: $1" +exit 1 +} + +warning_count=0 +function warning { +let "warning_count++" +echo "MERGE_WARNING: $1" +} + +function start_bar { +count_max="${1:?"provide count max"}" +} + +function update_bar { +local count="${1:?"provide count"}" +local message="${2:?"provide message"}" +local percent=$(echo "100*${count}/${count_max}" | bc) +#printf "[%03d/%03d] %3d%% %-80.80s\r" ${count} ${count_max} ${percent} "${message}" +printf "[%03d/%03d] %3d%% %-80.80s\n" ${count} ${count_max} ${percent} "${message}" +} + +function end_bar { +unset count_max +#echo -ne '\n' +} + +function check_interfile { +local input_interfile="${1:?"provide input interfile"}" + +grep -qs '!INTERFILE :=' "${input_interfile}" || return 1 + +local header_byte_size=$(awk -F' ' ' +BEGIN { zsize = 0; } +/matrix size/ && $3 == "[1]" { xsize = $5; } +/matrix size/ && $3 == "[2]" { ysize = $5; } +/number of projections/ { zsize += $5; } +/number of bytes per pixel/ { byte_per_pixel = $7; } +END { print xsize * ysize * zsize * byte_per_pixel; }' "${input_interfile}") + +local raw_interfile="$(dirname "${input_interfile}")/$(awk -F' := ' '/name of data file/ { print $2; }' "${input_interfile}")" + +test -f "${raw_interfile}" || return 1 +test $(stat -c%s "${raw_interfile}") -eq ${header_byte_size} || return 1 +} + +function write_mhd_header { +local input_interfile="${1:?"provide input interfile"}" +local output_mhd="$(dirname 
"${input_interfile}")/$(basename "${input_interfile}" ".hdr").mhd" + +check_interfile "${input_interfile}" || error "${input_interfile} isn't an interfile image" + +local header_start='ObjectType = Image +NDims = 3 +AcquisitionDate = none +BinaryData = True +BinaryDataByteOrderMSB = False +CompressedData = False +TransformMatrix = 1 0 0 0 1 0 0 0 1 +Offset = 0 0 0 +CenterOfRotation = 0 0 0 +DistanceUnits = mm +AnatomicalOrientation = RIP' + +echo "${header_start}" > "${output_mhd}" + +awk -F' ' ' +/scaling factor/ && $4 == "[1]" { xspacing = $6; } +/scaling factor/ && $4 == "[2]" { yspacing = $6; } +END { print "ElementSpacing = " xspacing " " yspacing " 1"; }' "${input_interfile}" >> "${output_mhd}" + +awk -F' ' ' +BEGIN { zsize = 0; } +/matrix size/ && $3 == "[1]" { xsize = $5; } +/matrix size/ && $3 == "[2]" { ysize = $5; } +/number of projections/ { zsize += $5; } +END { print "DimSize = " xsize " " ysize " " zsize; }' "${input_interfile}" >> "${output_mhd}" + +awk -F' := ' ' +/number format/ { format = $2; } +/number of bytes per pixel/ { byte_per_pixel = $2 } +END { +if (format == "unsigned integer" && byte_per_pixel == 8) { print "ElementType = MET_ULONG"; exit }; +if (format == "unsigned integer" && byte_per_pixel == 4) { print "ElementType = MET_UINT"; exit }; +if (format == "unsigned integer" && byte_per_pixel == 2) { print "ElementType = MET_USHORT"; exit }; +if (format == "unsigned integer" && byte_per_pixel == 1) { print "ElementType = MET_UCHAR"; exit }; +if (format == "integer" && byte_per_pixel == 8) { print "ElementType = MET_LONG"; exit }; +if (format == "integer" && byte_per_pixel == 4) { print "ElementType = MET_INT"; exit }; +if (format == "integer" && byte_per_pixel == 2) { print "ElementType = MET_SHORT"; exit }; +if (format == "integer" && byte_per_pixel == 1) { print "ElementType = MET_CHAR"; exit }; +if (format == "float" && byte_per_pixel == 8) { print "ElementType = MET_FLOAT"; exit }; +if (format == "float" && byte_per_pixel == 4) { print "ElementType = MET_DOUBLE"; exit }; +print "ElementType = MET_INT"; +}' "${input_interfile}" >> "${output_mhd}" + +awk -F' := ' ' +/name of data file/ { print "ElementDataFile = " $2; }' "${input_interfile}" >> "${output_mhd}" +} + +rootMerger="clitkMergeRootFiles" +test -x "./clitkMergeRootFiles" && rootMerger="./clitkMergeRootFiles" + +function merge_root { +local merged="$1" +shift +echo " ${indent}entering root merger" +echo " ${indent}merger is ${rootMerger}" +echo " ${indent}creating ${merged}" +#echo "######## $#" +#echo "######## $*" + +if test $# -eq 1 +then + echo " ${indent}just one partial file => just copy it" + cp "$1" "${merged}" + return +fi + +local count=0 +local arguments=" -o ${merged}" +while test $# -gt 0 +do + local partial="$1" + shift + let count++ + local arguments=" -i ${partial} ${arguments}" +done +${rootMerger} ${arguments} > /dev/null || error "error while calling ${rootMerger}" +echo " ${indent}merged ${count} files" +} + +statMerger="mergeStatFile.py" +test -x "./mergeStatFile.sh" && statMerger="./mergeStatFile.sh" + +function merge_stat { +local merged="$1" +shift +echo " ${indent}entering stat merger" +echo " ${indent}merger is ${statMerger}" +echo " ${indent}creating ${merged}" +local count=0 +start_bar $# +while test $# -gt 0 +do + local partial="$1" + shift + let count++ + + if test ! 
-f "${merged}" + then + update_bar ${count} "copying first partial result ${partial}" + cp "${partial}" "${merged}" + continue + fi + + update_bar ${count} "adding ${partial}" + ${statMerger} -i "${merged}" -j "${partial}" -o "${merged}" 2> /dev/null > /dev/null || warning "error while calling ${statMerger}" +done +end_bar +echo " ${indent}merged ${count} files" +} + +doseMerger="mergeDosePerEnegryFile.sh" +test -x "./mergeDosePerEnergyFile.sh" && doseMerger="./mergeDosePerEnergyFile.sh" + +function merge_dose { +local merged="$1" +shift +echo " ${indent}entering dose merger" +echo " ${indent}merger is ${doseMerger}" +echo " ${indent}creating ${merged}" +local count=0 +start_bar $# +while test $# -gt 0 +do + local partial="$1" + shift + let count++ + + if test ! -f "${merged}" + then + update_bar ${count} "copying first partial result ${partial}" + cp "${partial}" "${merged}" + continue + fi + + update_bar ${count} "adding ${partial}" + ${doseMerger} -i "${merged}" -j "${partial}" -o "${merged}" 2> /dev/null > /dev/null || warning "error while calling ${doseMerger}" +done +end_bar +echo " ${indent}merged ${count} files" +} + +txtImageMerger="clitkMergeAsciiDoseActor" +test -f "./clitkMergeAsciiDoseActor" && txtImageMerger="./clitkMergeAsciiDoseActor" + +function merge_txt_image { +local merged="$1" +shift +echo " ${indent}entering text image merger" +echo " ${indent}merger is ${txtImageMerger}" +echo " ${indent}creating ${merged}" +local count=0 +start_bar $# +while test $# -gt 0 +do + local partial="$1" + shift + let count++ + + if test ! -f "${merged}" + then + update_bar ${count} "copying first partial result ${partial}" + cp "${partial}" "${merged}" + continue + fi + + update_bar ${count} "adding ${partial}" + local header="$(cat "${merged}" | head -n 6)" + local tmp="$(mktemp)" + ${txtImageMerger} -i "${partial}" -j "${merged}" -o "${tmp}" 2> /dev/null > /dev/null || warning "error while calling ${txtImageMerger}" + echo "${header}" > "${merged}" + grep -v '## Merge' "${tmp}" >> "${merged}" + rm "${tmp}" +done +end_bar +echo " ${indent}merged ${count} files" +} + +hdrImageMerger="clitkImageArithm" +test -x "./clitkImageArithm" && hdrImageMerger="./clitkImageArithm" + +function merge_hdr_image { +local merged="$1" +local merged_bin="${merged%.*}.img" +shift +echo " ${indent}entering hdr image merger" +echo " ${indent}merger is ${hdrImageMerger}" +echo " ${indent}creating ${merged}" +local count=0 +start_bar $# +while test $# -gt 0 +do + local partial="$1" + local partial_bin="${partial%.*}.img" + shift + let count++ + + if test ! -f "${merged}" + then + update_bar ${count} "copying first partial result ${partial}" + cp "${partial}" "${merged}" + cp "${partial_bin}" "${merged_bin}" + continue + fi + + update_bar ${count} "adding ${partial}" + ${hdrImageMerger} -t 0 -i "${partial}" -j "${merged}" -o "${merged}" 2> /dev/null > /dev/null || warning "error while calling ${hdrImageMerger}" +done +end_bar +echo " ${indent}merged ${count} files" +} + +mhdImageMerger="clitkImageArithm" +test -x "./clitkImageArithm" && mhdImageMerger="./clitkImageArithm" + +function merge_mhd_image { +local merged="$1" +local merged_bin="${merged%.*}.raw" +shift +echo " ${indent}entering mhd image merger" +echo " ${indent}merger is ${mhdImageMerger}" +echo " ${indent}creating ${merged}" +local count=0 +start_bar $# +while test $# -gt 0 +do + local partial="$1" + local partial_bin="$(dirname "${partial}")/$(awk -F' = ' '/ElementDataFile/ { print $2; }' "${partial}")" + shift + let count++ + + if test ! 
-f "${merged}" + then + update_bar ${count} "copying first partial result ${partial}" + cp "${partial}" "${merged}" + cp "${partial_bin}" "${merged_bin%.*}.${partial_bin##*.}" + continue + fi + + update_bar ${count} "adding ${partial}" + ${mhdImageMerger} -t 0 -i "${partial}" -j "${merged}" -o "${merged}" 2> /dev/null > /dev/null || warning "error while calling ${mhdImageMerger}" + mv "${merged_bin}" "${merged_bin%.*}.${partial_bin##*.}" + sed -i "s/$(basename "${merged_bin}")/$(basename "${merged_bin%.*}.${partial_bin##*.}")/" "${merged}" +done +end_bar +echo " ${indent}merged ${count} files" +} + +function merge_dispatcher { + local indent=" ** " + local outputfile="${1:?"provide output filename"}" + echo "merging ${outputfile}" + + local partialoutputfiles="$(find "${rundir}" -mindepth 2 -type f -name "${outputfile}")" + local nboutputfiles="$(echo "${partialoutputfiles}" | wc -l)" + if test ${nboutputdirs} -ne ${nboutputfiles} + then + warning "missing files" + return + fi + + local firstpartialoutputfile="$(echo "${partialoutputfiles}" | head -n 1)" + local firstpartialoutputextension="${firstpartialoutputfile##*.}" + echo "${indent}testing file type on ${firstpartialoutputfile}" + + if test "${firstpartialoutputextension}" == "hdr" && grep -qs 'INTERFILE' "${firstpartialoutputfile}" + then + echo "${indent}this is a interfile image" + echo "${indent}creating mhd headers" + for partialoutputfile in $partialoutputfiles; do write_mhd_header "${partialoutputfile}"; done + local mhd_partialoutputfiles="$(for partialoutputfile in $partialoutputfiles; do echo "${partialoutputfile%.*}.mhd"; done)" + local mhd_mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}" ".hdr").mhd" + merge_mhd_image "${mhd_mergedfile}" ${mhd_partialoutputfiles} || error "error while merging" + echo "${indent}cleaning mhd headers" + for mhd_partialoutputfile in $mhd_partialoutputfiles; do rm "${mhd_partialoutputfile}"; done + rm "${mhd_mergedfile}" + echo "${indent}copy interfile header" + cp "${firstpartialoutputfile}" "${outputdir}" + return + fi + + if test "${firstpartialoutputextension}" == "hdr" + then + echo "${indent}this is a analyse image" + local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")" + merge_hdr_image "${mergedfile}" ${partialoutputfiles} || error "error while merging" + return + fi + + if test "${firstpartialoutputextension}" == "mhd" + then + echo "${indent}this is a mhd image" + local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")" + merge_mhd_image "${mergedfile}" ${partialoutputfiles} || error "error while merging" + return + fi + + if test "${firstpartialoutputextension}" == "root" + then + echo "${indent}this is a root file" + local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")" + merge_root "${mergedfile}" ${partialoutputfiles} || error "error while merging" + return + fi + + if test "${firstpartialoutputextension}" == "txt" && grep -qs 'NumberOfEvent' "${firstpartialoutputfile}" + then + echo "${indent}this is a stat file" + local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")" + merge_stat "${mergedfile}" ${partialoutputfiles} || error "error while merging" + return + fi + + if test "${firstpartialoutputextension}" == "txt" && grep -qs 'energydose' "${firstpartialoutputfile}" + then + echo "${indent}this is a dose file" + local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")" + merge_dose "${mergedfile}" ${partialoutputfiles} || error "error while merging" + return + fi + + + + if test 
"${firstpartialoutputextension}" == "txt" && grep -qs 'Resol' "${firstpartialoutputfile}" + then + local resol="$(sed -nr '/Resol/s/^.*=\s+\((.+)\)\s*$/\1/p' "${firstpartialoutputfile}")" + local resolx="$(echo "${resol}" | cut -d',' -f1)" + local resoly="$(echo "${resol}" | cut -d',' -f2)" + local resolz="$(echo "${resol}" | cut -d',' -f3)" + if test "${resol}" == "1,1,1" + then + echo "${indent}this is a txt integral value" + local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")" + merge_txt_image "${mergedfile}" ${partialoutputfiles} || error "error while merging" + return + fi + if test \( "${resolx}" == "1" -a "${resoly}" == "1" \) -o \( "${resoly}" == "1" -a "${resolz}" == "1" \) -o \( "${resolz}" == "1" -a "${resolx}" == "1" \) + then + echo "${indent}this is a txt profile" + local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")" + merge_txt_image "${mergedfile}" ${partialoutputfiles} || error "error while merging" + return + fi + fi + + if test "${firstpartialoutputextension}" == "txt" + then + echo "${indent}this is a non specific txt output" + local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")" + local nbdifferent="$(md5sum ${partialoutputfiles} | awk '{ print $1; }' | sort | uniq | wc -l)" + echo " ${indent}${nbdifferent} different files" + if test ${nbdifferent} -gt 1 + then + echo " ${indent}catting to ${mergedfile}" + cat ${partialoutputfiles} > "${mergedfile}" || error "error while merging" + return + else + echo " ${indent}moving to ${mergedfile}" + cp "${firstpartialoutputfile}" "${mergedfile}" || error "error while merging" + return + fi + fi + + error "unknown file type" +} + +echo "!!!! this is $0 v0.3k !!!!" + +rundir="${1?"provide run dir"}" +rundir="$(echo "${rundir}" | sed 's|/*$||')" +nboutputdirs="$(find "${rundir}" -mindepth 1 -type d -name 'output*' | wc -l)" + +test ${nboutputdirs} -gt 0 || error "no output dir found" +echo "found ${nboutputdirs} partial output dirs" + +outputdir="results" +if [ "${rundir}" != "." 
-a "${rundir##*.}" != "${rundir}" ] +then + outputdir="results.${rundir##*.}" +fi +outputdir="$(basename "${outputdir}")" +echo "output dir is ${outputdir}" +test -d "${outputdir}" && rm -r "${outputdir}" +mkdir "${outputdir}" + +for outputfile in $(find "${rundir}" -regextype 'posix-extended' -type f -regex "${rundir}/output.*\.(hdr|mhd|root|txt)" | awk -F '/' '{ print $NF; }' | sort | uniq) +do + merge_dispatcher "${outputfile}" +done + +if [ -f "${rundir}/params.txt" ] +then + echo "copying params file" + cp "${rundir}/params.txt" "${outputdir}/params.txt" +fi + +echo "these was ${warning_count} warning(s)" diff --git a/cluster_tools/gate_run_submit_cluster.sh b/cluster_tools/gate_run_submit_cluster.sh new file mode 100755 index 0000000..0248a1e --- /dev/null +++ b/cluster_tools/gate_run_submit_cluster.sh @@ -0,0 +1,92 @@ +#!/usr/bin/env bash + +set -u +SCRIPTNAME="$(basename "${0}")" + +# ------------------------------------------------- +function error { + echo "ERROR: $1" + usage + exit 1 +} +# ------------------------------------------------- + +DEFAULTRELEASESUFFIX="NONE" +DEFAULTNUMBEROFJOBS="10" + +# ------------------------------------------------- +function usage { + echo "${SCRIPTNAME} mac/main.mac njobs releasesuffix paramtogate" + echo "default njobs = ${DEFAULTNUMBEROFJOBS}" + echo "default releasesuffix = ${DEFAULTRELEASESUFFIX} (NONE means use Gate in PATH)" +} +# ------------------------------------------------- + +test $# -eq 0 && usage && exit 0 + +SCRIPTDIR="${HOME}/git/gate-tests/bin" +RELEASESUFFIX=${3:-"${DEFAULTRELEASESUFFIX}"} +RELEASEDIR="${HOME}/releases/grid_release${RELEASESUFFIX}" +JOBFILE="$(dirname $0)/gate_job_cluster.job" + +echo "Checking stuff" +test -f ${JOBFILE} || error "can't find job file ${JOBFILE}" +if test "${RELEASESUFFIX}" = "${DEFAULTRELEASESUFFIX}" +then + RELEASEDIR="NONE" + which Gate 2>&1 >/dev/null || error "there is no Gate in the PATH" +else + test -d ${RELEASEDIR} || error "invalid release dir ${RELEASEDIR}" +fi +MACRODIR=$(pwd) +test -d ${MACRODIR}/mac && test -d ${MACRODIR}/data || error "invalid path" +MACROFILE=${1:?"provide relative macro path"} +test -f ${MACRODIR}/${MACROFILE} || error "invalid macro" +OUTPUTDIR=$(mktemp --tmpdir=${MACRODIR} -d run.XXXX || error "can't create temp dir") +test -d ${OUTPUTDIR} || error "can't locate output dir" +RUNID=${OUTPUTDIR##*.} +NJOBS=${2:-"${DEFAULTNUMBEROFJOBS}"} +NJOBSMAX=${NJOBS} +PARAM="${4:-""}" + +echo "Lets roll!!" +echo "runid is ${RUNID}" + +QSUB=$(which qsub 2> /dev/null) +# echo "qsub is $(which qsub)" +test -z "${QSUB}" && QSUB="noqsub" +if test "${QSUB}" = "noqsub" +then + echo "qsub is not found. Simply run Gate on multiple cores." 
+fi + +test -z "${PARAM}" && echo "no param" || echo "param is ${PARAM}" +if test "$RELEASESUFFIX" = "$DEFAULTRELEASESUFFIX" +then + echo "Gate is $(which Gate)" +else + echo "Gate release is $(basename ${RELEASEDIR})" +fi +echo "submitting ${NJOBS} jobs" + +PARAMFILE="${OUTPUTDIR}/params.txt" +echo "njobs = ${NJOBS}" >> "${PARAMFILE}" +echo "macro = ${MACROFILE}" >> "${PARAMFILE}" +test -z "${PARAM}" || echo "param = ${PARAM}" >> "${PARAMFILE}" + +while test $NJOBS -gt 0; do + + if test "${QSUB}" = "noqsub" + then + echo "Launching Gate log in ${OUTPUTDIR}/gate_${NJOBS}.log" + PARAM=\"${PARAM}\" INDEX=${NJOBS} INDEXMAX=${NJOBSMAX} SCRIPTDIR=${SCRIPTDIR} OUTPUTDIR=${OUTPUTDIR} RELEASEDIR=${RELEASEDIR} MACROFILE=${MACROFILE} MACRODIR=${MACRODIR} PBS_JOBID="local_${NJOBS}" bash "${JOBFILE}" > ${OUTPUTDIR}/gate_${NJOBS}.log & + else + qsub -N "gatejob.${RUNID}" -o "${OUTPUTDIR}" \ + -v "PARAM=\"${PARAM}\",INDEX=${NJOBS},INDEXMAX=${NJOBSMAX},SCRIPTDIR=${SCRIPTDIR},OUTPUTDIR=${OUTPUTDIR},RELEASEDIR=${RELEASEDIR},MACROFILE=${MACROFILE},MACRODIR=${MACRODIR}" \ + "${JOBFILE}" || error "submission error" + fi + + let NJOBS-- +done + +echo "runid is ${RUNID}" diff --git a/cluster_tools/gate_upload_release.sh b/cluster_tools/gate_upload_release.sh new file mode 100755 index 0000000..784cd45 --- /dev/null +++ b/cluster_tools/gate_upload_release.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +. gate_common.sh + +releasearchive="${1:?"provide path to release archive"}" +test -f ${releasearchive} || error "file ${releasearchive} doesn't exist" +releasename="${prefix}$(basename "${releasearchive}")" + +ensure_proxy || error "no valid proxy" + +echo "releasearchive=${releasearchive}" +echo "releasename=${releasename}" +echo "lfnrelease=${lfnrelease}" +check_user || exit 2 + +upload_file "${releasearchive}" "${lfnrelease}${releasename}" && echo "success" || echo "failed" diff --git a/cluster_tools/mergeDosePerEnergyFile.sh b/cluster_tools/mergeDosePerEnergyFile.sh new file mode 100755 index 0000000..b63c5ae --- /dev/null +++ b/cluster_tools/mergeDosePerEnergyFile.sh @@ -0,0 +1,33 @@ +#!/bin/bash +set -u + +function usage { + echo "$0 -i -j -o " + exit 1 +} + +if [ $# != 6 ] +then + usage +fi + +IN1=$2 +IN2=$4 +RESULT=$6 + +test -f ${IN1} && test -f ${IN2} || usage + +TMP="$(mktemp)" +echo "merging dose file" +for PARAM in `awk '$1 == "#" {print $3}' ${IN1}` +do + echo "merging ${PARAM}" + V1=`awk -v P=${PARAM} '$3 == P {print $4}' ${IN1} ` + V2=`awk -v P=${PARAM} '$3 == P {print $4}' ${IN2} ` + V1=`echo ${V1} | sed -e 's/[eE]+*/\*10\^/'` + V2=`echo ${V2} | sed -e 's/[eE]+*/\*10\^/'` + R=`echo "scale=30; ${V1} + ${V2}" | bc -l` + test -z "${R}" && continue + echo "# energydose ${PARAM} ${R}" >> ${TMP} +done +mv -f ${TMP} ${RESULT} diff --git a/cluster_tools/mergeStatFile.py b/cluster_tools/mergeStatFile.py new file mode 100755 index 0000000..a1cd573 --- /dev/null +++ b/cluster_tools/mergeStatFile.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python +# coding: utf-8 + +import sys +import re +import datetime + +linere = re.compile(r'''^#\s+([a-zA-Z]+)\s+=\s(.*)$''') +mergedlines = ['NumberOfRun', 'NumberOfEvents', 'NumberOfTracks', 'NumberOfSteps', 'NumberOfGeometricalSteps', 'NumberOfPhysicalSteps', 'ElapsedTimeWoInit', 'ElapsedTime', 'StartDate', 'EndDate'] + +assert(len(sys.argv)==7) +assert(sys.argv[1]=="-i") +assert(sys.argv[3]=="-j") +assert(sys.argv[5]=="-o") + +def total_seconds(deltat): + try: + return float(deltat.total_seconds()) + except AttributeError: # total_seconds defined in 2.7 + total = 0. 
+ total += deltat.seconds + total += deltat.microseconds*1e-6 + total += deltat.days*3600.*24. + return total + +def parse_stat_file(filename): + keys = {} + for line in open(filename,"r").readlines(): + match = linere.match(line) + assert(match is not None) + groups = match.groups() + if groups[0] not in mergedlines: + continue + keys[groups[0]]=groups[1] + return keys + +def merge_keys(ikeys,jkeys): + mindate = None + maxdate = None + keys = {} + for line in mergedlines: + value = None + + if value is None: + try: + ivalue = int(ikeys[line]) + jvalue = int(jkeys[line]) + value = ivalue + jvalue + value = str(value) + except ValueError: + pass + + if value is None: + try: + ivalue = float(ikeys[line]) + jvalue = float(jkeys[line]) + value = ivalue + jvalue + value = str(value) + except ValueError: + pass + + if value is None: + ivalue = datetime.datetime.strptime(ikeys[line],"%a %b %d %H:%M:%S %Y") + jvalue = datetime.datetime.strptime(jkeys[line],"%a %b %d %H:%M:%S %Y") + if line=="StartDate": + value = min(ivalue,jvalue) + mindate = value + if line=="EndDate": + value = max(ivalue,jvalue) + maxdate = value + value = value.strftime("%a %b %d %H:%M:%S %Y") + + assert(value is not None) + keys[line] = value + if mindate is not None and maxdate is not None: + speedup = float(keys["ElapsedTime"])/total_seconds(maxdate-mindate) + keys["Speedup"] = str(speedup) + return keys + +def format_keys(keys): + output = "\n".join("# %s = %s" % (line,keys[line]) for line in mergedlines) + if "Speedup" in keys: + output += "\n# Speedup = %s" % keys["Speedup"] + return output + +ikeys = parse_stat_file(sys.argv[2]) +jkeys = parse_stat_file(sys.argv[4]) +keys = merge_keys(ikeys,jkeys) +output = format_keys(keys) +open(sys.argv[6],"w").write(output) + diff --git a/cluster_tools/mergeStatFile.sh b/cluster_tools/mergeStatFile.sh new file mode 100755 index 0000000..12c7c05 --- /dev/null +++ b/cluster_tools/mergeStatFile.sh @@ -0,0 +1,32 @@ +#!/bin/bash +set -u + +function usage { + echo "$0 -i -j -o " + exit 1 +} + +if [ $# != 6 ] +then + usage +fi + +IN1=$2 +IN2=$4 +RESULT=$6 + + +test -f ${IN1} && test -f ${IN2} || usage + +TMP="$(mktemp)" +echo "merging stat file" +for PARAM in `awk '$1 == "#" {print $2}' ${IN1}` +do + echo "merging ${PARAM}" + V1=`awk -v P=${PARAM} '$2 == P {print $4}' ${IN1} ` + V2=`awk -v P=${PARAM} '$2 == P {print $4}' ${IN2} ` + R=`echo "${V1} + ${V2}" | bc` + test -z "${R}" && continue + echo "# ${PARAM} = ${R}" >> ${TMP} +done +mv -f ${TMP} ${RESULT} diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index ba65d28..6c1096a 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -66,8 +66,7 @@ ENDIF(CLITK_MEMORY_INFO) IF (CLITK_USE_SYSTEM_GDCM) FIND_PACKAGE(GDCM REQUIRED) include(${GDCM_USE_FILE}) - TARGET_LINK_LIBRARIES(clitkCommon vtkgdcm) - # gdcmDICT gdcmMSFF) + TARGET_LINK_LIBRARIES(clitkCommon vtkgdcm gdcmDICT gdcmMSFF gdcmMEXD) ENDIF() #========================================================= diff --git a/common/clitkDicomRT_ROI.cxx b/common/clitkDicomRT_ROI.cxx index b661353..1fd9fe1 100644 --- a/common/clitkDicomRT_ROI.cxx +++ b/common/clitkDicomRT_ROI.cxx @@ -475,6 +475,16 @@ void clitk::DicomRT_ROI::Read(vtkSmartPointer & reader, i //mColor = //FIXME !! 
+ // gdcm::Attribute<0x3006,0x002a> color = {}; + + // const gdcm::DataSet & nestedds = mItemContour->GetNestedDataSet(); + // color.SetFromDataSet( nestedds ); + // assert( color.GetNumberOfValues() == 3 ); + // mColor[0] = color.GetValue(0); + // mColor[1] = color.GetValue(1); + // mColor[2] = color.GetValue(2); + + SetDicomUptodateFlag(true); // Get the contour mMesh = reader->GetOutput(roiindex); diff --git a/common/clitkImage2DicomRTStructFilter.h b/common/clitkImage2DicomRTStructFilter.h index 924bcc0..b6a74b2 100644 --- a/common/clitkImage2DicomRTStructFilter.h +++ b/common/clitkImage2DicomRTStructFilter.h @@ -41,25 +41,25 @@ namespace clitk { typedef typename clitk::DicomRT_StructureSet::Pointer DicomRTStructPointer; // Set inputs - itkSetMacro(Input, ImagePointer); - itkGetConstMacro(Input, ImagePointer); + itkSetMacro(InputFilenames, std::vector ); itkSetMacro(StructureSetFilename, std::string); itkSetMacro(DicomFolder, std::string); itkSetMacro(OutputFilename, std::string); - void SetROIName(std::string name, std::string type); + void SetROIType(std::string type); itkSetMacro(ThresholdValue, PixelType); + itkSetMacro(SkipInitialStructuresFlag, bool); // Run filter void Update(); protected: - ImagePointer m_Input; std::string m_StructureSetFilename; std::string m_DicomFolder; std::string m_OutputFilename; - std::string m_ROIName; std::string m_ROIType; PixelType m_ThresholdValue; + std::vector m_InputFilenames; + bool m_SkipInitialStructuresFlag; }; //-------------------------------------------------------------------- diff --git a/common/clitkImage2DicomRTStructFilter.txx b/common/clitkImage2DicomRTStructFilter.txx index 1b241a9..77c5e07 100644 --- a/common/clitkImage2DicomRTStructFilter.txx +++ b/common/clitkImage2DicomRTStructFilter.txx @@ -45,6 +45,7 @@ #include "vtkCamera.h" #include "vtkProperty.h" #include "vtkProperty2D.h" +#include // itk #include @@ -61,6 +62,7 @@ clitk::Image2DicomRTStructFilter::Image2DicomRTStructFilter() m_DicomFolder = ""; m_OutputFilename = "default-output.dcm"; m_ThresholdValue = 0.5; + m_SkipInitialStructuresFlag = false; } //-------------------------------------------------------------------- @@ -76,9 +78,8 @@ clitk::Image2DicomRTStructFilter::~Image2DicomRTStructFilter() //-------------------------------------------------------------------- template void -clitk::Image2DicomRTStructFilter::SetROIName(std::string name, std::string type) +clitk::Image2DicomRTStructFilter::SetROIType(std::string type) { - m_ROIName = name; m_ROIType = type; } //-------------------------------------------------------------------- @@ -112,9 +113,18 @@ void clitk::Image2DicomRTStructFilter::Update() << p->GetNumberOfStructureSetROIs() << std::endl; } + + // number of additional contours + int m = m_InputFilenames.size(); + // Init writer vtkGDCMPolyDataWriter * writer = vtkGDCMPolyDataWriter::New(); - int numMasks = reader->GetNumberOfOutputPorts() + 1;//add one more + int numMasks = reader->GetNumberOfOutputPorts() + m; + + if (m_SkipInitialStructuresFlag) { + numMasks = m; + } + writer->SetNumberOfInputPorts(numMasks); writer->SetFileName(m_OutputFilename.c_str()); writer->SetMedicalImageProperties(reader->GetMedicalImageProperties()); @@ -128,18 +138,46 @@ void clitk::Image2DicomRTStructFilter::Update() roiTypes->SetNumberOfValues(numMasks); // Convert the image into a mesh - typedef clitk::BinaryImageToMeshFilter BinaryImageToMeshFilterType; - typename BinaryImageToMeshFilterType::Pointer convert = BinaryImageToMeshFilterType::New(); - 
convert->SetThresholdValue(m_ThresholdValue); - convert->SetInput(m_Input); - convert->Update(); - vtkPolyData* mesh = convert->GetOutputMesh(); - if (GetVerboseFlag()) { - std::cout << "Mesh has " << mesh->GetNumberOfLines() << " lines." << std::endl; + std::vector > meshes; + std::vector m_ROINames; + meshes.resize(m); + m_ROINames.resize(m); + for(unsigned int i=0; i ImageType; + ImagePointer input = clitk::readImage(m_InputFilenames[i], false); + + std::ostringstream oss; + oss << vtksys::SystemTools:: + GetFilenameName(vtksys::SystemTools::GetFilenameWithoutLastExtension(m_InputFilenames[i])); + std::string name = oss.str(); + m_ROINames[i] = name; + + // convert to mesh + typedef clitk::BinaryImageToMeshFilter BinaryImageToMeshFilterType; + typename BinaryImageToMeshFilterType::Pointer convert = BinaryImageToMeshFilterType::New(); + convert->SetThresholdValue(m_ThresholdValue); + convert->SetInput(input); + convert->Update(); + meshes[i] = convert->GetOutputMesh(); + if (GetVerboseFlag()) { + std::cout << "Mesh has " << meshes[i]->GetNumberOfLines() << " lines." << std::endl; + } + + /* + // debug mesh write FIXME + vtkSmartPointer wr = vtkSmartPointer::New(); + wr->SetInputConnection(convert->GetOutputPort()); //psurface->GetOutputPort() + wr->SetFileName("bidon.obj"); + wr->Update(); + wr->Write(); + */ } - + // Copy previous contours - for (unsigned int i = 0; i < numMasks-1; ++i) { + for (unsigned int i = 0; i < numMasks-m; ++i) { writer->SetInput(i, reader->GetOutput(i)); std::string theString = reader->GetRTStructSetProperties()->GetStructureSetROIName(i); roiNames->InsertValue(i, theString); @@ -149,13 +187,14 @@ void clitk::Image2DicomRTStructFilter::Update() roiTypes->InsertValue(i, theString); } - // Add new one - int last = numMasks-1; - writer->SetInput(last, mesh); - roiNames->InsertValue(last, m_ROIName); - roiAlgorithms->InsertValue(last, "CLITK_CREATED"); - roiTypes->InsertValue(last, m_ROIType); - + // Add new ones + for (unsigned int i = numMasks-m; i < numMasks; ++i) { + writer->SetInput(i, meshes[i-numMasks+m]); + roiNames->InsertValue(i, m_ROINames[i-numMasks+m]); + roiAlgorithms->InsertValue(i, "CLITK_CREATED"); + roiTypes->InsertValue(i, m_ROIType); + } + /* // Visu DEBUG vtkPolyDataMapper *cubeMapper = vtkPolyDataMapper::New(); diff --git a/common/clitkTransformUtilities.cxx b/common/clitkTransformUtilities.cxx index ef210d7..832d598 100644 --- a/common/clitkTransformUtilities.cxx +++ b/common/clitkTransformUtilities.cxx @@ -67,4 +67,32 @@ itk::Matrix GetRotationMatrix<3>(itk::Array rotationParame return GetRotationMatrix3D(rotationParameters); } +//-------------------------------------------------------------------- +itk::Matrix ReadMatrix3D(std::string fileName) +{ + // read input matrix + std::ifstream is; + openFileForReading(is, fileName); + std::vector nb; + double x; + skipComment(is); + is >> x; + while (is && !is.eof()) { + nb.push_back(x); + skipComment(is); + is >> x; + } + + if(nb.size() != 16) + itkGenericExceptionMacro(<< "Could not read 4x4 matrix in " << fileName); + + //copy it to the matrix + itk::Matrix matrix; + unsigned int index=0; + for (unsigned int i=0;i<4;i++) + for (unsigned int j=0;j<4;j++) + matrix[i][j]=nb[index++]; + return matrix; +} + } diff --git a/common/clitkTransformUtilities.h b/common/clitkTransformUtilities.h index cbc30b9..d3644b1 100644 --- a/common/clitkTransformUtilities.h +++ b/common/clitkTransformUtilities.h @@ -255,29 +255,7 @@ namespace clitk return matrix; } - inline itk::Matrix ReadMatrix3D(std::string 
fileName) - { - // read input matrix - std::ifstream is; - openFileForReading(is, fileName); - std::vector nb; - double x; - skipComment(is); - is >> x; - while (!is.eof()) { - nb.push_back(x); - skipComment(is); - is >> x; - } - - //copy it to the matrix - itk::Matrix matrix; - unsigned int index=0; - for (unsigned int i=0;i<4;i++) - for (unsigned int j=0;j<4;j++) - matrix[i][j]=nb[index++]; - return matrix; - } + itk::Matrix ReadMatrix3D(std::string fileName); inline vtkMatrix4x4* ReadVTKMatrix3D(std::string fileName) { // read input matrix diff --git a/common/vvImageReader.cxx b/common/vvImageReader.cxx index f4f1a90..ddc4ad3 100644 --- a/common/vvImageReader.cxx +++ b/common/vvImageReader.cxx @@ -161,7 +161,15 @@ void vvImageReader::ReadMatImageTransform() if(f.is_open()) { f.close(); - itk::Matrix itkMat = clitk::ReadMatrix3D(filename); + itk::Matrix itkMat; + itkMat.SetIdentity(); + try { + itkMat = clitk::ReadMatrix3D(filename); + } + catch (itk::ExceptionObject & err) { + itkWarningMacro(<< "Found " << filename + << " but this is not a 4x4 matrix so it is ignored."); + } vtkSmartPointer matrix = vtkSmartPointer::New(); matrix->Identity(); diff --git a/common/vvImageWriter.txx b/common/vvImageWriter.txx index 0705197..81e45ee 100644 --- a/common/vvImageWriter.txx +++ b/common/vvImageWriter.txx @@ -38,6 +38,8 @@ void vvImageWriter::UpdateWithDim(std::string OutputPixelType) UpdateWithDimAndOutputPixelType(); } else if (OutputPixelType == "int") { UpdateWithDimAndOutputPixelType(); + } else if (OutputPixelType == "unsigned_int") { + UpdateWithDimAndOutputPixelType(); } else if (OutputPixelType == "double") { UpdateWithDimAndOutputPixelType(); } else if (OutputPixelType == "float") { diff --git a/segmentation/clitkExtractLung.ggo b/segmentation/clitkExtractLung.ggo index 44b99a2..f31f718 100644 --- a/segmentation/clitkExtractLung.ggo +++ b/segmentation/clitkExtractLung.ggo @@ -36,7 +36,7 @@ option "skipslices" - "0: Number of slices to skip before sear option "upperThresholdForTrachea" - "0: Initial upper threshold for trachea" double no default="-900" option "multiplierForTrachea" - "0: Multiplier for the region growing" double no default="5" option "thresholdStepSizeForTrachea" - "0: Threshold step size" int no default="64" -option "seed" - "0,1: Index of the trachea seed point (in pixel, not in mm)" int no multiple +option "seed" - "0,1: Index of the trachea seed point (in mm NOT IN PIXELS)" float no multiple option "doNotCheckTracheaVolume" - "0,1: If set, do not check the trachea volume" flag off option "verboseRG" - "0,1: Verbose RegionGrowing" flag off option "maxElongation" - "1: Maximum allowed elongation of candidate regions for the trachea" float no default="0.5" @@ -63,7 +63,7 @@ option "opencloseRadius" - "OpenClose radius" int no section "Step 6 : fill holes" option "doNotFillHoles" - "Do not fill holes if set" flag on -option "dir" d "Directions (axes) to perform filling (defaults to 2,1,0)" int multiple no +#option "dir" d "Directions (axes) to perform filling (defaults to 2,1,0)" int multiple no section "Step 7 : lung separation (labelling)" option "doNotSeparateLungs" - "Do not separate lungs if set" flag off diff --git a/segmentation/clitkExtractLungFilter.h b/segmentation/clitkExtractLungFilter.h index 6f31cee..6c50b1c 100644 --- a/segmentation/clitkExtractLungFilter.h +++ b/segmentation/clitkExtractLungFilter.h @@ -164,7 +164,8 @@ namespace clitk { itkSetMacro(SeedPreProcessingThreshold, int); itkGetConstMacro(SeedPreProcessingThreshold, int); - void 
AddSeed(InternalIndexType s); + void AddSeedInPixels(InternalIndexType s); + void AddSeed(InputImagePointType s); std::vector & GetSeeds() { return m_Seeds; } itkSetMacro(TracheaVolumeMustBeCheckedFlag, bool); @@ -252,6 +253,7 @@ namespace clitk { InputImagePixelType m_ThresholdStepSizeForTrachea; double m_MultiplierForTrachea; std::vector m_Seeds; + std::vector m_SeedsInMM; int m_NumberOfSlicesToSkipBeforeSearchingSeed; bool m_TracheaVolumeMustBeCheckedFlag; bool m_VerboseRegionGrowingFlag; diff --git a/segmentation/clitkExtractLungFilter.txx b/segmentation/clitkExtractLungFilter.txx index a0acdd5..b76001f 100644 --- a/segmentation/clitkExtractLungFilter.txx +++ b/segmentation/clitkExtractLungFilter.txx @@ -90,8 +90,7 @@ ExtractLungFilter(): TracheaVolumeMustBeCheckedFlagOn(); SetNumSlices(50); SetMaxElongation(0.5); - SetSeedPreProcessingThreshold(-400); - + SetSeedPreProcessingThreshold(-400); // Step 3 default values SetNumberOfHistogramBins(500); @@ -135,7 +134,19 @@ SetInput(const ImageType * image) template void clitk::ExtractLungFilter:: -AddSeed(InternalIndexType s) +//AddSeed(InternalIndexType s) +AddSeed(InputImagePointType s) +{ + m_SeedsInMM.push_back(s); +} +//-------------------------------------------------------------------- + + +//-------------------------------------------------------------------- +template +void +clitk::ExtractLungFilter:: +AddSeedInPixels(InternalIndexType s) { m_Seeds.push_back(s); } @@ -461,7 +472,6 @@ GenerateOutputInformation() // smalest one (sometimes appends with the stomach if (initialNumberOfLabels >1) { if (GetRemoveSmallLabelBeforeSeparationFlag()) { - DD(GetRemoveSmallLabelBeforeSeparationFlag()); typedef itk::RelabelComponentImageFilter RelabelFilterType; typename RelabelFilterType::Pointer relabelFilter = RelabelFilterType::New(); relabelFilter->SetInput(working_mask); @@ -577,7 +587,7 @@ SearchForTracheaSeed(int skip) it.GoToBegin(); while (!it.IsAtEnd()) { if(it.Get() < GetUpperThresholdForTrachea() ) { - AddSeed(it.GetIndex()); + AddSeedInPixels(it.GetIndex()); // DD(it.GetIndex()); } ++it; @@ -853,7 +863,7 @@ TracheaRegionGrowing() f->SetVerbose(GetVerboseRegionGrowingFlag()); for(unsigned int i=0; iAddSeed(m_Seeds[i]); - // DD(m_Seeds[i]); + //DD(m_Seeds[i]); } f->Update(); PrintMemory(GetVerboseMemoryFlag(), "After RG update"); @@ -907,6 +917,15 @@ SearchForTrachea() // compute trachea volume // if volume not plausible -> skip more slices and restart + // If initial seed, convert from mm to pixels + if (m_SeedsInMM.size() > 0) { + for(unsigned int i=0; iTransformPhysicalPointToIndex(m_SeedsInMM[i], index); + m_Seeds.push_back(index); + } + } + bool has_seed; bool stop = false; double volume = 0.0; diff --git a/segmentation/clitkExtractLungGenericFilter.txx b/segmentation/clitkExtractLungGenericFilter.txx index b9a94d8..01455c8 100644 --- a/segmentation/clitkExtractLungGenericFilter.txx +++ b/segmentation/clitkExtractLungGenericFilter.txx @@ -94,10 +94,10 @@ SetOptionsFromArgsInfoToFilter(FilterType * f) f->SetMaxElongation(mArgsInfo.maxElongation_arg); f->SetSeedPreProcessingThreshold(mArgsInfo.seedPreProcessingThreshold_arg); - typename FilterType::InputImageIndexType s; + typename FilterType::InputImagePointType s; if (mArgsInfo.seed_given) { ConvertOptionMacro(mArgsInfo.seed, s, 3, false); - f->AddSeed(s); + f->AddSeed(s); } f->SetMinimalComponentSize(mArgsInfo.minSize_arg); diff --git a/tools/CMakeLists.txt b/tools/CMakeLists.txt index 319908b..77f12f9 100644 --- a/tools/CMakeLists.txt +++ b/tools/CMakeLists.txt @@ -108,6 
+108,12 @@ IF (CLITK_BUILD_TOOLS) TARGET_LINK_LIBRARIES(clitkAffineTransform clitkCommon ) SET(TOOLS_INSTALL ${TOOLS_INSTALL} clitkAffineTransform) + WRAP_GGO(clitkElastixTransformToMatrix_GGO_C clitkElastixTransformToMatrix.ggo) + ADD_EXECUTABLE(clitkElastixTransformToMatrix clitkElastixTransformToMatrix.cxx ${clitkElastixTransformToMatrix_GGO_C}) + TARGET_LINK_LIBRARIES(clitkElastixTransformToMatrix clitkCommon ) + SET(TOOLS_INSTALL ${TOOLS_INSTALL} clitkElastixTransformToMatrix) + + WRAP_GGO(clitkSetBackground_GGO_C clitkSetBackground.ggo) ADD_EXECUTABLE(clitkSetBackground clitkSetBackground.cxx clitkSetBackgroundGenericFilter.cxx ${clitkSetBackground_GGO_C}) TARGET_LINK_LIBRARIES(clitkSetBackground clitkCommon) @@ -271,7 +277,7 @@ IF (CLITK_BUILD_TOOLS) IF(CLITK_EXPERIMENTAL) WRAP_GGO(clitkBinaryImageToMesh_GGO_C clitkBinaryImageToMesh.ggo) ADD_EXECUTABLE(clitkBinaryImageToMesh clitkBinaryImageToMesh.cxx ${clitkBinaryImageToMesh_GGO_C}) - TARGET_LINK_LIBRARIES(clitkBinaryImageToMesh itksys ${ITK_LIBRARIES} ${VTK_LIBRARIES}) + TARGET_LINK_LIBRARIES(clitkBinaryImageToMesh clitkCommon) SET(TOOLS_INSTALL ${TOOLS_INSTALL} clitkBinaryImageToMesh) WRAP_GGO(clitkMeshToBinaryImage_GGO_C clitkMeshToBinaryImage.ggo) @@ -281,7 +287,7 @@ IF (CLITK_BUILD_TOOLS) WRAP_GGO(clitkMeshViewer_GGO_C clitkMeshViewer.ggo) ADD_EXECUTABLE(clitkMeshViewer clitkMeshViewer.cxx ${clitkMeshViewer_GGO_C}) - TARGET_LINK_LIBRARIES(clitkMeshViewer ${ITK_LIBRARIES} ${VTK_LIBRARIES}) + TARGET_LINK_LIBRARIES(clitkMeshViewer clitkCommon) SET(TOOLS_INSTALL ${TOOLS_INSTALL} clitkMeshViewer) ENDIF(CLITK_EXPERIMENTAL) diff --git a/tools/clitkAffineTransformGenericFilter.h b/tools/clitkAffineTransformGenericFilter.h index 6cd2a91..3950632 100644 --- a/tools/clitkAffineTransformGenericFilter.h +++ b/tools/clitkAffineTransformGenericFilter.h @@ -87,6 +87,11 @@ namespace clitk //---------------------------------------- void Update(); + template + static + typename itk::Matrix + createMatrixFromElastixFile(std::vector & filename, bool verbose=true); + protected: //---------------------------------------- @@ -103,13 +108,9 @@ namespace clitk template void UpdateWithDimAndPixelType(); template void UpdateWithDimAndVectorType(); - template - typename itk::Matrix - createMatrixFromElastixFile(std::vector & filename); - - bool GetElastixValueFromTag(std::ifstream & is, std::string tag, std::string & value); - void GetValuesFromValue(const std::string & s, - std::vector & values); + static bool GetElastixValueFromTag(std::ifstream & is, std::string tag, std::string & value); + static void GetValuesFromValue(const std::string & s, + std::vector & values); //---------------------------------------- // Data members diff --git a/tools/clitkAffineTransformGenericFilter.txx b/tools/clitkAffineTransformGenericFilter.txx index d3cd19e..03cf862 100644 --- a/tools/clitkAffineTransformGenericFilter.txx +++ b/tools/clitkAffineTransformGenericFilter.txx @@ -183,7 +183,7 @@ namespace clitk if (m_ArgsInfo.elastix_given) { std::vector s; for(uint i=0; i(s); + matrix = createMatrixFromElastixFile(s, m_Verbose); } else matrix.SetIdentity(); @@ -497,7 +497,7 @@ namespace clitk template template typename itk::Matrix - AffineTransformGenericFilter::createMatrixFromElastixFile(std::vector & filename) + AffineTransformGenericFilter::createMatrixFromElastixFile(std::vector & filename, bool verbose) { if (Dimension != 3) { FATAL("Only 3D yet" << std::endl); @@ -509,7 +509,7 @@ namespace clitk for(uint j=0; jSetParameters(p); - if (m_Verbose) { + if (verbose) { 
std::cout << "Rotation (deg) : " << rad2deg(p[0]) << " " << rad2deg(p[1]) << " " << rad2deg(p[2]) << std::endl; std::cout << "Center of rot (phy) : " << p[3] << " " << p[4] << " " << p[5] << std::endl; std::cout << "Translation (phy) : " << p[6] << " " << p[7] << " " << p[8] << std::endl; @@ -562,7 +562,7 @@ namespace clitk // Compose with previous if needed if (j!=0) { mat->Compose(previous); - if (m_Verbose) { + if (verbose) { std::cout << "Composed rotation (deg) : " << rad2deg(mat->GetAngleX()) << " " << rad2deg(mat->GetAngleY()) << " " << rad2deg(mat->GetAngleZ()) << std::endl; std::cout << "Composed center of rot (phy) : " << mat->GetCenter() << std::endl; std::cout << "Compsoed translation (phy) : " << mat->GetTranslation() << std::endl; diff --git a/tools/clitkDicom2Image.cxx b/tools/clitkDicom2Image.cxx index c7b2e23..b25b001 100644 --- a/tools/clitkDicom2Image.cxx +++ b/tools/clitkDicom2Image.cxx @@ -82,12 +82,13 @@ int main(int argc, char * argv[]) gdcm::Attribute<0x28, 0x100> pixel_size; pixel_size.SetFromDataSet(ds); - if (pixel_size.GetValue() != 16) - { - std::cerr << "Pixel type 2 bytes ! " << std::endl; - std::cerr << "In file " << input_files[i] << std::endl; - exit(0); - } + /* if (pixel_size.GetValue() != 16) + { + std::cerr << "Pixel type not 2 bytes ! " << std::endl; + std::cerr << "In file " << input_files[i] << std::endl; + exit(0); + } + */ #else if (args_info.verbose_flag) std::cout << "Not using GDCM-2.x" << std::endl; @@ -107,11 +108,12 @@ int main(int argc, char * argv[]) theorigin[series_number][2] = header->GetZOrigin(); sliceLocations[series_number].push_back(theorigin[series_number][2]); seriesFiles[series_number].push_back(input_files[i]); - if (header->GetPixelSize() != 2) { + /*if (header->GetPixelSize() != 2) { std::cerr << "Pixel type 2 bytes ! 
" << std::endl; std::cerr << "In file " << input_files[i] << std::endl; exit(0); } + */ #endif } diff --git a/tools/clitkDicomInfo.cxx b/tools/clitkDicomInfo.cxx index fe2d869..202afc7 100644 --- a/tools/clitkDicomInfo.cxx +++ b/tools/clitkDicomInfo.cxx @@ -29,8 +29,10 @@ // itk (gdcm) include #include "gdcmFile.h" #if GDCM_MAJOR_VERSION == 2 - #include "gdcmReader.h" - #include "gdcmPrinter.h" +#include "gdcmReader.h" +#include "gdcmPrinter.h" +#include "gdcmDict.h" +#include "gdcmStringFilter.h" #endif //-------------------------------------------------------------------- @@ -43,8 +45,46 @@ int main(int argc, char * argv[]) // check arg if (args_info.inputs_num == 0) return 0; + // Study ID + #if GDCM_MAJOR_VERSION == 2 + if (args_info.studyID_flag) { + std::set l; + for(unsigned int i=0; i tags; + gdcm::Tag StudyInstanceUIDTag(0x0020, 0x000d); + gdcm::Tag SeriesDateTag(0x0008, 0x0021); + tags.insert(StudyInstanceUIDTag); + tags.insert(SeriesDateTag); + if (reader.ReadSelectedTags(tags)) { + gdcm::StringFilter sf; + sf.SetFile(reader.GetFile()); + std::pair p = sf.ToStringPair(StudyInstanceUIDTag); + std::pair d = sf.ToStringPair(SeriesDateTag); + if (args_info.uniq_flag) { + if (l.insert(p.second).second == true) { + if (args_info.filename_flag) + std::cout << args_info.inputs[i] << " " << p.second << " " << d.second << std::endl; + else + std::cout << p.second << std::endl; + } + } + else { + if (args_info.filename_flag) + std::cout << args_info.inputs[i] << " " << p.second << " " << d.second << std::endl; + else + std::cout << p.second << std::endl; + } + } // tag not found + } + } +#endif + // Loop files + if (!args_info.studyID_flag) for(unsigned int i=0; i FilterType; + std::vector l; + l.push_back(args_info.input_arg); + itk::Matrix m = + FilterType::createMatrixFromElastixFile<3, int>(l, args_info.verbose_flag); + + // Print matrix + std::ofstream os; + clitk::openFileForWriting(os, args_info.output_arg); + for(unsigned int i=0; i<4; i++) { + for(unsigned int j=0; j<4; j++) + os << m[i][j] << " "; + os << std::endl; + } + os.close(); + + return EXIT_SUCCESS; +}// end main + +//-------------------------------------------------------------------- diff --git a/tools/clitkElastixTransformToMatrix.ggo b/tools/clitkElastixTransformToMatrix.ggo new file mode 100644 index 0000000..f7f0758 --- /dev/null +++ b/tools/clitkElastixTransformToMatrix.ggo @@ -0,0 +1,9 @@ +#File clitkElastixTransformToMatrix.ggo +package "clitkElastixTransformToMatrix" +version "1.0" +purpose "" + +option "config" - "Config file" string optional +option "verbose" v "Verbose" flag off +option "input" i "Input elastix filename" string required +option "output" o "Output matrix filename" string required diff --git a/tools/clitkImage2DicomRTStruct.cxx b/tools/clitkImage2DicomRTStruct.cxx index 264eb05..807b073 100644 --- a/tools/clitkImage2DicomRTStruct.cxx +++ b/tools/clitkImage2DicomRTStruct.cxx @@ -27,20 +27,22 @@ int main(int argc, char * argv[]) { // Init command line GGO(clitkImage2DicomRTStruct, args_info); - // Read initial 3D image - typedef float PixelType; - typedef itk::Image ImageType; - ImageType::Pointer input = clitk::readImage(args_info.input_arg, args_info.verbose_flag); + // Set initial 3D image filenames + std::vector filenames; + for(unsigned int i=0; i< args_info.input_given; i++) + filenames.push_back(args_info.input_arg[i]); // Create a filter to convert image into dicomRTStruct and write to disk + typedef float PixelType; clitk::Image2DicomRTStructFilter filter; 
filter.SetVerboseFlag(args_info.verbose_flag); - filter.SetInput(input); + filter.SetInputFilenames(filenames); filter.SetDicomFolder(args_info.dicom_arg); filter.SetStructureSetFilename(args_info.rtstruct_arg); filter.SetOutputFilename(args_info.output_arg); - filter.SetROIName(args_info.roiname_arg, args_info.roitype_arg); + filter.SetROIType(args_info.roitype_arg); filter.SetThresholdValue(args_info.threshold_arg); + filter.SetSkipInitialStructuresFlag(args_info.skip_flag); filter.Update(); // This is the end my friend diff --git a/tools/clitkImage2DicomRTStruct.ggo b/tools/clitkImage2DicomRTStruct.ggo index 928d11b..7416d32 100644 --- a/tools/clitkImage2DicomRTStruct.ggo +++ b/tools/clitkImage2DicomRTStruct.ggo @@ -5,12 +5,12 @@ version "Add a (binary) image inside a DICOM RT Structure Set (contours)" option "config" - "Config file" string no option "verbose" v "Verbose" flag off -option "input" i "Input image file (binary image) to be converted into contours" string yes +option "input" i "Input image file (binary image) to be converted into contours" string multiple yes option "rtstruct" r "Input rt struct" string yes option "dicom" d "Input folder where the initial dicom ct is" string yes option "output" o "Output DicomRT filename" string yes option "threshold" t "Threshold for binary image" float no default = "0.5" -option "roiname" - "Name of the roi added into the rt-struct" string yes +option "skip" s "Do not write in output the structures that was in input" flag off option "roitype" - "Name of the type of roi added into the rt-struct" string no default = "ORGAN" diff --git a/tools/clitkVectorImageToImageGenericFilter.txx b/tools/clitkVectorImageToImageGenericFilter.txx index cb5effe..b1cf3f9 100644 --- a/tools/clitkVectorImageToImageGenericFilter.txx +++ b/tools/clitkVectorImageToImageGenericFilter.txx @@ -42,8 +42,13 @@ namespace clitk if (Components==3) { - if (m_Verbose) std::cout << "Launching filter in "<< Dimension <<"D and 3D float..." << std::endl; - UpdateWithDimAndPixelType >(); + if (PixelType == "unsigned_char") + UpdateWithDimAndPixelType >(); + else + { + if (m_Verbose) std::cout << "Launching filter in "<< Dimension <<"D and 3D float..." << std::endl; + UpdateWithDimAndPixelType >(); + } } else std::cerr<<"Number of components is "< -#include +#include class vvPacsSettingsDialog : public QDialog { @@ -19,4 +19,4 @@ private slots: void accept(); }; -#endif //__vvPacsSettingsDialog_H \ No newline at end of file +#endif //__vvPacsSettingsDialog_H diff --git a/vv/vvQPacsConnection.h b/vv/vvQPacsConnection.h index 28a3d41..bafea83 100644 --- a/vv/vvQPacsConnection.h +++ b/vv/vvQPacsConnection.h @@ -1,11 +1,11 @@ #ifndef __vvQPacsConnection_h_INCLUDED__ #define __vvQPacsConnection_h_INCLUDED__ -#include +#include #include "ui_vvPacsConnection.h" #include "gdcmCompositeNetworkFunctions.h" -#include -#include +#include +#include #include #include "vvDicomServerQueryFactory.h" @@ -17,10 +17,10 @@ - class vvQPacsConnection : public QDialog - { +class vvQPacsConnection : public QDialog +{ Q_OBJECT - public: +public: //vvQPacsConnection(){} vvQPacsConnection(QWidget *parent=0);