Creatis software - clitk.git/commitdiff
Merge branch 'PacsConnection' of ssh://git.creatis.insa-lyon.fr/clitk into PacsConnection
author cervenansky <frederic.cervenansky@creatis.insa-lyon.fr>
Wed, 24 Jul 2013 09:25:56 +0000 (11:25 +0200)
committer cervenansky <frederic.cervenansky@creatis.insa-lyon.fr>
Wed, 24 Jul 2013 09:25:56 +0000 (11:25 +0200)
36 files changed:
README.md [new file with mode: 0644]
cluster_tools/gate_common.sh [new file with mode: 0644]
cluster_tools/gate_job_cluster.job [new file with mode: 0644]
cluster_tools/gate_make_release.sh [new file with mode: 0755]
cluster_tools/gate_power_merge.sh [new file with mode: 0755]
cluster_tools/gate_run_submit_cluster.sh [new file with mode: 0755]
cluster_tools/gate_upload_release.sh [new file with mode: 0755]
cluster_tools/mergeDosePerEnergyFile.sh [new file with mode: 0755]
cluster_tools/mergeStatFile.py [new file with mode: 0755]
cluster_tools/mergeStatFile.sh [new file with mode: 0755]
common/CMakeLists.txt
common/clitkDicomRT_ROI.cxx
common/clitkImage2DicomRTStructFilter.h
common/clitkImage2DicomRTStructFilter.txx
common/clitkTransformUtilities.cxx
common/clitkTransformUtilities.h
common/vvImageReader.cxx
common/vvImageWriter.txx
segmentation/clitkExtractLung.ggo
segmentation/clitkExtractLungFilter.h
segmentation/clitkExtractLungFilter.txx
segmentation/clitkExtractLungGenericFilter.txx
tools/CMakeLists.txt
tools/clitkAffineTransformGenericFilter.h
tools/clitkAffineTransformGenericFilter.txx
tools/clitkDicom2Image.cxx
tools/clitkDicomInfo.cxx
tools/clitkDicomInfo.ggo
tools/clitkDicomRTStruct2Image.ggo
tools/clitkElastixTransformToMatrix.cxx [new file with mode: 0644]
tools/clitkElastixTransformToMatrix.ggo [new file with mode: 0644]
tools/clitkImage2DicomRTStruct.cxx
tools/clitkImage2DicomRTStruct.ggo
tools/clitkVectorImageToImageGenericFilter.txx
vv/vvPacsSettingsDialog.h
vv/vvQPacsConnection.h

diff --git a/README.md b/README.md
new file mode 100644 (file)
index 0000000..8f03a23
--- /dev/null
+++ b/README.md
@@ -0,0 +1,4 @@
+
+
+VV, a 4D image viewer, see: http://vv.creatis.insa-lyon.fr
+
diff --git a/cluster_tools/gate_common.sh b/cluster_tools/gate_common.sh
new file mode 100644 (file)
index 0000000..ccad419
--- /dev/null
@@ -0,0 +1,115 @@
+
+set -u
+
+# print error message and exit immediately
+programname="$(basename ${0})"
+function error {
+    echo "${programname} **ERROR** $1"
+    exit 1
+}
+
+# ensure a valid proxy is present for at least one hour
+# if no proxy is found, try to create a new one
+# return 0 if a valid proxy is found or created
+# otherwise exit with status 1
+function ensure_proxy {
+    voms-proxy-info --exists -valid 1:0 > /dev/null && return 0 
+    voms-proxy-init --voms biomed -valid 24:00 || exit 1
+}
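+# typical use (as in gate_upload_release.sh): ensure_proxy || error "no valid proxy"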
+
+# print a prompt to check that the user wants to continue
+# the user has to answer with 'y' to continue
+function check_user {
+    prompt="${1:-is that correct?}"
+    read -p "${prompt} [y/n] " answer
+    test "${answer}" == "y" && return 0
+    test "${answer}" == "n" && return 1
+    check_user "${prompt}"
+}
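+# example (illustrative prompt), as used by upload_file below:
+#   check_user "${destlfn} already exists. overwrite it?" || return 2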
+
+# checks if the lfn file exists
+function file_exists {
+    lfnfile="${1:?"provide lfn to file"}"
+    lfc-ls "${lfnfile}" > /dev/null 2>&1
+}
+
+
+# upload file to grid storage element
+# source can be a relative or an absolute path to the source file 
+# dest must be the lfn to the target **file** (not the directory) 
+# if dest already exists, the user is prompted before overwriting it
+function upload_file {
+    sourcefile=${1:?"provide source file"}
+    destlfn=${2:?"provide destination file lfn"}
+    sourcefile="$(readlink -f "${sourcefile}")" # convert to absolute path
+    test -f "${sourcefile}" || error "can't find ${sourcefile}"
+    echo "uploading ${sourcefile} to ${destlfn}"
+    if file_exists "${destlfn}"; then
+       check_user "${destlfn} already exists. overwrite it?" || return 2
+       lcg-del -a "lfn:${destlfn}" || error "lcg-del error"
+    fi
+    echo "lets roll"
+
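+    # register the file on the first reachable storage element of ${SES} with lcg-cr,
+    # then replicate it to the remaining ones with lcg-rep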
+    local pending_ses=${SES}
+    local registered=false
+    while [ "x${pending_ses}" != "x" ]
+    do 
+        #select a se from list
+        local S=`echo ${pending_ses} | awk '{print $1}'`
+        #update list of SEs
+        local new_list=""
+        for i in `echo ${pending_ses}`
+        do
+            if [ "$i" != "${S}" ]
+           then
+               new_list="${new_list} ${i}"
+            fi
+        done
+        pending_ses=${new_list}
+        local TEMP=`mktemp lcg-XXXXX`
+        if [ "${registered}" = "false" ]
+        then
+            echo -n "Registering release to ${S}..."
+            lcg-cr -v -d ${S} -l "lfn:${destlfn}" "file:${sourcefile}" &>${TEMP}
+            if [ $? != 0 ]
+           then
+               echo -e "\t [\033[31m  FAILED  \033[0m]"
+               cat ${TEMP}
+               \rm ${TEMP}
+           else
+               echo -e "\t [\033[32m  OK  \033[0m]"
+               registered=true
+               \rm ${TEMP}
+            fi
+        else
+            echo -n "Replicating release to ${S}..."
+            lcg-rep -v -d ${S} "lfn:${destlfn}" &>${TEMP}
+            if [ $? != 0 ]
+           then
+               echo -e "\t [\033[31m  FAILED  \033[0m]"
+               cat ${TEMP}
+               \rm ${TEMP}
+           else
+               echo -e "\t [\033[32m  OK  \033[0m]"
+               \rm ${TEMP}
+            fi
+        fi
+    done
+}
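+# example call (illustrative lfn), as done in gate_upload_release.sh:
+#   upload_file gate_release.tar.gz "${lfnrelease}${prefix}gate_release.tar.gz"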
+
+
+# common path used
+lfnbase="/grid/biomed/creatis/fgate/"
+lfnrelease="/grid/biomed/creatis/vip/data/groups/GateLab/releases/"
+lfnworkflow="${lfnbase}workflow/"
+lfngasw="${lfnbase}gasw/"
+lfnscript="${lfnbase}bin/"
+
+#list of SEs used for storage. Don't modify this list unless you know
+# what you're doing. Replicating the release in a bad place (e.g. on a
+# remote continent) can dramatically slow down the transfers
+SES="ccsrm02.in2p3.fr sbgse1.in2p3.fr marsedpm.in2p3.fr"
+
+# define the prefix for uploaded file
+# default to local machine username
+prefix="${USER:?"USER must be set"}_"
diff --git a/cluster_tools/gate_job_cluster.job b/cluster_tools/gate_job_cluster.job
new file mode 100644 (file)
index 0000000..1242dfd
--- /dev/null
@@ -0,0 +1,102 @@
+#!/bin/bash
+#
+# MACRODIR
+# MACROFILE
+# RELEASEDIR
+# OUTPUTDIR
+# INDEX
+# INDEXMAX
+# PARAM
+#
+#PBS -r n
+#PBS -l walltime=100:00:00
+#PBS -j oe
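+# The variables listed above are expected in the environment; gate_run_submit_cluster.sh
+# passes them either directly (local run) or through "qsub -v", e.g. (illustrative):
+#   qsub -N gatejob.1234 -o run.1234 -v "MACRODIR=...,MACROFILE=mac/main.mac,..." gate_job_cluster.job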
+
+#env
+#pwd
+#exit 1
+
+
+function error {
+echo "ERROR: $1"
+exit 1
+}
+
+function warning {
+echo "WARNING: $1"
+}
+
+test -f ${HOME}/.bashrc && echo "Sourcing bashrc" && source ${HOME}/.bashrc
+set -u
+
+echo "Checking"
+uname -a
+echo "MACRODIR=${MACRODIR:?"unknown MACRODIR"}"
+echo "MACROFILE=${MACROFILE:?"unknown MACROFILE"}"
+echo "RELEASEDIR=${RELEASEDIR:?"unknown RELEASEDIR"}"
+echo "OUTPUTDIR=${OUTPUTDIR:?"unknown OUTPUTDIR"}"
+echo "PBS_JOBID=${PBS_JOBID}"
+echo "INDEX=${INDEX}"
+echo "INDEXMAX=${INDEXMAX}"
+echo "PARAM=${PARAM}"
+
+if test "$RELEASEDIR" = "NONE"
+then
+       echo Using $(which Gate)
+       ldd $(which Gate)
+else
+       test -d "${RELEASEDIR}" || error "can't find release"
+       md5sum ${RELEASEDIR}/Gate
+       test -f ${RELEASEDIR}/libGate.so && md5sum ${RELEASEDIR}/libGate.so
+
+       echo "Finding libraries"
+       ROOTLIBS="${RELEASEDIR}/libCore.so:${RELEASEDIR}/libCint.so:${RELEASEDIR}/libRIO.so:${RELEASEDIR}/libNet.so:${RELEASEDIR}/libHist.so:${RELEASEDIR}/libGraf.so:${RELEASEDIR}/libGraf3d.so:${RELEASEDIR}/libGpad.so:${RELEASEDIR}/libTree.so:${RELEASEDIR}/libRint.so:${RELEASEDIR}/libPostscript.so:${RELEASEDIR}/libMatrix.so:${RELEASEDIR}/libPhysics.so:${RELEASEDIR}/libMathCore.so:${RELEASEDIR}/libThread.so:"
+       echo "ROOTLIBS=${ROOTLIBS}"
+       G4LIBS="$(for library in $(find "${RELEASEDIR}" -maxdepth 1 -name 'libG4*.so'); do echo -n "${library}:"; done)"
+       echo "G4LIBS=${G4LIBS}"
+       CLHEPLIBS="$(for library in $(find "${RELEASEDIR}" -maxdepth 1 -name 'libCLHEP*.so'); do echo -n "${library}:"; done)"
+       echo "CLHEPLIBS=${CLHEPLIBS}"
+       GATELIBS=""
+       test -f ${RELEASEDIR}/libGate.so && GATELIBS="${RELEASEDIR}/libGate.so:"
+       echo "GATELIBS=${GATELIBS}"
+fi
+test -d "${MACRODIR}" && test -d "${MACRODIR}/mac" || error "invalid macro"
+
+
+echo "Copying inputs"
+LOCALMACRODIR=$(mktemp -d)
+trap "mv output ${OUTPUTDIR}/output.${PBS_JOBID%%.*} ; rm -r ${LOCALMACRODIR} ; exit 1" 1 2 3 15
+cd ${LOCALMACRODIR}
+cp -r -L "${MACRODIR}"/{data,mac} .
+mkdir output
+
+# Enforce one thread
+export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1
+
+echo "Launching macro"
+date
+if test "$RELEASEDIR" = "NONE"
+then
+       Gate ${PARAM} ${MACROFILE} || error "gate failed"
+else
+       LD_PRELOAD="${ROOTLIBS}${G4LIBS}${CLHEPLIBS}${GATELIBS}" \
+       G4LEVELGAMMADATA="${RELEASEDIR}/PhotonEvaporation2.1" \
+       G4RADIOACTIVEDATA="${RELEASEDIR}/RadioactiveDecay3.3" \
+       G4LEDATA="${RELEASEDIR}/G4EMLOW6.19" \
+       G4NEUTRONHPDATA="${RELEASEDIR}/G4NDL3.14" \
+       G4ABLADATA="${RELEASEDIR}/G4ABLA3.0" \
+       G4REALSURFACEDATA="${RELEASEDIR}/RealSurface1.0" \
+       G4NEUTRONXSDATA="${RELEASEDIR}/G4NEUTRONXS1.0" \
+       G4PIIDATA="${RELEASEDIR}/G4PII1.2" \
+       /usr/bin/time --format="real %es\nuser %Us\nsys %Ss\nmaxmem %Mk" \
+       ${RELEASEDIR}/Gate ${PARAM} ${MACROFILE} \
+       || error "gate failed"
+fi
+
+echo "Copying data back"
+mv output "${OUTPUTDIR}/output.${PBS_JOBID%%.*}"
+
+echo "Cleanup"
+rm -r ${LOCALMACRODIR}
+
+echo "Success!!!"
diff --git a/cluster_tools/gate_make_release.sh b/cluster_tools/gate_make_release.sh
new file mode 100755 (executable)
index 0000000..363d583
--- /dev/null
@@ -0,0 +1,97 @@
+#!/bin/bash
+
+function usage {
+    echo "ERROR: $1"
+    echo "$(basename $0) [name]"
+    exit 1
+}
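+# example (illustrative): package the "Gate" binary found in PATH together with its
+# shared libraries and Geant4 data sets into gate_release.tar.gz
+#   ./gate_make_release.sh Gate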
+
+GATENAME="${1:-Gate}"
+which "${GATENAME}" > /dev/null || usage "can't locate ${GATENAME} binary"
+GATEBIN="$(which ${GATENAME})"
+echo "${GATENAME} executable is ${GATEBIN}"
+
+test -d "${G4NEUTRONHPDATA}" || usage "can't locate neutron data. please set G4NEUTRONHPDATA"
+echo "neutron data is ${G4NEUTRONHPDATA}"
+test -d "${G4LEVELGAMMADATA}" || usage "can't locate gamma data. please set G4LEVELGAMMADATA"
+echo "gamma data is ${G4LEVELGAMMADATA}"
+test -d "${G4RADIOACTIVEDATA}" || usage "can't locate radioactivity data. please set G4RADIOACTIVEDATA"
+echo "radioactivity data is ${G4RADIOACTIVEDATA}"
+test -d "${G4ABLADATA}" || usage "can't locate abla data. please set G4ABLADATA"
+echo "abla data is ${G4ABLADATA}"
+test -d "${G4LEDATA}" || usage "can't locate em data. please set G4LEDATA"
+echo "em data is ${G4LEDATA}"
+test -d "${G4REALSURFACEDATA}" || usage "can't locate surface data. please set G4REALSURFACEDATA"
+echo "surface data is ${G4REALSURFACEDATA}"
+test -d "${G4NEUTRONXSDATA}" || usage "can't locate neutron xs data. please set G4NEUTRONXSDATA"
+echo "neutron xs data is ${G4NEUTRONXSDATA}"
+test -d "${G4PIIDATA}" || usage "can't locate pii data. please set G4PIIDATA"
+echo "pii data is ${G4PIIDATA}"
+
+echo "Cleaning previous build"
+rm -fr $(basename ${G4NEUTRONHPDATA})
+rm -fr $(basename ${G4LEVELGAMMADATA})
+rm -fr $(basename ${G4RADIOACTIVEDATA})
+rm -fr $(basename ${G4ABLADATA})
+rm -fr $(basename ${G4LEDATA})
+rm -fr $(basename ${G4REALSURFACEDATA})
+rm -fr test_libs gate_shared_libs.tar.gz gate_release.tar.gz
+
+echo "Copying libraries"
+function get_deps {
+    ldd $1 | while read library; do
+       libfile="$(echo ${library} | awk -F' ' '/=> \// {print $3}')"
+	test "${libfile}" || continue # didn't match regex
+       test -f "test_libs/$(basename ${libfile})" && continue # already exists
+       echo "${libfile}"
+       cp "${libfile}" "test_libs/$(basename ${libfile})"
+       get_deps "${libfile}"
+    done
+}
+
+mkdir test_libs
+get_deps "${GATEBIN}"
+
+echo "Removing unused libraries"
+rm -f test_libs/libdl.so*
+rm -f test_libs/libm.so*
+rm -f test_libs/libstdc++.so*
+rm -f test_libs/libgcc_s.so*
+rm -f test_libs/libpthread.so*
+rm -f test_libs/libc.so*
+
+echo "Zipping libraries"
+(
+    cd test_libs
+    tar -czvf ../gate_shared_libs.tar.gz *
+) || usage "can't create libraries tar"
+
+echo "Copying binary"
+cp "${GATEBIN}" .
+
+echo "Copying data"
+cp -r "${G4NEUTRONHPDATA}" .
+cp -r "${G4LEVELGAMMADATA}" .
+cp -r "${G4RADIOACTIVEDATA}" .
+cp -r "${G4ABLADATA}" .
+cp -r "${G4LEDATA}" .
+cp -r "${G4REALSURFACEDATA}" .
+cp -r "${G4NEUTRONXSDATA}" .
+cp -r "${G4PIIDATA}" .
+
+echo "Making release"
+tar -czvf gate_release.tar.gz \
+    ${GATENAME} gate_shared_libs.tar.gz \
+    $(basename ${G4NEUTRONHPDATA}) \
+    $(basename ${G4LEVELGAMMADATA}) \
+    $(basename ${G4RADIOACTIVEDATA}) \
+    $(basename ${G4ABLADATA}) \
+    $(basename ${G4LEDATA}) \
+    $(basename ${G4REALSURFACEDATA}) \
+    $(basename ${G4NEUTRONXSDATA}) \
+    $(basename ${G4PIIDATA}) \
+    || usage "can't create release zip"
+
+chmod -w gate_release.tar.gz
+
+
diff --git a/cluster_tools/gate_power_merge.sh b/cluster_tools/gate_power_merge.sh
new file mode 100755 (executable)
index 0000000..fc0e6aa
--- /dev/null
@@ -0,0 +1,453 @@
+#!/usr/bin/env bash
+
+set -u 
+
+function error {
+echo "ERROR: $1"
+exit 1
+}
+
+warning_count=0
+function warning {
+let "warning_count++"
+echo "MERGE_WARNING: $1"
+}
+
+function start_bar {
+count_max="${1:?"provide count max"}"
+}
+
+function update_bar {
+local count="${1:?"provide count"}"
+local message="${2:?"provide message"}"
+local percent=$(echo "100*${count}/${count_max}" | bc)
+#printf "[%03d/%03d] %3d%% %-80.80s\r" ${count} ${count_max} ${percent} "${message}"
+printf "[%03d/%03d] %3d%% %-80.80s\n" ${count} ${count_max} ${percent} "${message}"
+}
+
+function end_bar {
+unset count_max
+#echo -ne '\n'
+}
+
+function check_interfile {
+local input_interfile="${1:?"provide input interfile"}"
+
+grep -qs '!INTERFILE :=' "${input_interfile}" || return 1
+
+local header_byte_size=$(awk -F' ' '
+BEGIN { zsize = 0; }
+/matrix size/ && $3 == "[1]" { xsize = $5; }
+/matrix size/ && $3 == "[2]" { ysize = $5; }
+/number of projections/ { zsize += $5; }
+/number of bytes per pixel/ { byte_per_pixel = $7; }
+END { print xsize * ysize * zsize * byte_per_pixel; }' "${input_interfile}")
+
+local raw_interfile="$(dirname "${input_interfile}")/$(awk -F' := ' '/name of data file/ { print $2; }' "${input_interfile}")"
+
+test -f "${raw_interfile}" || return 1
+test $(stat -c%s "${raw_interfile}") -eq ${header_byte_size} || return 1
+}
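+# check_interfile expects an interfile header containing at least these keys (illustrative values):
+#   !INTERFILE :=
+#   name of data file := projection.sin
+#   matrix size [1] := 128
+#   matrix size [2] := 128
+#   number of projections := 64
+#   number of bytes per pixel := 4
+# and verifies that the referenced raw data file exists with the matching byte size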
+
+function write_mhd_header {
+local input_interfile="${1:?"provide input interfile"}"
+local output_mhd="$(dirname "${input_interfile}")/$(basename "${input_interfile}" ".hdr").mhd"
+
+check_interfile "${input_interfile}" || error "${input_interfile} isn't an interfile image"
+
+local header_start='ObjectType = Image
+NDims = 3
+AcquisitionDate = none
+BinaryData = True
+BinaryDataByteOrderMSB = False
+CompressedData = False
+TransformMatrix = 1 0 0 0 1 0 0 0 1
+Offset = 0 0 0
+CenterOfRotation = 0 0 0
+DistanceUnits = mm
+AnatomicalOrientation = RIP'
+
+echo "${header_start}" > "${output_mhd}"
+
+awk -F' ' '
+/scaling factor/ && $4 == "[1]" { xspacing = $6; }
+/scaling factor/ && $4 == "[2]" { yspacing = $6; }
+END { print "ElementSpacing = " xspacing " " yspacing " 1"; }' "${input_interfile}" >> "${output_mhd}"
+
+awk -F' ' '
+BEGIN { zsize = 0; }
+/matrix size/ && $3 == "[1]" { xsize = $5; }
+/matrix size/ && $3 == "[2]" { ysize = $5; }
+/number of projections/ { zsize += $5; }
+END { print "DimSize = " xsize " " ysize " " zsize; }' "${input_interfile}" >> "${output_mhd}"
+
+awk -F' := ' '
+/number format/ { format = $2; }
+/number of bytes per pixel/ { byte_per_pixel = $2 }
+END {
+if (format == "unsigned integer" && byte_per_pixel == 8) { print "ElementType = MET_ULONG"; exit };
+if (format == "unsigned integer" && byte_per_pixel == 4) { print "ElementType = MET_UINT"; exit };
+if (format == "unsigned integer" && byte_per_pixel == 2) { print "ElementType = MET_USHORT"; exit };
+if (format == "unsigned integer" && byte_per_pixel == 1) { print "ElementType = MET_UCHAR"; exit };
+if (format == "integer" && byte_per_pixel == 8) { print "ElementType = MET_LONG"; exit };
+if (format == "integer" && byte_per_pixel == 4) { print "ElementType = MET_INT"; exit };
+if (format == "integer" && byte_per_pixel == 2) { print "ElementType = MET_SHORT"; exit };
+if (format == "integer" && byte_per_pixel == 1) { print "ElementType = MET_CHAR"; exit };
+if (format == "float" && byte_per_pixel == 8) { print "ElementType = MET_DOUBLE"; exit };
+if (format == "float" && byte_per_pixel == 4) { print "ElementType = MET_FLOAT"; exit };
+print "ElementType = MET_INT";
+}' "${input_interfile}" >> "${output_mhd}"
+
+awk -F' := ' '
+/name of data file/ { print "ElementDataFile = " $2; }' "${input_interfile}" >> "${output_mhd}"
+}
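+# write_mhd_header builds, next to the interfile header, a .mhd header whose
+# ElementSpacing / DimSize / ElementType / ElementDataFile fields are derived from the
+# "scaling factor", "matrix size", "number format", "number of bytes per pixel"
+# and "name of data file" keys of the interfile header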
+
+rootMerger="clitkMergeRootFiles"
+test -x "./clitkMergeRootFiles" && rootMerger="./clitkMergeRootFiles"
+
+function merge_root {
+local merged="$1"
+shift
+echo "  ${indent}entering root merger"
+echo "  ${indent}merger is ${rootMerger}"
+echo "  ${indent}creating ${merged}"
+#echo "######## $#"
+#echo "######## $*"
+
+if test $# -eq 1
+then
+    echo "  ${indent}just one partial file => just copy it"
+    cp "$1" "${merged}"
+    return
+fi
+
+local count=0
+local arguments=" -o ${merged}"
+while test $# -gt 0
+do
+    local partial="$1"
+    shift
+    let count++
+    local arguments=" -i ${partial} ${arguments}"
+done
+${rootMerger} ${arguments} > /dev/null || error "error while calling ${rootMerger}"
+echo "  ${indent}merged ${count} files"
+}
+
+statMerger="mergeStatFile.py"
+test -x "./mergeStatFile.sh" && statMerger="./mergeStatFile.sh"
+
+function merge_stat {
+local merged="$1"
+shift
+echo "  ${indent}entering stat merger"
+echo "  ${indent}merger is ${statMerger}"
+echo "  ${indent}creating ${merged}"
+local count=0
+start_bar $#
+while test $# -gt 0
+do
+    local partial="$1"
+    shift
+    let count++
+
+    if test ! -f "${merged}"
+    then
+        update_bar ${count} "copying first partial result ${partial}"
+        cp "${partial}" "${merged}"
+        continue
+    fi
+
+    update_bar ${count} "adding ${partial}"
+    ${statMerger} -i "${merged}" -j "${partial}" -o "${merged}" 2> /dev/null > /dev/null || warning "error while calling ${statMerger}"
+done
+end_bar
+echo "  ${indent}merged ${count} files"
+}
+
+doseMerger="mergeDosePerEnergyFile.sh"
+test -x "./mergeDosePerEnergyFile.sh" && doseMerger="./mergeDosePerEnergyFile.sh"
+
+function merge_dose {
+local merged="$1"
+shift
+echo "  ${indent}entering dose merger"
+echo "  ${indent}merger is ${doseMerger}"
+echo "  ${indent}creating ${merged}"
+local count=0
+start_bar $#
+while test $# -gt 0
+do
+    local partial="$1"
+    shift
+    let count++
+
+    if test ! -f "${merged}"
+    then
+        update_bar ${count} "copying first partial result ${partial}"
+        cp "${partial}" "${merged}"
+        continue
+    fi
+
+    update_bar ${count} "adding ${partial}"
+    ${doseMerger} -i "${merged}" -j "${partial}" -o "${merged}" 2> /dev/null > /dev/null || warning "error while calling ${doseMerger}"
+done
+end_bar
+echo "  ${indent}merged ${count} files"
+}
+
+txtImageMerger="clitkMergeAsciiDoseActor"
+test -f "./clitkMergeAsciiDoseActor" && txtImageMerger="./clitkMergeAsciiDoseActor"
+
+function merge_txt_image {
+local merged="$1"
+shift
+echo "  ${indent}entering text image merger"
+echo "  ${indent}merger is ${txtImageMerger}"
+echo "  ${indent}creating ${merged}"
+local count=0
+start_bar $#
+while test $# -gt 0
+do
+    local partial="$1"
+    shift
+    let count++
+
+    if test ! -f "${merged}"
+    then
+        update_bar ${count} "copying first partial result ${partial}"
+        cp "${partial}" "${merged}"
+        continue
+    fi
+
+    update_bar ${count} "adding ${partial}"
+    local header="$(cat "${merged}" | head -n 6)"
+    local tmp="$(mktemp)"
+    ${txtImageMerger} -i "${partial}" -j "${merged}" -o "${tmp}" 2> /dev/null > /dev/null || warning "error while calling ${txtImageMerger}"
+    echo "${header}" > "${merged}"
+    grep -v '## Merge' "${tmp}" >> "${merged}"
+    rm "${tmp}"
+done
+end_bar
+echo "  ${indent}merged ${count} files"
+}
+
+hdrImageMerger="clitkImageArithm"
+test -x "./clitkImageArithm" && hdrImageMerger="./clitkImageArithm"
+
+function merge_hdr_image {
+local merged="$1"
+local merged_bin="${merged%.*}.img"
+shift
+echo "  ${indent}entering hdr image merger"
+echo "  ${indent}merger is ${hdrImageMerger}"
+echo "  ${indent}creating ${merged}"
+local count=0
+start_bar $#
+while test $# -gt 0
+do
+    local partial="$1"
+    local partial_bin="${partial%.*}.img"
+    shift
+    let count++
+
+    if test ! -f "${merged}"
+    then
+        update_bar ${count} "copying first partial result ${partial}"
+        cp "${partial}" "${merged}"
+        cp "${partial_bin}" "${merged_bin}"
+        continue
+    fi
+
+    update_bar ${count} "adding ${partial}"
+    ${hdrImageMerger} -t 0 -i "${partial}" -j "${merged}" -o "${merged}" 2> /dev/null > /dev/null || warning "error while calling ${hdrImageMerger}"
+done
+end_bar
+echo "  ${indent}merged ${count} files"
+}
+
+mhdImageMerger="clitkImageArithm"
+test -x "./clitkImageArithm" && mhdImageMerger="./clitkImageArithm"
+
+function merge_mhd_image {
+local merged="$1"
+local merged_bin="${merged%.*}.raw"
+shift
+echo "  ${indent}entering mhd image merger"
+echo "  ${indent}merger is ${mhdImageMerger}"
+echo "  ${indent}creating ${merged}"
+local count=0
+start_bar $#
+while test $# -gt 0
+do
+    local partial="$1"
+    local partial_bin="$(dirname "${partial}")/$(awk -F' = ' '/ElementDataFile/ { print $2; }' "${partial}")"
+    shift
+    let count++
+
+    if test ! -f "${merged}"
+    then
+        update_bar ${count} "copying first partial result ${partial}"
+        cp "${partial}" "${merged}"
+        cp "${partial_bin}" "${merged_bin%.*}.${partial_bin##*.}"
+        continue
+    fi
+
+    update_bar ${count} "adding ${partial}"
+    ${mhdImageMerger} -t 0 -i "${partial}" -j "${merged}" -o "${merged}" 2> /dev/null > /dev/null || warning "error while calling ${mhdImageMerger}"
+    mv "${merged_bin}" "${merged_bin%.*}.${partial_bin##*.}"
+    sed -i "s/$(basename "${merged_bin}")/$(basename "${merged_bin%.*}.${partial_bin##*.}")/" "${merged}"
+done
+end_bar
+echo "  ${indent}merged ${count} files"
+}
+
+function merge_dispatcher {
+    local indent="  ** "
+    local outputfile="${1:?"provide output filename"}"
+    echo "merging ${outputfile}"
+
+    local partialoutputfiles="$(find "${rundir}" -mindepth 2 -type f -name "${outputfile}")"
+    local nboutputfiles="$(echo "${partialoutputfiles}" | wc -l)"
+    if test ${nboutputdirs} -ne ${nboutputfiles}
+    then
+        warning "missing files"
+        return
+    fi
+
+    local firstpartialoutputfile="$(echo "${partialoutputfiles}" | head -n 1)"
+    local firstpartialoutputextension="${firstpartialoutputfile##*.}"
+    echo "${indent}testing file type on ${firstpartialoutputfile}"
+
+    if test "${firstpartialoutputextension}" == "hdr" && grep -qs 'INTERFILE' "${firstpartialoutputfile}"
+    then
+        echo "${indent}this is an interfile image"
+        echo "${indent}creating mhd headers"
+        for partialoutputfile in $partialoutputfiles; do write_mhd_header "${partialoutputfile}"; done
+        local mhd_partialoutputfiles="$(for partialoutputfile in $partialoutputfiles; do echo "${partialoutputfile%.*}.mhd"; done)"
+        local mhd_mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}" ".hdr").mhd"
+        merge_mhd_image "${mhd_mergedfile}" ${mhd_partialoutputfiles} || error "error while merging"
+        echo "${indent}cleaning mhd headers"
+        for mhd_partialoutputfile in $mhd_partialoutputfiles; do rm "${mhd_partialoutputfile}"; done
+        rm "${mhd_mergedfile}"
+        echo "${indent}copy interfile header"
+        cp "${firstpartialoutputfile}" "${outputdir}"
+        return
+    fi
+
+    if test "${firstpartialoutputextension}" == "hdr"
+    then
+        echo "${indent}this is an Analyze image"
+        local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")"
+        merge_hdr_image "${mergedfile}" ${partialoutputfiles} || error "error while merging"
+        return
+    fi
+
+    if test "${firstpartialoutputextension}" == "mhd"
+    then
+        echo "${indent}this is an mhd image"
+        local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")"
+        merge_mhd_image "${mergedfile}" ${partialoutputfiles} || error "error while merging"
+        return
+    fi
+
+    if test "${firstpartialoutputextension}" == "root"
+    then
+        echo "${indent}this is a root file"
+        local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")"
+        merge_root "${mergedfile}" ${partialoutputfiles} || error "error while merging"
+        return
+    fi
+
+    if test "${firstpartialoutputextension}" == "txt" && grep -qs 'NumberOfEvent' "${firstpartialoutputfile}"
+    then
+        echo "${indent}this is a stat file"
+        local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")"
+        merge_stat "${mergedfile}" ${partialoutputfiles} || error "error while merging"
+        return
+    fi
+
+    if test "${firstpartialoutputextension}" == "txt" && grep -qs 'energydose' "${firstpartialoutputfile}"
+    then
+        echo "${indent}this is a dose file"
+        local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")"
+        merge_dose "${mergedfile}" ${partialoutputfiles} || error "error while merging"
+        return
+    fi
+
+
+
+    if test "${firstpartialoutputextension}" == "txt" && grep -qs 'Resol' "${firstpartialoutputfile}"
+    then
+        local resol="$(sed -nr '/Resol/s/^.*=\s+\((.+)\)\s*$/\1/p' "${firstpartialoutputfile}")"
+        local resolx="$(echo "${resol}" | cut -d',' -f1)"
+        local resoly="$(echo "${resol}" | cut -d',' -f2)"
+        local resolz="$(echo "${resol}" | cut -d',' -f3)"
+        if test "${resol}" == "1,1,1"
+        then
+            echo "${indent}this is a txt integral value"
+            local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")"
+            merge_txt_image "${mergedfile}" ${partialoutputfiles} || error "error while merging"
+            return
+        fi
+        if test \( "${resolx}" == "1" -a "${resoly}" == "1" \) -o \( "${resoly}" == "1" -a "${resolz}" == "1" \) -o \( "${resolz}" == "1" -a "${resolx}" == "1" \)
+        then
+            echo "${indent}this is a txt profile"
+            local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")"
+            merge_txt_image "${mergedfile}" ${partialoutputfiles} || error "error while merging"
+            return
+        fi
+    fi
+
+    if test "${firstpartialoutputextension}" == "txt"
+    then
+        echo "${indent}this is a non-specific txt output"
+        local mergedfile="${outputdir}/$(basename "${firstpartialoutputfile}")"
+        local nbdifferent="$(md5sum ${partialoutputfiles} | awk '{ print $1; }' | sort | uniq | wc -l)"
+        echo "  ${indent}${nbdifferent} different files"
+        if test ${nbdifferent} -gt 1
+        then
+            echo "  ${indent}catting to ${mergedfile}"
+            cat ${partialoutputfiles} > "${mergedfile}" || error "error while merging"
+            return
+        else
+            echo "  ${indent}moving to ${mergedfile}"
+            cp "${firstpartialoutputfile}" "${mergedfile}" || error "error while merging"
+            return
+        fi
+    fi
+
+    error "unknown file type"
+}
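+# dispatch summary (one rule per partial output file type):
+#   *.hdr + "INTERFILE" keyword -> interfile image   (merged via temporary .mhd headers)
+#   *.hdr                       -> Analyze image     (merge_hdr_image)
+#   *.mhd                       -> MetaImage         (merge_mhd_image)
+#   *.root                      -> ROOT file         (merge_root)
+#   *.txt + "NumberOfEvent"     -> Gate stat file    (merge_stat)
+#   *.txt + "energydose"        -> dose-by-energy    (merge_dose)
+#   *.txt + "Resol"             -> integral value or 1-D profile (merge_txt_image)
+#   other *.txt                 -> cat if files differ, copy otherwise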
+
+echo "!!!! this is $0 v0.3k !!!!"
+
+rundir="${1?"provide run dir"}"
+rundir="$(echo "${rundir}" | sed 's|/*$||')"
+nboutputdirs="$(find "${rundir}" -mindepth 1 -type d -name 'output*' | wc -l)"
+
+test ${nboutputdirs} -gt 0 || error "no output dir found"
+echo "found ${nboutputdirs} partial output dirs"
+
+outputdir="results"
+if [ "${rundir}" != "." -a "${rundir##*.}" != "${rundir}" ]
+then
+    outputdir="results.${rundir##*.}"
+fi
+outputdir="$(basename "${outputdir}")"
+echo "output dir is ${outputdir}"
+test -d "${outputdir}" && rm -r "${outputdir}"
+mkdir "${outputdir}"
+
+for outputfile in $(find "${rundir}" -regextype 'posix-extended' -type f -regex "${rundir}/output.*\.(hdr|mhd|root|txt)" | awk -F '/' '{ print $NF; }' | sort | uniq)
+do
+    merge_dispatcher "${outputfile}"
+done
+
+if [ -f "${rundir}/params.txt" ]
+then
+       echo "copying params file"
+       cp "${rundir}/params.txt" "${outputdir}/params.txt"
+fi
+
+echo "there were ${warning_count} warning(s)"
diff --git a/cluster_tools/gate_run_submit_cluster.sh b/cluster_tools/gate_run_submit_cluster.sh
new file mode 100755 (executable)
index 0000000..0248a1e
--- /dev/null
@@ -0,0 +1,92 @@
+#!/usr/bin/env bash 
+
+set -u
+SCRIPTNAME="$(basename "${0}")"
+
+# -------------------------------------------------
+function error {
+    echo "ERROR: $1"
+    usage
+    exit 1
+}
+# -------------------------------------------------
+
+DEFAULTRELEASESUFFIX="NONE"
+DEFAULTNUMBEROFJOBS="10"
+
+# -------------------------------------------------
+function usage {
+    echo "${SCRIPTNAME} mac/main.mac njobs releasesuffix paramtogate"
+    echo "default njobs = ${DEFAULTNUMBEROFJOBS}"
+    echo "default releasesuffix = ${DEFAULTRELEASESUFFIX} (NONE means use Gate in PATH)"
+}
+# -------------------------------------------------
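+# example (illustrative): submit 20 jobs running mac/main.mac with the Gate found in PATH
+#   ./gate_run_submit_cluster.sh mac/main.mac 20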
+
+test $# -eq 0 && usage && exit 0
+
+SCRIPTDIR="${HOME}/git/gate-tests/bin"
+RELEASESUFFIX=${3:-"${DEFAULTRELEASESUFFIX}"}
+RELEASEDIR="${HOME}/releases/grid_release${RELEASESUFFIX}"
+JOBFILE="$(dirname $0)/gate_job_cluster.job"
+
+echo "Checking stuff"
+test -f ${JOBFILE} || error "can't find job file ${JOBFILE}"
+if test "${RELEASESUFFIX}" = "${DEFAULTRELEASESUFFIX}"
+then
+    RELEASEDIR="NONE"
+    which Gate > /dev/null 2>&1 || error "there is no Gate in the PATH"
+else
+    test -d ${RELEASEDIR} || error "invalid release dir ${RELEASEDIR}"
+fi
+MACRODIR=$(pwd)
+test -d ${MACRODIR}/mac && test -d ${MACRODIR}/data || error "invalid path"
+MACROFILE=${1:?"provide relative macro path"}
+test -f ${MACRODIR}/${MACROFILE} || error "invalid macro"
+OUTPUTDIR=$(mktemp --tmpdir=${MACRODIR} -d run.XXXX || error "can't create temp dir")
+test -d ${OUTPUTDIR} || error "can't locate output dir"
+RUNID=${OUTPUTDIR##*.}
+NJOBS=${2:-"${DEFAULTNUMBEROFJOBS}"}
+NJOBSMAX=${NJOBS}
+PARAM="${4:-""}"
+
+echo "Lets roll!!"
+echo "runid is ${RUNID}"
+
+QSUB=$(which qsub 2> /dev/null)
+# echo "qsub is $(which qsub)"
+test -z "${QSUB}" && QSUB="noqsub"
+if test "${QSUB}" = "noqsub"
+then
+    echo "qsub is not found. Simply run Gate on multiple cores."
+fi
+
+test -z "${PARAM}" && echo "no param" || echo "param is ${PARAM}"
+if test "$RELEASESUFFIX" = "$DEFAULTRELEASESUFFIX"
+then
+    echo "Gate is $(which Gate)"
+else
+    echo "Gate release is $(basename ${RELEASEDIR})"
+fi
+echo "submitting ${NJOBS} jobs"
+
+PARAMFILE="${OUTPUTDIR}/params.txt"
+echo "njobs = ${NJOBS}" >> "${PARAMFILE}"
+echo "macro = ${MACROFILE}" >> "${PARAMFILE}"
+test -z "${PARAM}" || echo "param = ${PARAM}" >> "${PARAMFILE}"
+
+while test $NJOBS -gt 0; do
+
+    if test "${QSUB}" = "noqsub"
+    then 
+        echo "Launching Gate log in ${OUTPUTDIR}/gate_${NJOBS}.log"
+        PARAM=\"${PARAM}\" INDEX=${NJOBS} INDEXMAX=${NJOBSMAX} SCRIPTDIR=${SCRIPTDIR} OUTPUTDIR=${OUTPUTDIR}  RELEASEDIR=${RELEASEDIR} MACROFILE=${MACROFILE} MACRODIR=${MACRODIR} PBS_JOBID="local_${NJOBS}" bash "${JOBFILE}" > ${OUTPUTDIR}/gate_${NJOBS}.log &
+    else
+        qsub -N "gatejob.${RUNID}" -o "${OUTPUTDIR}" \
+           -v "PARAM=\"${PARAM}\",INDEX=${NJOBS},INDEXMAX=${NJOBSMAX},SCRIPTDIR=${SCRIPTDIR},OUTPUTDIR=${OUTPUTDIR},RELEASEDIR=${RELEASEDIR},MACROFILE=${MACROFILE},MACRODIR=${MACRODIR}" \
+           "${JOBFILE}" || error "submission error"
+    fi
+
+    let NJOBS--
+done
+
+echo "runid is ${RUNID}"
diff --git a/cluster_tools/gate_upload_release.sh b/cluster_tools/gate_upload_release.sh
new file mode 100755 (executable)
index 0000000..784cd45
--- /dev/null
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+. gate_common.sh
+
+releasearchive="${1:?"provide path to release archive"}"
+test -f ${releasearchive} || error "file ${releasearchive} doesn't exist"
+releasename="${prefix}$(basename "${releasearchive}")"
+
+ensure_proxy || error "no valid proxy"
+
+echo "releasearchive=${releasearchive}"
+echo "releasename=${releasename}"
+echo "lfnrelease=${lfnrelease}"
+check_user || exit 2
+
+upload_file "${releasearchive}" "${lfnrelease}${releasename}" && echo "success" || echo "failed"
diff --git a/cluster_tools/mergeDosePerEnergyFile.sh b/cluster_tools/mergeDosePerEnergyFile.sh
new file mode 100755 (executable)
index 0000000..b63c5ae
--- /dev/null
@@ -0,0 +1,33 @@
+#!/bin/bash
+set -u
+
+function usage {
+       echo "$0 -i <file1> -j <file2> -o <result>"
+       exit 1
+}
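+# both input files are expected to contain lines of the form (illustrative values):
+#   # energydose <parameter> <value>
+# the <value> of each <parameter> is summed across the two files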
+
+if [ $# != 6 ]
+then
+       usage
+fi
+
+IN1=$2
+IN2=$4
+RESULT=$6
+
+test -f ${IN1} && test -f ${IN2} || usage
+
+TMP="$(mktemp)"
+echo "merging dose file"
+for PARAM in `awk '$1 == "#" {print $3}' ${IN1}`
+do
+        echo "merging ${PARAM}"
+       V1=`awk -v P=${PARAM} '$3 == P {print $4}' ${IN1} `
+       V2=`awk -v P=${PARAM} '$3 == P {print $4}' ${IN2} `
+        V1=`echo ${V1} | sed -e 's/[eE]+*/\*10\^/'`
+        V2=`echo ${V2} | sed -e 's/[eE]+*/\*10\^/'`
+       R=`echo "scale=30; ${V1} + ${V2}" | bc -l`
+        test -z "${R}" && continue
+        echo "# energydose ${PARAM} ${R}" >> ${TMP}
+done
+mv -f ${TMP} ${RESULT}
diff --git a/cluster_tools/mergeStatFile.py b/cluster_tools/mergeStatFile.py
new file mode 100755 (executable)
index 0000000..a1cd573
--- /dev/null
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+import sys
+import re
+import datetime
+
+linere = re.compile(r'''^#\s+([a-zA-Z]+)\s+=\s(.*)$''')
+mergedlines = ['NumberOfRun', 'NumberOfEvents', 'NumberOfTracks', 'NumberOfSteps', 'NumberOfGeometricalSteps', 'NumberOfPhysicalSteps', 'ElapsedTimeWoInit', 'ElapsedTime', 'StartDate', 'EndDate']
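+# each input stat file is expected to contain lines matching linere, e.g. (illustrative values):
+#   # NumberOfEvents = 100000
+#   # ElapsedTime = 123.4
+#   # StartDate = Wed Jul 24 09:25:56 2013
+# integer/float entries of the two files are summed; StartDate/EndDate are merged as min/max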
+
+assert(len(sys.argv)==7)
+assert(sys.argv[1]=="-i")
+assert(sys.argv[3]=="-j")
+assert(sys.argv[5]=="-o")
+
+def total_seconds(deltat):
+    try:
+        return float(deltat.total_seconds())
+    except AttributeError: # total_seconds defined in 2.7
+        total = 0.
+        total += deltat.seconds
+        total += deltat.microseconds*1e-6
+        total += deltat.days*3600.*24.
+        return total
+
+def parse_stat_file(filename):
+    keys = {}
+    for line in open(filename,"r").readlines():
+        match = linere.match(line)
+        assert(match is not None)
+        groups = match.groups()
+        if groups[0] not in mergedlines:
+            continue
+        keys[groups[0]]=groups[1]
+    return keys
+
+def merge_keys(ikeys,jkeys):
+    mindate = None
+    maxdate = None
+    keys = {}
+    for line in mergedlines:
+        value = None
+
+        if value is None:
+            try:
+                ivalue = int(ikeys[line])
+                jvalue = int(jkeys[line])
+                value = ivalue + jvalue
+                value = str(value)
+            except ValueError:
+                pass
+
+        if value is None:
+            try:
+                ivalue = float(ikeys[line])
+                jvalue = float(jkeys[line])
+                value = ivalue + jvalue
+                value = str(value)
+            except ValueError:
+                pass
+
+        if value is None:
+                ivalue = datetime.datetime.strptime(ikeys[line],"%a %b %d %H:%M:%S %Y")
+                jvalue = datetime.datetime.strptime(jkeys[line],"%a %b %d %H:%M:%S %Y")
+                if line=="StartDate":
+                    value = min(ivalue,jvalue)
+                    mindate = value
+                if line=="EndDate":
+                    value = max(ivalue,jvalue)
+                    maxdate = value
+                value = value.strftime("%a %b %d %H:%M:%S %Y")
+
+        assert(value is not None)
+        keys[line] = value
+    if mindate is not None and maxdate is not None:
+        speedup = float(keys["ElapsedTime"])/total_seconds(maxdate-mindate)
+        keys["Speedup"] = str(speedup)
+    return keys
+
+def format_keys(keys):
+    output = "\n".join("# %s = %s" % (line,keys[line]) for line in mergedlines)
+    if "Speedup" in keys:
+        output += "\n# Speedup = %s" % keys["Speedup"]
+    return output
+
+ikeys = parse_stat_file(sys.argv[2])
+jkeys = parse_stat_file(sys.argv[4])
+keys  = merge_keys(ikeys,jkeys)
+output = format_keys(keys)
+open(sys.argv[6],"w").write(output)
+
diff --git a/cluster_tools/mergeStatFile.sh b/cluster_tools/mergeStatFile.sh
new file mode 100755 (executable)
index 0000000..12c7c05
--- /dev/null
@@ -0,0 +1,32 @@
+#!/bin/bash
+set -u
+
+function usage {
+       echo "$0 -i <file1> -j <file2> -o <result>"
+       exit 1
+}
+
+if [ $# != 6 ]
+then
+       usage
+fi
+
+IN1=$2
+IN2=$4
+RESULT=$6
+
+
+test -f ${IN1} && test -f ${IN2} || usage
+
+TMP="$(mktemp)"
+echo "merging stat file"
+for PARAM in `awk '$1 == "#" {print $2}' ${IN1}`
+do
+        echo "merging ${PARAM}"
+       V1=`awk -v P=${PARAM} '$2 == P {print $4}' ${IN1} `
+       V2=`awk -v P=${PARAM} '$2 == P {print $4}' ${IN2} `
+       R=`echo "${V1} + ${V2}" | bc`
+        test -z "${R}" && continue
+        echo "# ${PARAM} = ${R}" >> ${TMP}
+done
+mv -f ${TMP} ${RESULT}
diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt
index ba65d2821b390e3124bb50885193433df3b1bec0..6c1096a6ca384017eb03c936da33969de416d4e3 100644 (file)
@@ -66,8 +66,7 @@ ENDIF(CLITK_MEMORY_INFO)
 IF (CLITK_USE_SYSTEM_GDCM)
   FIND_PACKAGE(GDCM REQUIRED)
   include(${GDCM_USE_FILE})
-  TARGET_LINK_LIBRARIES(clitkCommon vtkgdcm)
-  # gdcmDICT gdcmMSFF)
+  TARGET_LINK_LIBRARIES(clitkCommon vtkgdcm gdcmDICT gdcmMSFF gdcmMEXD)
 ENDIF()
 #=========================================================
 
diff --git a/common/clitkDicomRT_ROI.cxx b/common/clitkDicomRT_ROI.cxx
index b66135322a39042bfc243dae308e392a8a844b5a..1fd9fe1f1c6b806bd0f08be65c15d32d47a42f7f 100644 (file)
@@ -475,6 +475,16 @@ void clitk::DicomRT_ROI::Read(vtkSmartPointer<vtkGDCMPolyDataReader> & reader, i
 
   //mColor = //FIXME !!  
 
+  // gdcm::Attribute<0x3006,0x002a> color = {};
+  
+  // const gdcm::DataSet & nestedds = mItemContour->GetNestedDataSet();
+  // color.SetFromDataSet( nestedds );
+  // assert( color.GetNumberOfValues() == 3 );
+  // mColor[0] = color.GetValue(0);
+  // mColor[1] = color.GetValue(1);
+  // mColor[2] = color.GetValue(2);
+
+
   SetDicomUptodateFlag(true);
   // Get the contour
   mMesh =  reader->GetOutput(roiindex);  
diff --git a/common/clitkImage2DicomRTStructFilter.h b/common/clitkImage2DicomRTStructFilter.h
index 924bcc01db3d458a8add1d10daf712939c0e0835..b6a74b2fe7829ff7fb59b7cce19b0c343f0427a1 100644 (file)
@@ -41,25 +41,25 @@ namespace clitk {
     typedef typename clitk::DicomRT_StructureSet::Pointer DicomRTStructPointer;
 
     // Set inputs
-    itkSetMacro(Input, ImagePointer);
-    itkGetConstMacro(Input, ImagePointer);
+    itkSetMacro(InputFilenames, std::vector<std::string> );
     itkSetMacro(StructureSetFilename, std::string);
     itkSetMacro(DicomFolder, std::string);
     itkSetMacro(OutputFilename, std::string);
-    void SetROIName(std::string name, std::string type);
+    void SetROIType(std::string type);
     itkSetMacro(ThresholdValue, PixelType);
+    itkSetMacro(SkipInitialStructuresFlag, bool);
     
     // Run filter
     void Update();    
     
   protected:
-    ImagePointer m_Input;
     std::string m_StructureSetFilename;
     std::string m_DicomFolder;
     std::string m_OutputFilename;
-    std::string m_ROIName;
     std::string m_ROIType;
     PixelType m_ThresholdValue;
+    std::vector<std::string> m_InputFilenames;
+    bool m_SkipInitialStructuresFlag;
   };
   //--------------------------------------------------------------------
 
diff --git a/common/clitkImage2DicomRTStructFilter.txx b/common/clitkImage2DicomRTStructFilter.txx
index 1b241a99824a233a094058f1b8196d09c4e99772..77c5e07044f1684b502021b8196c13ec15c5c975 100644 (file)
@@ -45,6 +45,7 @@
 #include "vtkCamera.h"
 #include "vtkProperty.h"
 #include "vtkProperty2D.h"
+#include <vtksys/SystemTools.hxx>
 
 // itk
 #include <itkImage.h>
@@ -61,6 +62,7 @@ clitk::Image2DicomRTStructFilter<PixelType>::Image2DicomRTStructFilter()
   m_DicomFolder = "";
   m_OutputFilename = "default-output.dcm";
   m_ThresholdValue = 0.5;
+  m_SkipInitialStructuresFlag = false;
 }
 //--------------------------------------------------------------------
 
@@ -76,9 +78,8 @@ clitk::Image2DicomRTStructFilter<PixelType>::~Image2DicomRTStructFilter()
 //--------------------------------------------------------------------
 template<class PixelType>
 void
-clitk::Image2DicomRTStructFilter<PixelType>::SetROIName(std::string name, std::string type)
+clitk::Image2DicomRTStructFilter<PixelType>::SetROIType(std::string type)
 {
-  m_ROIName = name;
   m_ROIType = type;
 }
 //--------------------------------------------------------------------
@@ -112,9 +113,18 @@ void clitk::Image2DicomRTStructFilter<PixelType>::Update()
               << p->GetNumberOfStructureSetROIs() << std::endl;
   }
   
+
+  // number of additional contours
+  int m = m_InputFilenames.size();
+
   // Init writer
   vtkGDCMPolyDataWriter * writer = vtkGDCMPolyDataWriter::New();
-  int numMasks = reader->GetNumberOfOutputPorts() + 1;//add one more
+  int numMasks = reader->GetNumberOfOutputPorts() + m;
+
+  if (m_SkipInitialStructuresFlag) {
+    numMasks = m;
+  }
+
   writer->SetNumberOfInputPorts(numMasks);    
   writer->SetFileName(m_OutputFilename.c_str());
   writer->SetMedicalImageProperties(reader->GetMedicalImageProperties());
@@ -128,18 +138,46 @@ void clitk::Image2DicomRTStructFilter<PixelType>::Update()
   roiTypes->SetNumberOfValues(numMasks);
   
   // Convert the image into a mesh
-  typedef clitk::BinaryImageToMeshFilter<ImageType> BinaryImageToMeshFilterType;
-  typename BinaryImageToMeshFilterType::Pointer convert = BinaryImageToMeshFilterType::New();
-  convert->SetThresholdValue(m_ThresholdValue);
-  convert->SetInput(m_Input);
-  convert->Update();
-  vtkPolyData* mesh = convert->GetOutputMesh();
-  if (GetVerboseFlag()) {
-    std::cout << "Mesh has " << mesh->GetNumberOfLines() << " lines." << std::endl;
+  std::vector<vtkSmartPointer<vtkPolyData> > meshes;
+  std::vector<std::string> m_ROINames;
+  meshes.resize(m);
+  m_ROINames.resize(m);
+  for(unsigned int i=0; i<m; i++) {
+    
+    // read image
+    //    typedef float PixelType;
+    //typedef itk::Image<PixelType, 3> ImageType;
+    ImagePointer input = clitk::readImage<ImageType>(m_InputFilenames[i], false);
+
+    std::ostringstream oss;
+    oss << vtksys::SystemTools::
+      GetFilenameName(vtksys::SystemTools::GetFilenameWithoutLastExtension(m_InputFilenames[i]));
+    std::string name = oss.str();
+    m_ROINames[i] = name;
+    
+    // convert to mesh
+    typedef clitk::BinaryImageToMeshFilter<ImageType> BinaryImageToMeshFilterType;
+    typename BinaryImageToMeshFilterType::Pointer convert = BinaryImageToMeshFilterType::New();
+    convert->SetThresholdValue(m_ThresholdValue);
+    convert->SetInput(input);
+    convert->Update();
+    meshes[i] = convert->GetOutputMesh();
+    if (GetVerboseFlag()) {
+      std::cout << "Mesh has " << meshes[i]->GetNumberOfLines() << " lines." << std::endl;
+    }
+    
+    /*
+    // debug mesh write  FIXME
+    vtkSmartPointer<vtkPolyDataWriter> wr = vtkSmartPointer<vtkPolyDataWriter>::New();
+    wr->SetInputConnection(convert->GetOutputPort()); //psurface->GetOutputPort()
+    wr->SetFileName("bidon.obj");
+    wr->Update();
+    wr->Write();
+    */
   }
-
+    
   // Copy previous contours
-  for (unsigned int i = 0; i < numMasks-1; ++i) {
+  for (unsigned int i = 0; i < numMasks-m; ++i) {
     writer->SetInput(i, reader->GetOutput(i));
     std::string theString = reader->GetRTStructSetProperties()->GetStructureSetROIName(i);
     roiNames->InsertValue(i, theString);
@@ -149,13 +187,14 @@ void clitk::Image2DicomRTStructFilter<PixelType>::Update()
     roiTypes->InsertValue(i, theString);
   }  
 
-  // Add new one
-  int last = numMasks-1;
-  writer->SetInput(last, mesh);
-  roiNames->InsertValue(last, m_ROIName);
-  roiAlgorithms->InsertValue(last, "CLITK_CREATED");
-  roiTypes->InsertValue(last, m_ROIType);
-
+  // Add new ones
+  for (unsigned int i = numMasks-m; i < numMasks; ++i) {
+    writer->SetInput(i, meshes[i-numMasks+m]);
+    roiNames->InsertValue(i, m_ROINames[i-numMasks+m]);
+    roiAlgorithms->InsertValue(i, "CLITK_CREATED");
+    roiTypes->InsertValue(i, m_ROIType);
+  }
+    
   /*
   //  Visu DEBUG
   vtkPolyDataMapper *cubeMapper = vtkPolyDataMapper::New();
diff --git a/common/clitkTransformUtilities.cxx b/common/clitkTransformUtilities.cxx
index ef210d70ae3318c5dbb778557787d9d86b51677e..832d5983eed109898fe427337ec9b4f6dd0a8483 100644 (file)
@@ -67,4 +67,32 @@ itk::Matrix<double, 3, 3> GetRotationMatrix<3>(itk::Array<double> rotationParame
   return GetRotationMatrix3D(rotationParameters);
 }
 
+//--------------------------------------------------------------------
+itk::Matrix<double, 4, 4> ReadMatrix3D(std::string fileName)
+{
+  // read input matrix
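+  // expected file content (illustrative): 16 whitespace-separated values, filled row by
+  // row into the 4x4 matrix; lines skipped by skipComment() may precede the values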
+  std::ifstream is;
+  openFileForReading(is, fileName);
+  std::vector<double> nb;
+  double x;
+  skipComment(is);
+  is >> x;
+  while (is && !is.eof()) {
+    nb.push_back(x);
+    skipComment(is);
+    is >> x;
+  }
+
+  if(nb.size() != 16)
+    itkGenericExceptionMacro(<< "Could not read 4x4 matrix in " << fileName);
+
+  //copy it to the matrix
+  itk::Matrix<double, 4, 4> matrix;
+  unsigned int index=0;
+  for (unsigned int i=0;i<4;i++)
+    for (unsigned int j=0;j<4;j++)
+      matrix[i][j]=nb[index++];
+  return matrix;
+}
+
 }
diff --git a/common/clitkTransformUtilities.h b/common/clitkTransformUtilities.h
index cbc30b97ac806aa0cd3c91d531dc4cac082bd2e8..d3644b19206f830eab30397f2c209ef721eca6f4 100644 (file)
@@ -255,29 +255,7 @@ namespace clitk
     return matrix; 
   }
    
-  inline itk::Matrix<double, 4, 4> ReadMatrix3D(std::string fileName)
-  {
-    // read input matrix
-    std::ifstream is;
-    openFileForReading(is, fileName);
-    std::vector<double> nb;
-    double x;
-    skipComment(is);
-    is >> x;
-    while (!is.eof()) {
-      nb.push_back(x);
-      skipComment(is);
-      is >> x;  
-    }
-     
-    //copy it to the matrix
-    itk::Matrix<double, 4, 4> matrix;
-    unsigned int index=0;
-    for (unsigned int i=0;i<4;i++)
-      for (unsigned int j=0;j<4;j++)
-       matrix[i][j]=nb[index++];
-    return matrix; 
-  }
+  itk::Matrix<double, 4, 4> ReadMatrix3D(std::string fileName);
  
   inline vtkMatrix4x4* ReadVTKMatrix3D(std::string fileName) {
     // read input matrix
diff --git a/common/vvImageReader.cxx b/common/vvImageReader.cxx
index f4f1a907214912b61b4bbe295a1c2652c6359c8e..ddc4ad3833ae2e60050fee3debd5c58e106ee4c7 100644 (file)
@@ -161,7 +161,15 @@ void vvImageReader::ReadMatImageTransform()
   if(f.is_open()) {
     f.close();
 
-    itk::Matrix<double, 4, 4> itkMat = clitk::ReadMatrix3D(filename);
+    itk::Matrix<double, 4, 4> itkMat;
+    itkMat.SetIdentity();
+    try {
+      itkMat = clitk::ReadMatrix3D(filename);
+    }
+    catch (itk::ExceptionObject & err) {
+      itkWarningMacro(<< "Found " << filename
+                      << " but this is not a 4x4 matrix so it is ignored.");
+    }
 
     vtkSmartPointer<vtkMatrix4x4> matrix = vtkSmartPointer<vtkMatrix4x4>::New();
     matrix->Identity();
diff --git a/common/vvImageWriter.txx b/common/vvImageWriter.txx
index 0705197fd49b4f4d1394eb864c2175e2681464f2..81e45ee03b91344b0f60f7829b1aa0c2ac5997bd 100644 (file)
@@ -38,6 +38,8 @@ void vvImageWriter::UpdateWithDim(std::string OutputPixelType)
     UpdateWithDimAndOutputPixelType<unsigned char,VImageDimension>();
   } else if (OutputPixelType == "int") {
     UpdateWithDimAndOutputPixelType<int,VImageDimension>();
+  } else if (OutputPixelType == "unsigned_int") {
+    UpdateWithDimAndOutputPixelType<unsigned int,VImageDimension>();
   } else if (OutputPixelType == "double") {
     UpdateWithDimAndOutputPixelType<double,VImageDimension>();
   } else if (OutputPixelType == "float") {
diff --git a/segmentation/clitkExtractLung.ggo b/segmentation/clitkExtractLung.ggo
index 44b99a201f468840b8a1e9c12c5154562f098699..f31f71812fa60c3bb4eca17f348c93b9f18cc9c3 100644 (file)
@@ -36,7 +36,7 @@ option "skipslices"                  -  "0: Number of slices to skip before sear
 option "upperThresholdForTrachea"    - "0: Initial upper threshold for trachea"  double        no  default="-900"
 option "multiplierForTrachea"       -  "0: Multiplier for the region growing"    double        no  default="5"
 option "thresholdStepSizeForTrachea" - "0: Threshold step size"                       int      no  default="64"
-option "seed"                        - "0,1: Index of the trachea seed point (in pixel, not in mm)"      int   no  multiple
+option "seed"                        - "0,1: Index of the trachea seed point (in mm  NOT IN PIXELS)"      float        no  multiple
 option "doNotCheckTracheaVolume"     -  "0,1: If set, do not check the trachea volume" flag off
 option "verboseRG"                   -  "0,1: Verbose RegionGrowing"   flag off
 option "maxElongation"               -  "1: Maximum allowed elongation of candidate regions for the trachea" float no default="0.5"
@@ -63,7 +63,7 @@ option "opencloseRadius"      -  "OpenClose radius"                           int no
 
 section "Step 6 : fill holes"
 option "doNotFillHoles"                -  "Do not fill holes if set"                 flag on
-option "dir"                   d  "Directions (axes) to perform filling (defaults to 2,1,0)"   int multiple no
+#option "dir"                  d  "Directions (axes) to perform filling (defaults to 2,1,0)"   int multiple no
 
 section "Step 7 : lung separation (labelling)"
 option "doNotSeparateLungs"   -  "Do not separate lungs if set"                 flag off
diff --git a/segmentation/clitkExtractLungFilter.h b/segmentation/clitkExtractLungFilter.h
index 6f31cee1626a334d53f8042684bf8bbdb62a96d6..6c50b1c2d96c85e3c9b8485d970d9ad6e0a261b3 100644 (file)
@@ -164,7 +164,8 @@ namespace clitk {
     itkSetMacro(SeedPreProcessingThreshold, int);
     itkGetConstMacro(SeedPreProcessingThreshold, int);
 
-    void AddSeed(InternalIndexType s);
+    void AddSeedInPixels(InternalIndexType s);
+    void AddSeed(InputImagePointType s);
     std::vector<InternalIndexType> & GetSeeds() { return  m_Seeds; }
 
     itkSetMacro(TracheaVolumeMustBeCheckedFlag, bool);
@@ -252,6 +253,7 @@ namespace clitk {
     InputImagePixelType m_ThresholdStepSizeForTrachea;
     double m_MultiplierForTrachea;
     std::vector<InternalIndexType> m_Seeds;
+    std::vector<InputImagePointType> m_SeedsInMM;
     int m_NumberOfSlicesToSkipBeforeSearchingSeed;
     bool m_TracheaVolumeMustBeCheckedFlag;
     bool m_VerboseRegionGrowingFlag;
diff --git a/segmentation/clitkExtractLungFilter.txx b/segmentation/clitkExtractLungFilter.txx
index a0acdd5ad2ab48aa3669d58f8d9d22171b74d329..b76001f0491397c36648ddb86653c120ddcfb49c 100644 (file)
@@ -90,8 +90,7 @@ ExtractLungFilter():
   TracheaVolumeMustBeCheckedFlagOn();
   SetNumSlices(50);
   SetMaxElongation(0.5);
-  SetSeedPreProcessingThreshold(-400);
+  SetSeedPreProcessingThreshold(-400); 
   
   // Step 3 default values
   SetNumberOfHistogramBins(500);
@@ -135,7 +134,19 @@ SetInput(const ImageType * image)
 template <class ImageType>
 void 
 clitk::ExtractLungFilter<ImageType>::
-AddSeed(InternalIndexType s) 
+//AddSeed(InternalIndexType s) 
+AddSeed(InputImagePointType s) 
+{ 
+  m_SeedsInMM.push_back(s);
+}
+//--------------------------------------------------------------------
+
+
+//--------------------------------------------------------------------
+template <class ImageType>
+void 
+clitk::ExtractLungFilter<ImageType>::
+AddSeedInPixels(InternalIndexType s) 
 { 
   m_Seeds.push_back(s);
 }
@@ -461,7 +472,6 @@ GenerateOutputInformation()
     // smalest one (sometimes appends with the stomach
     if (initialNumberOfLabels >1) {
       if (GetRemoveSmallLabelBeforeSeparationFlag()) {
-        DD(GetRemoveSmallLabelBeforeSeparationFlag());
         typedef itk::RelabelComponentImageFilter<MaskImageType, MaskImageType> RelabelFilterType;
         typename RelabelFilterType::Pointer relabelFilter = RelabelFilterType::New();
         relabelFilter->SetInput(working_mask);
@@ -577,7 +587,7 @@ SearchForTracheaSeed(int skip)
       it.GoToBegin();
       while (!it.IsAtEnd()) {
         if(it.Get() < GetUpperThresholdForTrachea() ) {
-          AddSeed(it.GetIndex());
+          AddSeedInPixels(it.GetIndex());
           // DD(it.GetIndex());
         }
         ++it;
@@ -853,7 +863,7 @@ TracheaRegionGrowing()
   f->SetVerbose(GetVerboseRegionGrowingFlag());
   for(unsigned int i=0; i<m_Seeds.size();i++) {
     f->AddSeed(m_Seeds[i]);
-    // DD(m_Seeds[i]);
+    //DD(m_Seeds[i]);
   }  
   f->Update();
   PrintMemory(GetVerboseMemoryFlag(), "After RG update");
@@ -907,6 +917,15 @@ SearchForTrachea()
   // compute trachea volume
   // if volume not plausible  -> skip more slices and restart 
 
+  // If initial seed, convert from mm to pixels
+  if (m_SeedsInMM.size() > 0) {
+    for(unsigned int i=0; i<m_SeedsInMM.size(); i++) {
+      InputImageIndexType index;
+      working_input->TransformPhysicalPointToIndex(m_SeedsInMM[i], index);
+      m_Seeds.push_back(index);
+    }
+  }
+
   bool has_seed;
   bool stop = false;
   double volume = 0.0;
diff --git a/segmentation/clitkExtractLungGenericFilter.txx b/segmentation/clitkExtractLungGenericFilter.txx
index b9a94d86aeef3b3b273ff9176786eb5ba214bc50..01455c84c3ba65d77ecb811da8e8a8afa442e029 100644 (file)
@@ -94,10 +94,10 @@ SetOptionsFromArgsInfoToFilter(FilterType * f)
   f->SetMaxElongation(mArgsInfo.maxElongation_arg);
   f->SetSeedPreProcessingThreshold(mArgsInfo.seedPreProcessingThreshold_arg);
 
-  typename FilterType::InputImageIndexType s;
+  typename FilterType::InputImagePointType s;
   if (mArgsInfo.seed_given) {
     ConvertOptionMacro(mArgsInfo.seed, s, 3, false);
-  f->AddSeed(s);
+    f->AddSeed(s);
   }
 
   f->SetMinimalComponentSize(mArgsInfo.minSize_arg);
diff --git a/tools/CMakeLists.txt b/tools/CMakeLists.txt
index 319908b8dca0edac6540ec1c82fb3b2019e4bf09..77f12f9f863e301e089cc28c49aab05a20276fe8 100644 (file)
@@ -108,6 +108,12 @@ IF (CLITK_BUILD_TOOLS)
   TARGET_LINK_LIBRARIES(clitkAffineTransform clitkCommon )
   SET(TOOLS_INSTALL ${TOOLS_INSTALL} clitkAffineTransform)
 
+  WRAP_GGO(clitkElastixTransformToMatrix_GGO_C clitkElastixTransformToMatrix.ggo)
+  ADD_EXECUTABLE(clitkElastixTransformToMatrix clitkElastixTransformToMatrix.cxx ${clitkElastixTransformToMatrix_GGO_C})
+  TARGET_LINK_LIBRARIES(clitkElastixTransformToMatrix clitkCommon )
+  SET(TOOLS_INSTALL ${TOOLS_INSTALL} clitkElastixTransformToMatrix)
+
+
   WRAP_GGO(clitkSetBackground_GGO_C clitkSetBackground.ggo)
   ADD_EXECUTABLE(clitkSetBackground clitkSetBackground.cxx clitkSetBackgroundGenericFilter.cxx ${clitkSetBackground_GGO_C})
   TARGET_LINK_LIBRARIES(clitkSetBackground clitkCommon)
@@ -271,7 +277,7 @@ IF (CLITK_BUILD_TOOLS)
   IF(CLITK_EXPERIMENTAL)
     WRAP_GGO(clitkBinaryImageToMesh_GGO_C clitkBinaryImageToMesh.ggo)
     ADD_EXECUTABLE(clitkBinaryImageToMesh clitkBinaryImageToMesh.cxx ${clitkBinaryImageToMesh_GGO_C})
-    TARGET_LINK_LIBRARIES(clitkBinaryImageToMesh itksys ${ITK_LIBRARIES} ${VTK_LIBRARIES})
+    TARGET_LINK_LIBRARIES(clitkBinaryImageToMesh clitkCommon)
     SET(TOOLS_INSTALL ${TOOLS_INSTALL} clitkBinaryImageToMesh)
 
     WRAP_GGO(clitkMeshToBinaryImage_GGO_C clitkMeshToBinaryImage.ggo)
@@ -281,7 +287,7 @@ IF (CLITK_BUILD_TOOLS)
 
     WRAP_GGO(clitkMeshViewer_GGO_C clitkMeshViewer.ggo)
     ADD_EXECUTABLE(clitkMeshViewer clitkMeshViewer.cxx ${clitkMeshViewer_GGO_C})
-    TARGET_LINK_LIBRARIES(clitkMeshViewer ${ITK_LIBRARIES} ${VTK_LIBRARIES})
+    TARGET_LINK_LIBRARIES(clitkMeshViewer clitkCommon)
     SET(TOOLS_INSTALL ${TOOLS_INSTALL} clitkMeshViewer)
   ENDIF(CLITK_EXPERIMENTAL)
 
diff --git a/tools/clitkAffineTransformGenericFilter.h b/tools/clitkAffineTransformGenericFilter.h
index 6cd2a91f17127ac6c3e3195440a319046100324c..39506326f572a2bb614da44524c64547879eda41 100644 (file)
@@ -87,6 +87,11 @@ namespace clitk
     //----------------------------------------  
     void Update();
 
+    template<unsigned int Dimension, class PixelType>
+      static
+      typename itk::Matrix<double, Dimension+1, Dimension+1>
+      createMatrixFromElastixFile(std::vector<std::string> & filename, bool verbose=true);
+
   protected:
 
     //----------------------------------------  
@@ -103,13 +108,9 @@ namespace clitk
     template <unsigned int Dimension, class PixelType>  void UpdateWithDimAndPixelType();
     template <unsigned int Dimension, class PixelType>  void UpdateWithDimAndVectorType();
 
-    template<unsigned int Dimension, class PixelType>
-      typename itk::Matrix<double, Dimension+1, Dimension+1>
-      createMatrixFromElastixFile(std::vector<std::string> & filename);
-
-    bool GetElastixValueFromTag(std::ifstream & is, std::string tag, std::string & value); 
-    void GetValuesFromValue(const std::string & s, 
-                            std::vector<std::string> & values);
+    static bool GetElastixValueFromTag(std::ifstream & is, std::string tag, std::string & value); 
+    static void GetValuesFromValue(const std::string & s, 
+                                   std::vector<std::string> & values);
 
     //----------------------------------------  
     // Data members
diff --git a/tools/clitkAffineTransformGenericFilter.txx b/tools/clitkAffineTransformGenericFilter.txx
index d3cd19e265a84b411ffd27a0791672502ca1d0d2..03cf862e2ee261548d6707761648821823b9e1fb 100644 (file)
@@ -183,7 +183,7 @@ namespace clitk
           if (m_ArgsInfo.elastix_given) {
             std::vector<std::string> s;
             for(uint i=0; i<m_ArgsInfo.elastix_given; i++) s.push_back(m_ArgsInfo.elastix_arg[i]);
-            matrix = createMatrixFromElastixFile<Dimension,PixelType>(s);
+            matrix = createMatrixFromElastixFile<Dimension,PixelType>(s, m_Verbose);
           }
           else 
             matrix.SetIdentity();
@@ -497,7 +497,7 @@ namespace clitk
   template<class args_info_type>
   template<unsigned int Dimension, class PixelType>
   typename itk::Matrix<double, Dimension+1, Dimension+1>
-                                                           AffineTransformGenericFilter<args_info_type>::createMatrixFromElastixFile(std::vector<std::string> & filename)
+                                                           AffineTransformGenericFilter<args_info_type>::createMatrixFromElastixFile(std::vector<std::string> & filename, bool verbose)
   {
     if (Dimension != 3) {
       FATAL("Only 3D yet" << std::endl);
@@ -509,7 +509,7 @@ namespace clitk
     for(uint j=0; j<filename.size(); j++) {
       
       // Open file
-      if (m_Verbose) std::cout << "Read elastix parameters in " << filename[j] << std::endl;
+      if (verbose) std::cout << "Read elastix parameters in " << filename[j] << std::endl;
       std::ifstream is;
       clitk::openFileForReading(is, filename[j]);
       
@@ -553,7 +553,7 @@ namespace clitk
         p[i+6] = atof(results[i+3].c_str()); // Translation
       mat->SetParameters(p);
     
-      if (m_Verbose) {
+      if (verbose) {
         std::cout << "Rotation      (deg) : " << rad2deg(p[0]) << " " << rad2deg(p[1]) << " " << rad2deg(p[2]) << std::endl;
         std::cout << "Center of rot (phy) : " << p[3] << " " << p[4] << " " << p[5] << std::endl;
         std::cout << "Translation   (phy) : " << p[6] << " " << p[7] << " " << p[8] << std::endl;
@@ -562,7 +562,7 @@ namespace clitk
       // Compose with previous if needed
       if (j!=0) {
         mat->Compose(previous);
-        if (m_Verbose) {
+        if (verbose) {
           std::cout << "Composed rotation      (deg) : " << rad2deg(mat->GetAngleX()) << " " << rad2deg(mat->GetAngleY()) << " " << rad2deg(mat->GetAngleZ()) << std::endl;
           std::cout << "Composed center of rot (phy) : " << mat->GetCenter() << std::endl;
           std::cout << "Compsoed translation   (phy) : " << mat->GetTranslation() << std::endl;
index c7b2e23f076420ecac828eba3bde2a5aa541dd0e..b25b001dea3a19c75edec04b1bcf790891c5cf6a 100644 (file)
@@ -82,12 +82,13 @@ int main(int argc, char * argv[])
     
     gdcm::Attribute<0x28, 0x100> pixel_size;
     pixel_size.SetFromDataSet(ds);
-    if (pixel_size.GetValue() != 16)
-    {
-      std::cerr << "Pixel type 2 bytes ! " << std::endl;
-      std::cerr << "In file " << input_files[i] << std::endl;
-      exit(0);
-    }
+    /* if (pixel_size.GetValue() != 16)
+       {
+       std::cerr << "Pixel type not 2 bytes ! " << std::endl;
+       std::cerr << "In file " << input_files[i] << std::endl;
+       exit(0);
+       }
+    */
 #else
     if (args_info.verbose_flag)
       std::cout << "Not using GDCM-2.x" << std::endl;
@@ -107,11 +108,12 @@ int main(int argc, char * argv[])
   theorigin[series_number][2] = header->GetZOrigin();
   sliceLocations[series_number].push_back(theorigin[series_number][2]);
   seriesFiles[series_number].push_back(input_files[i]);
-  if (header->GetPixelSize() != 2) {
+  /*if (header->GetPixelSize() != 2) {
     std::cerr << "Pixel type 2 bytes ! " << std::endl;
     std::cerr << "In file " << input_files[i] << std::endl;
     exit(0);
   }
+  */
 #endif
   }
 
index fe2d869d8bbf2e756068ae0b488c30dc45641583..202afc762feab842659b4421915b6faf74a4e275 100644 (file)
 // itk (gdcm) include
 #include "gdcmFile.h"
 #if GDCM_MAJOR_VERSION == 2
-  #include "gdcmReader.h"
-  #include "gdcmPrinter.h"
+#include "gdcmReader.h"
+#include "gdcmPrinter.h"
+#include "gdcmDict.h"
+#include "gdcmStringFilter.h"
 #endif
 
 //--------------------------------------------------------------------
@@ -43,8 +45,46 @@ int main(int argc, char * argv[])
   // check arg
   if (args_info.inputs_num == 0) return 0;
 
+  // Study ID
+ #if GDCM_MAJOR_VERSION == 2
+ if (args_info.studyID_flag) {
+    std::set<std::string> l;
+    for(unsigned int i=0; i<args_info.inputs_num; i++) {
+      gdcm::Reader reader;
+      reader.SetFileName(args_info.inputs[i]);
+      std::set<gdcm::Tag> tags;
+      gdcm::Tag StudyInstanceUIDTag(0x0020, 0x000d);
+      gdcm::Tag SeriesDateTag(0x0008, 0x0021);
+      tags.insert(StudyInstanceUIDTag);
+      tags.insert(SeriesDateTag);
+      if (reader.ReadSelectedTags(tags)) {
+        gdcm::StringFilter sf;
+        sf.SetFile(reader.GetFile());
+        std::pair<std::string, std::string> p = sf.ToStringPair(StudyInstanceUIDTag);
+        std::pair<std::string, std::string> d = sf.ToStringPair(SeriesDateTag);
+        if (args_info.uniq_flag) {
+          if (l.insert(p.second).second == true) {
+            if (args_info.filename_flag)
+              std::cout << args_info.inputs[i] << " " << p.second << " " << d.second << std::endl;
+            else
+              std::cout << p.second << std::endl;
+          }
+        }
+        else {
+          if (args_info.filename_flag)
+            std::cout << args_info.inputs[i] << " " << p.second << " " << d.second << std::endl;
+          else
+            std::cout << p.second << std::endl;
+        }
+      } // else: selected tags could not be read
+    }
+  }
+#endif
+
   // Loop files
+ if (!args_info.studyID_flag)
   for(unsigned int i=0; i<args_info.inputs_num; i++) {
+    if (args_info.filename_flag) std::cout << args_info.inputs[i] << std::endl;
 #if GDCM_MAJOR_VERSION == 2
     gdcm::Reader reader;
     reader.SetFileName(args_info.inputs[i]);
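
The new --studyID branch above asks GDCM to read only the selected tags, so the (typically large) pixel data does not have to be parsed. A self-contained sketch of that GDCM 2.x pattern for a single file (hypothetical standalone program, not part of this commit):

// Self-contained sketch of the ReadSelectedTags + StringFilter pattern used
// above (GDCM 2.x assumed). Prints the StudyInstanceUID of one DICOM file.
#include <iostream>
#include <set>
#include "gdcmReader.h"
#include "gdcmStringFilter.h"
#include "gdcmTag.h"

int main(int argc, char* argv[])
{
  if (argc < 2) return 1;
  gdcm::Tag studyUID(0x0020, 0x000d);             // StudyInstanceUID
  std::set<gdcm::Tag> tags;
  tags.insert(studyUID);

  gdcm::Reader reader;
  reader.SetFileName(argv[1]);
  if (!reader.ReadSelectedTags(tags)) return 1;   // reads only up to the requested tags

  gdcm::StringFilter sf;
  sf.SetFile(reader.GetFile());
  std::cout << sf.ToStringPair(studyUID).second << std::endl; // .second is the value, as in p.second above
  return 0;
}
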
index 27786a47ff23be29fa932db58789fd4046eacab9..064fa29ca67ede1aaa9a450dd9e35fd6ce0fdb41 100644 (file)
@@ -3,3 +3,6 @@ package "clitk"
 version "read and print Dicom header"
 
 option "config"                 -      "Config file"            string         no
+option "studyID"        s      "Only display Study UID" flag           off
+option "uniq"           u      "Do not display if same study UID"      flag           off
+option "filename"       f      "Display filename"       flag           off
index 33fb3b75c4931a70c1625c4aaa1e984f70381575..8a09da1c5b4baa33a2e915d3eb303ae5a48c4e3c 100644 (file)
@@ -7,7 +7,7 @@ option "verbose"         v "Verbose"                         flag       off
 option "verboseFile"     - "Verbose file content"            flag      off
 option "input"          i "Input Dicom file"                string     yes
 option "image"          j "Used to read image info (spacing, origin)"    string        yes
-option "output"                 o "Output image filename"       string yes
+option "output"                 o "Output image base filename (roi number and extension will be append)"        string yes
 
 defgroup "ROIoption" groupdesc="an option of this group is required" 
 groupoption "roi"     r "ROI to binarize (if -1 = all roi)"            int    no default="-1" group="ROIoption"
diff --git a/tools/clitkElastixTransformToMatrix.cxx b/tools/clitkElastixTransformToMatrix.cxx
new file mode 100644 (file)
index 0000000..59bf49e
--- /dev/null
@@ -0,0 +1,51 @@
+/*=========================================================================
+  Program:   vv                     http://www.creatis.insa-lyon.fr/rio/vv
+
+  Authors belong to:
+  - University of LYON              http://www.universite-lyon.fr/
+  - Léon Bérard cancer center       http://www.centreleonberard.fr
+  - CREATIS CNRS laboratory         http://www.creatis.insa-lyon.fr
+
+  This software is distributed WITHOUT ANY WARRANTY; without even
+  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+  PURPOSE.  See the copyright notices for more information.
+
+  It is distributed under dual licence
+
+  - BSD        See included LICENSE.txt file
+  - CeCILL-B   http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
+===========================================================================**/
+
+// clitk
+#include "clitkElastixTransformToMatrix_ggo.h"
+#include "clitkAffineTransformGenericFilter.h"
+
+//--------------------------------------------------------------------
+int main(int argc, char * argv[])
+{
+
+  // Init command line
+  GGO(clitkElastixTransformToMatrix, args_info);
+  CLITK_INIT;
+
+  // Use static fct of AffineTransformGenericFilter
+  typedef clitk::AffineTransformGenericFilter<args_info_clitkElastixTransformToMatrix> FilterType;
+  std::vector<std::string> l;
+  l.push_back(args_info.input_arg);
+  itk::Matrix<double, 4, 4> m = 
+    FilterType::createMatrixFromElastixFile<3, int>(l, args_info.verbose_flag);
+
+  // Print matrix
+  std::ofstream os;
+  clitk::openFileForWriting(os, args_info.output_arg);
+  for(unsigned int i=0; i<4; i++) {
+    for(unsigned int j=0; j<4; j++)
+      os << m[i][j] << " ";
+    os << std::endl;
+  }
+  os.close();  
+
+  return EXIT_SUCCESS;
+}// end main
+
+//--------------------------------------------------------------------
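
The tool writes the 4x4 homogeneous matrix as four lines of four space-separated values. A minimal sketch of reading it back into an itk::Matrix (hypothetical helper, not part of this commit):

// Minimal sketch, not part of this commit: read back the 4x4 matrix written
// by clitkElastixTransformToMatrix (four rows of four space-separated values).
#include <fstream>
#include <string>
#include "itkMatrix.h"

itk::Matrix<double, 4, 4> ReadMatrixFile(const std::string& path)
{
  itk::Matrix<double, 4, 4> m;
  std::ifstream is(path.c_str());
  for (unsigned int i = 0; i < 4; i++)
    for (unsigned int j = 0; j < 4; j++)
      is >> m[i][j];
  return m;
}
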
diff --git a/tools/clitkElastixTransformToMatrix.ggo b/tools/clitkElastixTransformToMatrix.ggo
new file mode 100644 (file)
index 0000000..f7f0758
--- /dev/null
@@ -0,0 +1,9 @@
+#File clitkElastixTransformToMatrix.ggo
+package "clitkElastixTransformToMatrix"
+version "1.0"
+purpose ""
+
+option "config"                -       "Config file"                     string        optional
+option "verbose"       v       "Verbose"                         flag          off
+option "input"         i       "Input elastix filename"          string        required
+option "output"                o       "Output matrix filename"          string        required
index 264eb05c6bdba56199f07081a43efe8969307a3f..807b073f8c1f01fd214fb2f8fc3ea77394d1640c 100644 (file)
@@ -27,20 +27,22 @@ int main(int argc, char * argv[]) {
   // Init command line
   GGO(clitkImage2DicomRTStruct, args_info);
 
-  // Read initial 3D image
-  typedef float PixelType;
-  typedef itk::Image<PixelType, 3> ImageType;
-  ImageType::Pointer input = clitk::readImage<ImageType>(args_info.input_arg, args_info.verbose_flag);
+  // Set initial 3D image filenames
+  std::vector<std::string> filenames;
+  for(unsigned int i=0; i< args_info.input_given; i++)
+    filenames.push_back(args_info.input_arg[i]);
 
   // Create a filter to convert image into dicomRTStruct and write to disk
+  typedef float PixelType;
   clitk::Image2DicomRTStructFilter<PixelType> filter;
   filter.SetVerboseFlag(args_info.verbose_flag);
-  filter.SetInput(input);
+  filter.SetInputFilenames(filenames);
   filter.SetDicomFolder(args_info.dicom_arg);
   filter.SetStructureSetFilename(args_info.rtstruct_arg);
   filter.SetOutputFilename(args_info.output_arg);
-  filter.SetROIName(args_info.roiname_arg, args_info.roitype_arg);
+  filter.SetROIType(args_info.roitype_arg);
   filter.SetThresholdValue(args_info.threshold_arg);
+  filter.SetSkipInitialStructuresFlag(args_info.skip_flag);
   filter.Update();
 
   // This is the end my friend 
index 928d11b5ed4ca205abf494253d62f7a3d6981b45..7416d32b5c40fdf911258d5d742b7fb0933c9c20 100644 (file)
@@ -5,12 +5,12 @@ version "Add a (binary) image inside a DICOM RT Structure Set (contours)"
 option "config"                 - "Config file"                     string     no
 option "verbose"         v "Verbose"                        flag       off
 
-option "input"          i "Input image file (binary image) to be converted into contours"  string      yes
+option "input"          i "Input image file (binary image) to be converted into contours"  string multiple yes
 option "rtstruct"       r "Input rt struct"                  string    yes
 option "dicom"          d "Input folder where the initial dicom ct is"                 string  yes
 option "output"                 o "Output DicomRT filename"          string    yes
 
 option "threshold"       t "Threshold for binary image"                         float   no default = "0.5"
-option "roiname"        - "Name of the roi added into the rt-struct"           string  yes
+option "skip"           s "Do not write in output the structures that was in input"   flag off
 option "roitype"        - "Name of the type of roi added into the rt-struct"   string  no default = "ORGAN"
 
index cb5effe22a5949ead40c8cb5613fda31bc41357e..b1cf3f973be3b6fda50b0eabb0939c5b9dab9abd 100644 (file)
@@ -42,8 +42,13 @@ namespace clitk
 
     if (Components==3)
       {
-         if (m_Verbose) std::cout  << "Launching filter in "<< Dimension <<"D and 3D float..." << std::endl;
-         UpdateWithDimAndPixelType<Dimension, itk::Vector<float, 3> >();
+        if (PixelType == "unsigned_char")
+            UpdateWithDimAndPixelType<Dimension, itk::Vector<unsigned char, 3> >();
+        else
+        {
+            if (m_Verbose) std::cout  << "Launching filter in "<< Dimension <<"D and 3D float..." << std::endl;
+            UpdateWithDimAndPixelType<Dimension, itk::Vector<float, 3> >();
+        }
       }
     else std::cerr<<"Number of components is "<<Components<<", not supported!"<<std::endl;
   }
index 48f3f483f8244f5460448b78ad29b7d2918c01d2..13ad66198b00ecaee0a5f3263191c8874f70c2e0 100644 (file)
@@ -2,7 +2,7 @@
 #define __vvPacsSettingsDialog_H
 #include "ui_vvPacsSettingsDialog.h"
 #include <QFileDialog>
-#include <QTGUI/QWidget>
+#include <QWidget>
 
  class vvPacsSettingsDialog : public QDialog 
   {
@@ -19,4 +19,4 @@ private slots:
        void accept();
 
  };
-#endif //__vvPacsSettingsDialog_H
\ No newline at end of file
+#endif //__vvPacsSettingsDialog_H
index 28a3d41bf815dbb2658e25d817eb4921e2621043..bafea832c9ad016a8d585a7a513a9a95bbaa1780 100644 (file)
@@ -1,11 +1,11 @@
 #ifndef __vvQPacsConnection_h_INCLUDED__
 #define __vvQPacsConnection_h_INCLUDED__
 
-#include <QTGUI/QWidget>
+#include <QWidget>
 #include "ui_vvPacsConnection.h"
 #include "gdcmCompositeNetworkFunctions.h"
-#include <QtGui/QStandardItemModel.h>
-#include <QtGui/QStringListModel.h>
+#include <QStandardItemModel>
+#include <QStringListModel>
 #include <QFileDialog>
 #include "vvDicomServerQueryFactory.h"
 
 
 
 
-  class vvQPacsConnection : public QDialog 
-  {
+class vvQPacsConnection : public QDialog
+{
          Q_OBJECT
-  public:
+public:
          //vvQPacsConnection(){}    
          vvQPacsConnection(QWidget *parent=0);