File exiv2-update-to-0.26-branch.patch of Package exiv2.7770
diff --git a/.travis.yml b/.travis.yml
index 8af0ea6d..66877a6a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,40 +1,26 @@
language: cpp
-sudo: false
+matrix:
+ include:
+ - os: linux
+ dist: trusty
+ sudo: required
+ compiler: gcc
+ - os: linux
+ dist: trusty
+ sudo: required
+ compiler: clang
+ - os: osx
+ osx_image: xcode9
+ compiler: clang
+ env: PYTHON=3.6.2 CMAKE_OPTIONS="-DCMAKE_BUILD_TYPE=Release -DEXIV2_ENABLE_VIDEO=ON -DEXIV2_ENABLE_WEBREADY=ON -DEXIV2_BUILD_UNIT_TESTS=ON" # All enabled
-addons:
- apt:
- packages:
- - cmake
- - zlib1g-dev
- - libssh-dev
- - libssh
- - libcurl4-openssl-dev
- - gettext
- sources:
- - kalakris-cmake
-
-compiler:
- - gcc
- - clang
+env:
+ #- CMAKE_OPTIONS="-DCMAKE_BUILD_TYPE=Release" # Default
+ #- CMAKE_OPTIONS="-DCMAKE_BUILD_TYPE=Debug -DBUILD_SHARED_LIBS=OFF" # Default (Debug mode + static libs)
+ - CMAKE_OPTIONS="-DCMAKE_BUILD_TYPE=Release -DEXIV2_ENABLE_VIDEO=ON -DEXIV2_ENABLE_WEBREADY=ON" # All enabled
+ #- CMAKE_OPTIONS="-DCMAKE_BUILD_TYPE=Release -DEXIV2_ENABLE_XMP=OFF -DEXIV2_ENABLE_NLS=OFF -DEXIV2_ENABLE_LENSDATA=OFF" # All disabled
+ #- CMAKE_OPTIONS="-DCMAKE_BUILD_TYPE=Release -DEXIV2_ENABLE_WEBREADY=ON -DEXIV2_ENABLE_CURL=OFF -DEXIV2_ENABLE_SSH=OFF" # WebReady without SSH nor CURL
-before_install:
- - echo $LANG
- - echo $LC_ALL
- - if [ $TRAVIS_OS_NAME == osx ]; then brew update && brew install libssh curl; fi
- - rvm use $RVM --install --binary --fuzzy
- - gem update --system
-# - gem --version
-
-script:
- - cmake -DCMAKE_INSTALL_PREFIX=..\dist -EXIV2_ENABLE_NLS=ON -DEXIV2_ENABLE_CURL=OFF -DEXIV2_ENABLE_SSH=OFF .
- - cmake --build . && cmake --build . --target install
- - cmake -DCMAKE_INSTALL_PREFIX=..\dist2 -EXIV2_ENABLE_NLS=ON -DEXIV2_ENABLE_CURL=ON -DEXIV2_ENABLE_SSH=ON -DEXIV2_ENABLE_WEBREADY=ON .
- - cmake --build . && cmake --build . --target install
-
-notifications:
- email: false
-
-os:
- - linux
- - osx
\ No newline at end of file
+install: ./.travis/install.sh
+script: ./.travis/run.sh
diff --git a/.travis/install.sh b/.travis/install.sh
new file mode 100755
index 00000000..67ee42bf
--- /dev/null
+++ b/.travis/install.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -e # Enables checking of return values from each command
+set -x # Prints every command
+
+if [[ "$(uname -s)" == 'Linux' ]]; then
+ sudo apt-get install cmake zlib1g-dev libssh-dev gettext expat libcurl4-openssl-dev libxml2-utils
+ sudo pip install virtualenv
+else
+ brew update
+ brew install gettext libssh expat zlib curl md5sha1sum
+ brew upgrade python
+fi
diff --git a/.travis/run.sh b/.travis/run.sh
new file mode 100755
index 00000000..1a32a2c2
--- /dev/null
+++ b/.travis/run.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -e
+set -x
+
+mkdir test/tmp/
+mkdir build && cd build
+cmake ${CMAKE_OPTIONS} -DCMAKE_INSTALL_PREFIX=install ..
+make -j2
+make tests
+make install
+
+cd ../tests/
+python3 runner.py
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 7034bb67..2e179bf5 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -20,12 +20,20 @@ endif()
CMAKE_MINIMUM_REQUIRED( VERSION 3.1.0 )
PROJECT( exiv2 )
+include( GNUInstallDirs )
+
if( POLICY CMP0042 )
cmake_policy(SET CMP0042 NEW) # enable MACOSX_RPATH support
else()
SET(CMAKE_MACOSX_RPATH 1)
endif()
+if (NOT MSVC)
+ set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
+ set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
+ set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
+endif()
+
SET( PACKAGE_COPYRIGHT "Andreas Huggel" )
SET( PACKAGE_BUGREPORT "ahuggel@gmx.net" )
SET( PACKAGE "exiv2" )
@@ -64,8 +72,8 @@ ENDIF()
# set include path for FindXXX.cmake files
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/config/")
-IF( MINGW OR UNIX )
- IF ( CMAKE_CXX_COMPILER STREQUAL "g++" OR CMAKE_C_COMPILER STREQUAL "gcc" )
+if( MINGW OR UNIX )
+ if (${CMAKE_CXX_COMPILER_ID} STREQUAL GNU)
ADD_DEFINITIONS(-Wall
-Wcast-align
-Wpointer-arith
@@ -76,18 +84,8 @@ IF( MINGW OR UNIX )
)
ENDIF()
- execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version OUTPUT_VARIABLE COMPILER_VERSION)
- string(REGEX MATCHALL "[a-z\+]+" GCC_COMPILER_COMPONENTS ${COMPILER_VERSION})
- list(GET GCC_COMPILER_COMPONENTS 0 COMPILER)
-
- execute_process(COMMAND ${CMAKE_CXX_COMPILER} -dumpversion OUTPUT_VARIABLE GCC_VERSION)
- string(REGEX MATCHALL "[0-9]+" GCC_VERSION_COMPONENTS ${GCC_VERSION})
- list(GET GCC_VERSION_COMPONENTS 0 GCC_MAJOR)
- list(GET GCC_VERSION_COMPONENTS 1 GCC_MINOR)
-
- message(STATUS Compiler: ${COMPILER} " Major:" ${GCC_MAJOR} " Minor:" ${GCC_MINOR})
-
- IF ( CYGWIN OR ( ${GCC_MAJOR} GREATER 5 ))
+ message(STATUS "Compiler info: ${CMAKE_CXX_COMPILER_ID} (${CMAKE_CXX_COMPILER}) ; version: ${CMAKE_CXX_COMPILER_VERSION}")
+ IF ( CYGWIN OR (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 5.0))
ADD_DEFINITIONS( -std=gnu++98 ) # to support snprintf
ELSE()
ADD_DEFINITIONS( -std=c++98 )
@@ -120,7 +118,6 @@ IF( EXIV2_ENABLE_XMP )
IF (NOT MINGW)
set(THREADS_PREFER_PTHREAD_FLAG ON)
ENDIF()
- find_package(Threads REQUIRED)
ENDIF( EXIV2_ENABLE_XMP )
INCLUDE( config/CMakeChecks.txt )
diff --git a/README b/README
index 1b28bfd7..2d0743f2 100644
--- a/README
+++ b/README
@@ -1,3 +1,5 @@
+https://travis-ci.org/Exiv2/exiv2.svg?branch=0.26
+
@@@Marco@@@@@b ;mm /##Gilles###\
j@@@#Robin", Brad /@@@Thomas@@@@Q
@@@# \ ## @@@b |@@@b
diff --git a/config/CMakeChecks.txt b/config/CMakeChecks.txt
index 77922930..0b458695 100644
--- a/config/CMakeChecks.txt
+++ b/config/CMakeChecks.txt
@@ -37,8 +37,6 @@ INCLUDE( CheckSymbolExists )
INCLUDE( CheckCSourceCompiles )
INCLUDE( CheckCXXSourceCompiles )
-INCLUDE( GNUInstallDirs )
-
INCLUDE( FindIconv )
SET( STDC_HEADERS ON )
@@ -51,6 +49,8 @@ INCLUDE_DIRECTORIES( ${CMAKE_INCLUDE_PATH} ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_C
LINK_DIRECTORIES( ${CMAKE_LIBRARY_PATH} )
SET( CMAKE_REQUIRED_INCLUDES ${CMAKE_INCLUDE_PATH} )
+find_package(Threads REQUIRED)
+
IF( EXIV2_ENABLE_PNG )
FIND_PACKAGE( ZLIB REQUIRED )
INCLUDE_DIRECTORIES( ${ZLIB_INCLUDE_DIR} )
@@ -95,15 +95,13 @@ ELSE( EXIV2_ENABLE_SHARED )
ENDIF( EXIV2_ENABLE_SHARED )
IF( EXIV2_ENABLE_NLS )
- #FIND_PACKAGE(Intl REQUIRED)
- #INCLUDE_DIRECTORIES(${Intl_INCLUDE_DIRS})
- IF( NOT LOCALEDIR )
- SET( LOCALEDIR "${CMAKE_INSTALL_PREFIX}/share/locale" )
- IF( WIN32 )
- STRING( REPLACE "/" "\\\\" LOCALEDIR ${LOCALEDIR} )
- ENDIF( WIN32 )
- ENDIF( NOT LOCALEDIR )
- ADD_DEFINITIONS( -DEXV_LOCALEDIR="${LOCALEDIR}" )
+ FIND_PACKAGE(Intl)
+ if(Intl_FOUND)
+ INCLUDE_DIRECTORIES(${Intl_INCLUDE_DIRS})
+ SET(LIBINTL_LIBRARIES ${Intl_LIBRARIES})
+ else()
+ SET(LIBINTL_LIBRARIES)
+ endif()
SET( ENABLE_NLS 1 )
ENDIF( EXIV2_ENABLE_NLS )
diff --git a/config/FindMSGFMT.cmake b/config/FindMSGFMT.cmake
index 9a73f2bc..393c3a9b 100644
--- a/config/FindMSGFMT.cmake
+++ b/config/FindMSGFMT.cmake
@@ -81,7 +81,7 @@ MACRO(ADD_TRANSLATIONS _baseName)
COMMAND ${MSGFMT_EXECUTABLE} -o ${_out} ${_in}
DEPENDS ${_in} )
INSTALL(FILES ${_out}
- DESTINATION ${LOCALEDIR}/${_file_we}/LC_MESSAGES/
+ DESTINATION ${CMAKE_INSTALL_LOCALEDIR}/${_file_we}/LC_MESSAGES/
RENAME ${_baseName}.mo )
SET(_outputs ${_outputs} ${_out})
ENDFOREACH(_file)
diff --git a/config/exiv2.pc.cmake b/config/exiv2.pc.cmake
index 256f8ac6..afc16e2b 100644
--- a/config/exiv2.pc.cmake
+++ b/config/exiv2.pc.cmake
@@ -1,7 +1,7 @@
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=${prefix}
-libdir=${prefix}/lib
-includedir=${prefix}/include
+libdir=@CMAKE_INSTALL_FULL_LIBDIR@
+includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@
Name: exiv2
Description: Exif and IPTC metadata library and tools
diff --git a/include/exiv2/value.hpp b/include/exiv2/value.hpp
index 64a8ca7f..b7d76fef 100644
--- a/include/exiv2/value.hpp
+++ b/include/exiv2/value.hpp
@@ -44,6 +44,7 @@
#include <sstream>
#include <memory>
#include <cstring>
+#include <climits>
// *****************************************************************************
// namespace extensions
@@ -1658,11 +1659,13 @@ namespace Exiv2 {
ok_ = true;
return static_cast<long>(value_[n]);
}
+// #55 crash when value_[n].first == LONG_MIN
+#define LARGE_INT 1000000
// Specialization for rational
template<>
inline long ValueType<Rational>::toLong(long n) const
{
- ok_ = (value_[n].second != 0);
+ ok_ = (value_[n].second != 0 && -LARGE_INT < value_[n].first && value_[n].first < LARGE_INT);
if (!ok_) return 0;
return value_[n].first / value_[n].second;
}
@@ -1670,7 +1673,7 @@ namespace Exiv2 {
template<>
inline long ValueType<URational>::toLong(long n) const
{
- ok_ = (value_[n].second != 0);
+ ok_ = (value_[n].second != 0 && value_[n].first < LARGE_INT);
if (!ok_) return 0;
return value_[n].first / value_[n].second;
}
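
Annotation: the guard above addresses issue #55 — converting a Rational whose numerator is the most negative representable value can overflow during the division (LONG_MIN / -1 has no representable result and typically aborts with SIGFPE). Below is a minimal standalone sketch of the guarded conversion; the Rational typedef and the function name are illustrative stand-ins, not exiv2's API.

``` cpp
#include <climits>
#include <iostream>
#include <utility>

// Illustrative stand-in for a rational value (numerator, denominator).
typedef std::pair<long, long> Rational;

// Same bound as the patch: reject a zero denominator and numerators outside
// (-LARGE_INT, LARGE_INT) before dividing.
const long LARGE_INT = 1000000;

bool toLongChecked(const Rational& r, long& out)
{
    if (r.second == 0 || r.first <= -LARGE_INT || r.first >= LARGE_INT)
        return false;                       // corresponds to ok_ = false
    out = r.first / r.second;
    return true;
}

int main()
{
    long v = 0;
    // LONG_MIN / -1 does not fit into a long; without the guard the division
    // is undefined behaviour and crashes with SIGFPE on common platforms.
    std::cout << toLongChecked(Rational(LONG_MIN, -1), v) << '\n';     // 0 (rejected)
    std::cout << toLongChecked(Rational(3, 2), v) << ' ' << v << '\n'; // 1 1
    return 0;
}
```
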
diff --git a/po/CMakeLists.txt b/po/CMakeLists.txt
index 63b37733..0403fc24 100644
--- a/po/CMakeLists.txt
+++ b/po/CMakeLists.txt
@@ -8,10 +8,6 @@
# automatically include all po files in the directory
FILE(GLOB PO_FILES *.po)
-if ( NOT MSVC )
- set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
-endif()
-
UPDATE_TRANSLATIONS(exiv2 ${PO_FILES})
ADD_TRANSLATIONS(exiv2 ${PO_FILES})
diff --git a/samples/CMakeLists.txt b/samples/CMakeLists.txt
index 9690aa0e..cc30a324 100644
--- a/samples/CMakeLists.txt
+++ b/samples/CMakeLists.txt
@@ -5,10 +5,6 @@
# Redistribution and use is allowed according to the terms of the BSD license.
# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
-if ( NOT MSVC )
- set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
-endif()
-
include_directories("${CMAKE_SOURCE_DIR}/include" "${CMAKE_SOURCE_DIR}/src")
SET( SAMPLES addmoddel.cpp
@@ -49,7 +45,7 @@ FOREACH(entry ${SAMPLES})
ADD_EXECUTABLE( ${target} ${target}.cpp )
ADD_TEST( ${target}_test ${target} )
TARGET_LINK_LIBRARIES( ${target} ${PRIVATE_VAR} exiv2lib Threads::Threads ${EXPAT_LIBRARIES})
- INSTALL( TARGETS ${target} ${INSTALL_TARGET_STANDARD_ARGS} )
+ INSTALL( TARGETS ${target} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
ENDFOREACH(entry ${SAMPLES})
###################################
@@ -62,25 +58,25 @@ ENDIF( MSVC )
SET( MC_SRC ${MC_SRC} metacopy.cpp ../src/utils.cpp )
ADD_EXECUTABLE( metacopy ${MC_SRC} )
TARGET_LINK_LIBRARIES( metacopy ${PRIVATE_VAR} exiv2lib Threads::Threads ${EXPAT_LIBRARIES} ${ZLIB_LIBRARIES})
-INSTALL( TARGETS metacopy ${INSTALL_TARGET_STANDARD_ARGS} )
+INSTALL( TARGETS metacopy RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
SET ( PATHTEST_SRC ${PATHTEST_SRC} path-test.cpp ../src/utils.cpp )
ADD_EXECUTABLE ( pathtest ${PATHTEST_SRC} )
SET_TARGET_PROPERTIES( pathtest PROPERTIES OUTPUT_NAME path-test )
TARGET_LINK_LIBRARIES( pathtest ${PRIVATE_VAR} exiv2lib Threads::Threads ${EXPAT_LIBRARIES} ${ZLIB_LIBRARIES})
-INSTALL ( TARGETS pathtest ${INSTALL_TARGET_STANDARD_ARGS} )
+INSTALL ( TARGETS pathtest RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
SET( EXIV2JSON_SRC exiv2json.cpp Jzon.cpp )
ADD_EXECUTABLE( exiv2json ${EXIV2JSON_SRC} )
SET_TARGET_PROPERTIES( exiv2json PROPERTIES OUTPUT_NAME exiv2json )
TARGET_LINK_LIBRARIES( exiv2json ${PRIVATE_VAR} ${PRIVATE_VAR} exiv2lib Threads::Threads ${EXPAT_LIBRARIES} ${ZLIB_LIBRARIES})
-INSTALL( TARGETS exiv2json ${INSTALL_TARGET_STANDARD_ARGS} )
+INSTALL( TARGETS exiv2json RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
SET( GEOTAG_SRC geotag.cpp )
ADD_EXECUTABLE( geotag ${GEOTAG_SRC} )
SET_TARGET_PROPERTIES( geotag PROPERTIES OUTPUT_NAME geotag )
TARGET_LINK_LIBRARIES( geotag ${PRIVATE_VAR} exiv2lib Threads::Threads ${EXPAT_LIBRARIES} ${ZLIB_LIBRARIES})
-INSTALL( TARGETS geotag ${INSTALL_TARGET_STANDARD_ARGS} )
+INSTALL( TARGETS geotag RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
# ******************************************************************************
# Man page
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index aecd6215..d4dc6375 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -6,10 +6,6 @@
# Redistribution and use is allowed according to the terms of the BSD license.
# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
-if ( NOT MSVC )
- set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
-endif()
-
# Private headers which are only needed for the library itself
SET( LIBEXIV2_PRIVATE_HDR canonmn_int.hpp
casiomn_int.hpp
@@ -246,6 +242,8 @@ SET_TARGET_PROPERTIES( exiv2lib PROPERTIES
OUTPUT_NAME exiv2
)
+target_compile_definitions(exiv2lib PRIVATE EXV_LOCALEDIR="${CMAKE_INSTALL_LOCALEDIR}" )
+
IF ( UNIX )
IF ( NOT CYGWIN AND NOT MINGW )
SET (LINUX 1)
@@ -268,10 +266,12 @@ else()
TARGET_LINK_LIBRARIES( exiv2lib ${PRIVATE_VAR} dl ${EXPAT_LIBRARIES} )
TARGET_LINK_LIBRARIES( exiv2lib ${PRIVATE_VAR} dl ${CURL_LIBRARIES} )
TARGET_LINK_LIBRARIES( exiv2lib ${PRIVATE_VAR} dl ${SSH_LIBRARIES} )
+ TARGET_LINK_LIBRARIES( exiv2lib ${PRIVATE_VAR} dl ${CMAKE_THREAD_LIBS_INIT} )
else()
TARGET_LINK_LIBRARIES( exiv2lib ${PRIVATE_VAR} ${EXPAT_LIBRARIES} )
TARGET_LINK_LIBRARIES( exiv2lib ${PRIVATE_VAR} ${CURL_LIBRARIES} )
TARGET_LINK_LIBRARIES( exiv2lib ${PRIVATE_VAR} ${SSH_LIBRARIES} )
+ TARGET_LINK_LIBRARIES( exiv2lib ${PRIVATE_VAR} ${CMAKE_THREAD_LIBS_INIT} )
endif()
endif()
@@ -308,7 +308,11 @@ IF (CYGWIN OR MINGW)
TARGET_LINK_LIBRARIES( exiv2lib ${PRIVATE_VAR} psapi ws2_32 )
ENDIF(CYGWIN OR MINGW)
-INSTALL( TARGETS exiv2lib ${INSTALL_TARGET_STANDARD_ARGS} )
+INSTALL(TARGETS exiv2lib
+ RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
+)
include(../CMake_msvc.txt)
msvc_runtime_configure(${EXIV2_ENABLE_SHARED} ${EXIV2_ENABLE_DYNAMIC_RUNTIME})
@@ -316,13 +320,14 @@ msvc_runtime_configure(${EXIV2_ENABLE_SHARED} ${EXIV2_ENABLE_DYNAMIC_RUNTIME})
# ******************************************************************************
# exiv2 application
ADD_EXECUTABLE( exiv2 ${EXIV2_SRC} ${EXIV2_HDR} )
-TARGET_LINK_LIBRARIES( exiv2 exiv2lib )
-INSTALL( TARGETS exiv2 ${INSTALL_TARGET_STANDARD_ARGS} )
+target_compile_definitions(exiv2 PRIVATE EXV_LOCALEDIR="${CMAKE_INSTALL_LOCALEDIR}" )
+TARGET_LINK_LIBRARIES( exiv2 exiv2lib ${LIBINTL_LIBRARIES} )
+INSTALL( TARGETS exiv2 RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
# ******************************************************************************
# connection test application
ADD_EXECUTABLE( conntest ${CONNTEST} )
-TARGET_LINK_LIBRARIES( conntest ${PRIVATE_VAR} exiv2lib ${CURL_LIBRARIES} ${SSH_LIBRARIES})
+TARGET_LINK_LIBRARIES( conntest ${PRIVATE_VAR} exiv2lib ${CURL_LIBRARIES} ${SSH_LIBRARIES} ${LIBINTL_LIBRARIES} )
# ******************************************************************************
# exifprint application
@@ -332,7 +337,7 @@ TARGET_LINK_LIBRARIES( conntest ${PRIVATE_VAR} exiv2lib ${CURL_LIBRARIES} ${SSH
# ******************************************************************************
# remotetest application
ADD_EXECUTABLE( remotetest ${REMOTETEST} )
-TARGET_LINK_LIBRARIES( remotetest exiv2lib )
+TARGET_LINK_LIBRARIES( remotetest exiv2lib ${LIBINTL_LIBRARIES} )
# ******************************************************************************
# Headers
diff --git a/src/actions.cpp b/src/actions.cpp
index 0ebe8505..9f850097 100644
--- a/src/actions.cpp
+++ b/src/actions.cpp
@@ -59,6 +59,7 @@ EXIV2_RCSID("@(#) $Id$")
#include <ctime>
#include <cmath>
#include <cassert>
+#include <stdexcept>
#include <sys/types.h> // for stat()
#include <sys/stat.h> // for stat()
#ifdef EXV_HAVE_UNISTD_H
@@ -236,33 +237,43 @@ namespace Action {
}
int Print::run(const std::string& path)
- try {
- path_ = path;
- int rc = 0;
- Exiv2::PrintStructureOption option = Exiv2::kpsNone ;
- switch (Params::instance().printMode_) {
- case Params::pmSummary: rc = printSummary(); break;
- case Params::pmList: rc = printList(); break;
- case Params::pmComment: rc = printComment(); break;
- case Params::pmPreview: rc = printPreviewList(); break;
- case Params::pmStructure: rc = printStructure(std::cout,Exiv2::kpsBasic) ; break;
- case Params::pmRecursive: rc = printStructure(std::cout,Exiv2::kpsRecursive) ; break;
-
- case Params::pmXMP:
- option = option == Exiv2::kpsNone ? Exiv2::kpsXMP : option; // drop
- case Params::pmIccProfile:{
- option = option == Exiv2::kpsNone ? Exiv2::kpsIccProfile : option;
- _setmode(_fileno(stdout),O_BINARY);
- rc = printStructure(std::cout,option);
- } break;
+ {
+ try {
+ path_ = path;
+ int rc = 0;
+ Exiv2::PrintStructureOption option = Exiv2::kpsNone ;
+ switch (Params::instance().printMode_) {
+ case Params::pmSummary: rc = printSummary(); break;
+ case Params::pmList: rc = printList(); break;
+ case Params::pmComment: rc = printComment(); break;
+ case Params::pmPreview: rc = printPreviewList(); break;
+ case Params::pmStructure: rc = printStructure(std::cout,Exiv2::kpsBasic) ; break;
+ case Params::pmRecursive: rc = printStructure(std::cout,Exiv2::kpsRecursive) ; break;
+
+ case Params::pmXMP:
+ if (option == Exiv2::kpsNone)
+ option = Exiv2::kpsXMP;
+ // drop
+ case Params::pmIccProfile:
+ if (option == Exiv2::kpsNone)
+ option = Exiv2::kpsIccProfile;
+ _setmode(_fileno(stdout),O_BINARY);
+ rc = printStructure(std::cout,option);
+ break;
+ }
+ return rc;
+ }
+ catch(const Exiv2::AnyError& e) {
+ std::cerr << "Exiv2 exception in print action for file "
+ << path << ":\n" << e << "\n";
+ return 1;
+ }
+ catch(const std::overflow_error& e) {
+ std::cerr << "std::overflow_error exception in print action for file "
+ << path << ":\n" << e.what() << "\n";
+ return 1;
}
- return rc;
}
- catch(const Exiv2::AnyError& e) {
- std::cerr << "Exiv2 exception in print action for file "
- << path << ":\n" << e << "\n";
- return 1;
- } // Print::run
int Print::printStructure(std::ostream& out, Exiv2::PrintStructureOption option)
{
diff --git a/src/basicio.cpp b/src/basicio.cpp
index 95589cd2..f2e1518b 100644
--- a/src/basicio.cpp
+++ b/src/basicio.cpp
@@ -990,6 +990,7 @@ namespace Exiv2 {
DataBuf FileIo::read(long rcount)
{
assert(p_->fp_ != 0);
+ if ( (size_t) rcount > size() ) throw Error(57);
DataBuf buf(rcount);
long readCount = read(buf.pData_, buf.size_);
buf.size_ = readCount;
diff --git a/src/error.cpp b/src/error.cpp
index 80378c19..5d63957d 100644
--- a/src/error.cpp
+++ b/src/error.cpp
@@ -106,6 +106,11 @@ namespace {
{ 52, N_("%1 has invalid XMP value type `%2'") }, // %1=key, %2=value type
{ 53, N_("Not a valid ICC Profile") },
{ 54, N_("Not valid XMP") },
+ { 55, N_("tiff directory length is too large") },
+ { 56, N_("invalid type value detected in Image::printIFDStructure") },
+ { 57, N_("invalid memory allocation request") },
+ { 58, N_("corrupted image metadata") },
+ { 59, N_("Arithmetic operation overflow") },
};
}
diff --git a/src/image.cpp b/src/image.cpp
index 0d828045..929d4976 100644
--- a/src/image.cpp
+++ b/src/image.cpp
@@ -399,7 +399,13 @@ namespace Exiv2 {
;
// if ( offset > io.size() ) offset = 0; // Denial of service?
- DataBuf buf(size*count + pad+20); // allocate a buffer
+
+ // #55 and #56 memory allocation crash test/data/POC8
+ long long allocate = (long long) size*count + pad+20;
+ if ( allocate > (long long) io.size() ) {
+ throw Error(57);
+ }
+ DataBuf buf(allocate); // allocate a buffer
std::memcpy(buf.pData_,dir.pData_+8,4); // copy dir[8:11] into buffer (short strings)
if ( count*size > 4 ) { // read into buffer
size_t restore = io.tell(); // save
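
Annotation: the change above computes the requested buffer size in a 64-bit integer before comparing it against the file size, so a crafted size/count pair can no longer wrap to a small allocation (issues #55/#56, test file POC8). A minimal sketch of the same widen-then-compare pattern, with an illustrative helper name and std::runtime_error standing in for exiv2's Error(57):

``` cpp
#include <cstdint>
#include <stdexcept>

// Widen before multiplying so the request cannot wrap, then refuse anything
// larger than the remaining input. typeSize is small (TIFF value types are at
// most 8 bytes), so the 64-bit product cannot overflow either.
long long checkedAllocation(uint16_t typeSize, uint32_t count,
                            long long pad, long long ioSize)
{
    long long allocate = static_cast<long long>(typeSize) * count + pad + 20;
    if (allocate > ioSize)
        throw std::runtime_error("invalid memory allocation request");
    return allocate;
}
```
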
diff --git a/src/jp2image.cpp b/src/jp2image.cpp
index 1892fd43..a308bfd9 100644
--- a/src/jp2image.cpp
+++ b/src/jp2image.cpp
@@ -41,6 +41,7 @@ EXIV2_RCSID("@(#) $Id$")
#include "error.hpp"
#include "futils.hpp"
#include "types.hpp"
+#include "safe_op.hpp"
// + standard includes
#include <string>
@@ -269,10 +270,16 @@ namespace Exiv2
std::cout << "Exiv2::Jp2Image::readMetadata: "
<< "Color data found" << std::endl;
#endif
- long pad = 3 ; // 3 padding bytes 2 0 0
- DataBuf data(subBox.length+8);
+
+ const long pad = 3 ; // 3 padding bytes 2 0 0
+ DataBuf data(Safe::add(subBox.length, static_cast<uint32_t>(8)));
io_->read(data.pData_,data.size_);
- long iccLength = getULong(data.pData_+pad, bigEndian);
+ const long iccLength = getULong(data.pData_+pad, bigEndian);
+ // subtracting pad from data.size_ is safe:
+ // size_ is at least 8 and pad = 3
+ if (iccLength > data.size_ - pad) {
+ throw Error(58);
+ }
DataBuf icc(iccLength);
::memcpy(icc.pData_,data.pData_+pad,icc.size_);
#ifdef DEBUG
diff --git a/src/nikonmn.cpp b/src/nikonmn.cpp
index 571ab806..34bf601c 100644
--- a/src/nikonmn.cpp
+++ b/src/nikonmn.cpp
@@ -299,6 +299,8 @@ namespace Exiv2 {
const Value& value,
const ExifData* exifData)
{
+ if ( ! exifData ) return os << "undefined" ;
+
if ( value.count() >= 9 ) {
ByteOrder bo = getKeyString("Exif.MakerNote.ByteOrder",exifData) == "MM" ? bigEndian : littleEndian;
byte p[4];
diff --git a/src/pentaxmn.cpp b/src/pentaxmn.cpp
index 4fc38be0..b22cb43b 100644
--- a/src/pentaxmn.cpp
+++ b/src/pentaxmn.cpp
@@ -1167,6 +1167,8 @@ namespace Exiv2 {
std::ostream& PentaxMakerNote::printShutterCount(std::ostream& os, const Value& value, const ExifData* metadata)
{
+ if ( ! metadata ) return os << "undefined" ;
+
ExifData::const_iterator dateIt = metadata->findKey(
ExifKey("Exif.PentaxDng.Date"));
if (dateIt == metadata->end()) {
diff --git a/src/pngchunk.cpp b/src/pngchunk.cpp
index da4ccd01..4ad74e1e 100644
--- a/src/pngchunk.cpp
+++ b/src/pngchunk.cpp
@@ -68,6 +68,8 @@ namespace Exiv2 {
int* outWidth,
int* outHeight)
{
+ assert(data.size_ >= 8);
+
// Extract image width and height from IHDR chunk.
*outWidth = getLong((const byte*)data.pData_, bigEndian);
@@ -107,15 +109,17 @@ namespace Exiv2 {
{
// From a tEXt, zTXt, or iTXt chunk,
// we get the key, it's a null terminated string at the chunk start
- if (data.size_ <= (stripHeader ? 8 : 0)) throw Error(14);
- const byte *key = data.pData_ + (stripHeader ? 8 : 0);
+ const int offset = stripHeader ? 8 : 0;
+ if (data.size_ <= offset) throw Error(14);
+ const byte *key = data.pData_ + offset;
// Find null string at end of key.
int keysize=0;
- for ( ; key[keysize] != 0 ; keysize++)
+ while (key[keysize] != 0)
{
+ keysize++;
// look if keysize is valid.
- if (keysize >= data.size_)
+ if (keysize+offset >= data.size_)
throw Error(14);
}
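
Annotation: the rewritten loop bounds the key scan by the whole buffer, including the 8-byte header offset; the old code compared keysize alone against data.size_, so an unterminated key in a stripped chunk could be read past the end of the buffer. A standalone sketch of the bounded scan (names and std::runtime_error are illustrative):

``` cpp
#include <stdexcept>

// Return the length of the NUL-terminated key that starts at data + offset,
// or throw if no terminator exists inside the buffer of the given size.
int keyLength(const unsigned char* data, long size, long offset)
{
    if (size <= offset)
        throw std::runtime_error("chunk too small");      // Error(14) in the patch
    long keysize = 0;
    while (data[offset + keysize] != 0) {
        ++keysize;
        if (offset + keysize >= size)                     // next read would be out of bounds
            throw std::runtime_error("unterminated key"); // also Error(14) in the patch
    }
    return static_cast<int>(keysize);
}
```
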
diff --git a/src/pngimage.cpp b/src/pngimage.cpp
index 11b41982..ed7399a2 100644
--- a/src/pngimage.cpp
+++ b/src/pngimage.cpp
@@ -441,7 +441,9 @@ namespace Exiv2 {
#ifdef DEBUG
std::cout << "Exiv2::PngImage::readMetadata: Found IHDR chunk (length: " << dataOffset << ")\n";
#endif
- PngChunk::decodeIHDRChunk(cdataBuf, &pixelWidth_, &pixelHeight_);
+ if (cdataBuf.size_ >= 8) {
+ PngChunk::decodeIHDRChunk(cdataBuf, &pixelWidth_, &pixelHeight_);
+ }
}
else if (!memcmp(cheaderBuf.pData_ + 4, "tEXt", 4))
{
diff --git a/src/safe_op.hpp b/src/safe_op.hpp
new file mode 100644
index 00000000..014b7f3a
--- /dev/null
+++ b/src/safe_op.hpp
@@ -0,0 +1,310 @@
+// ********************************************************* -*- C++ -*-
+/*
+ * Copyright (C) 2004-2017 Exiv2 maintainers
+ *
+ * This program is part of the Exiv2 distribution.
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, 5th Floor, Boston, MA 02110-1301 USA.
+ */
+/*!
+ @file safe_op.hpp
+ @brief Overflow checks for integers
+ @author Dan Čermák (D4N)
+ <a href="mailto:dan.cermak@cgc-instruments.com">dan.cermak@cgc-instruments.com</a>
+ @date 14-Dec-17, D4N: created
+ */
+
+#ifndef SAFE_OP_HPP_
+#define SAFE_OP_HPP_
+
+#include <limits>
+#include <stdexcept>
+
+#ifdef _MSC_VER
+#include <Intsafe.h>
+#endif
+
+/*!
+ * @brief Arithmetic operations with overflow checks
+ */
+namespace Safe
+{
+ /*!
+ * @brief Helper structs for providing integer overflow checks.
+ *
+ * This namespace contains the internal helper structs fallback_add_overflow
+ * and builtin_add_overflow. Both have a public static member function add
+ * with the following interface:
+ *
+ * bool add(T summand_1, T summand_2, T& result)
+ *
+ * where T is the type over which the struct is templated.
+ *
+ * The function performs a check whether the addition summand_1 + summand_2
+ * can be performed without an overflow. If the operation would overflow,
+ * true is returned and the addition is not performed if it would result in
+ * undefined behavior. If no overflow occurs, the sum is saved in result and
+ * false is returned.
+ *
+ * fallback_add_overflow implements a portable but slower overflow check.
+ * builtin_add_overflow uses compiler builtins (when available) and should
+ * be considerably faster. As builtins are not available for all types,
+ * builtin_add_overflow falls back to fallback_add_overflow when no builtin
+ * is available.
+ */
+ namespace Internal
+ {
+ /*!
+ * @brief Helper struct to determine whether a type is signed or unsigned
+
+ * This struct is a backport of std::is_signed from C++11. It has a public
+ * enum with the property VALUE which is true when the type is signed or
+ * false if it is unsigned.
+ */
+ template <typename T>
+ struct is_signed
+ {
+ enum
+ {
+ VALUE = T(-1) < T(0)
+ };
+ };
+
+ /*!
+ * @brief Helper struct for SFINAE, from C++11
+
+ * This struct has a public typedef called type typedef'd to T if B is
+ * true. Otherwise there is no typedef.
+ */
+ template <bool B, class T = void>
+ struct enable_if
+ {
+ };
+
+ /*!
+ * @brief Specialization of enable_if for the case B == true
+ */
+ template <class T>
+ struct enable_if<true, T>
+ {
+ typedef T type;
+ };
+
+ /*!
+ * @brief Fallback overflow checker, specialized via SFINAE
+ *
+ * This struct implements a 'fallback' addition with an overflow check,
+ * i.e. it does not rely on compiler intrinsics. It is specialized via
+ * SFINAE for signed and unsigned integer types and provides a public
+ * static member function add.
+ */
+ template <typename T, typename = void>
+ struct fallback_add_overflow;
+
+ /*!
+ * @brief Overload of fallback_add_overflow for signed integers
+ */
+ template <typename T>
+ struct fallback_add_overflow<T, typename enable_if<is_signed<T>::VALUE>::type>
+ {
+ /*!
+ * @brief Adds the two summands only if no overflow occurs
+ *
+ * This function performs a check if summand_1 + summand_2 would
+ * overflow and returns true in that case. If no overflow occurs,
+ * the sum is saved in result and false is returned.
+ *
+ * @return true on overflow, false on no overflow
+ *
+ * The check for an overflow is performed before the addition to
+ * ensure that no undefined behavior occurs. The value in result is
+ * only valid when the function returns false.
+ *
+ * Further information:
+ * https://wiki.sei.cmu.edu/confluence/display/c/INT32-C.+Ensure+that+operations+on+signed+integers+do+not+result+in+overflow
+ */
+ static bool add(T summand_1, T summand_2, T& result)
+ {
+ if (((summand_2 >= 0) && (summand_1 > std::numeric_limits<T>::max() - summand_2)) ||
+ ((summand_2 < 0) && (summand_1 < std::numeric_limits<T>::min() - summand_2))) {
+ return true;
+ } else {
+ result = summand_1 + summand_2;
+ return false;
+ }
+ }
+ };
+
+ /*!
+ * @brief Overload of fallback_add_overflow for unsigned integers
+ */
+ template <typename T>
+ struct fallback_add_overflow<T, typename enable_if<!is_signed<T>::VALUE>::type>
+ {
+ /*!
+ * @brief Adds the two summands only if no overflow occurs
+ *
+ * This function performs a check if summand_1 + summand_2 would
+ * overflow and returns true in that case. If no overflow occurs,
+ * the sum is saved in result and false is returned.
+ *
+ * @return true on overflow, false on no overflow
+ *
+ * Further information:
+ * https://wiki.sei.cmu.edu/confluence/display/c/INT30-C.+Ensure+that+unsigned+integer+operations+do+not+wrap
+ */
+ static bool add(T summand_1, T summand_2, T& result)
+ {
+ if (summand_1 > std::numeric_limits<T>::max() - summand_2) {
+ return true;
+ } else {
+ result = summand_1 + summand_2;
+ return false;
+ }
+ }
+ };
+
+ /*!
+ * @brief Overflow checker using compiler intrinsics
+ *
+ * This struct provides an add function with the same interface &
+ * behavior as fallback_add_overflow::add but it relies on compiler
+ * intrinsics instead. This version should be considerably faster than
+ * the fallback version as it can fully utilize available CPU
+ * instructions & the compiler's diagnostic.
+ *
+ * However, as some compilers don't provide intrinsics for certain
+ * types, the default implementation of add is the version from fallback.
+ *
+ * The struct is explicitly specialized for each type via #ifdefs for
+ * each compiler.
+ */
+ template <typename T>
+ struct builtin_add_overflow
+ {
+ /*!
+ * @brief Add summand_1 and summand_2 and check for overflows.
+ *
+ * This is the default add() function that uses
+ * fallback_add_overflow<T>::add(). All specializations must have
+ * exactly the same interface and behave the same way.
+ */
+ static inline bool add(T summand_1, T summand_2, T& result)
+ {
+ return fallback_add_overflow<T>::add(summand_1, summand_2, result);
+ }
+ };
+
+#if defined(__GNUC__) || defined(__clang__)
+#if __GNUC__ >= 5
+
+/*!
+ * This macro pastes a specialization of builtin_add_overflow using gcc's &
+ * clang's __builtin_(s/u)add(l)(l)_overflow()
+ *
+ * The add function is implemented by forwarding the parameters to the intrinsic
+ * and returning its value.
+ *
+ * The intrinsics are documented here:
+ * https://gcc.gnu.org/onlinedocs/gcc/Integer-Overflow-Builtins.html#Integer-Overflow-Builtins
+ */
+#define SPECIALIZE_builtin_add_overflow(type, builtin_name) \
+ template <> \
+ struct builtin_add_overflow<type> \
+ { \
+ static inline bool add(type summand_1, type summand_2, type& result) \
+ { \
+ return builtin_name(summand_1, summand_2, &result); \
+ } \
+ }
+
+ SPECIALIZE_builtin_add_overflow(int, __builtin_sadd_overflow);
+ SPECIALIZE_builtin_add_overflow(long, __builtin_saddl_overflow);
+ SPECIALIZE_builtin_add_overflow(long long, __builtin_saddll_overflow);
+
+ SPECIALIZE_builtin_add_overflow(unsigned int, __builtin_uadd_overflow);
+ SPECIALIZE_builtin_add_overflow(unsigned long, __builtin_uaddl_overflow);
+ SPECIALIZE_builtin_add_overflow(unsigned long long, __builtin_uaddll_overflow);
+
+#undef SPECIALIZE_builtin_add_overflow
+#endif
+
+#elif defined(_MSC_VER)
+
+/*!
+ * This macro pastes a specialization of builtin_add_overflow using MSVC's
+ * U(Int/Long/LongLong)Add.
+ *
+ * The add function is implemented by forwarding the parameters to the
+ * intrinsic. As MSVC's intrinsics return S_OK on success, this specialization
+ * returns whether the intrinsic's return value does not equal S_OK. This ensures
+ * a uniform interface of the add function (false is returned when no overflow
+ * occurs, true on overflow).
+ *
+ * The intrinsics are documented here:
+ * https://msdn.microsoft.com/en-us/library/windows/desktop/ff516460(v=vs.85).aspx
+ */
+#define SPECIALIZE_builtin_add_overflow_WIN(type, builtin_name) \
+ template <> \
+ struct builtin_add_overflow<type> \
+ { \
+ static inline bool add(type summand_1, type summand_2, type& result) \
+ { \
+ return builtin_name(summand_1, summand_2, &result) != S_OK; \
+ } \
+ }
+
+ SPECIALIZE_builtin_add_overflow_WIN(unsigned int, UIntAdd);
+ SPECIALIZE_builtin_add_overflow_WIN(unsigned long, ULongAdd);
+ SPECIALIZE_builtin_add_overflow_WIN(unsigned long long, ULongLongAdd);
+
+#undef SPECIALIZE_builtin_add_overflow_WIN
+
+#endif
+
+ } // namespace Internal
+
+ /*!
+ * @brief Safe addition, throws an exception on overflow.
+ *
+ * This function returns the result of summand_1 and summand_2 only when the
+ * operation would not overflow, otherwise an exception of type
+ * std::overflow_error is thrown.
+ *
+ * @param[in] summand_1, summand_2 summands to be summed up
+ * @return the sum of summand_1 and summand_2
+ * @throws std::overflow_error if the addition would overflow
+ *
+ * This function utilizes compiler builtins when available and should have a
+ * very small performance hit then. When builtins are unavailable, a more
+ * extensive check is required.
+ *
+ * Builtins are available for the following configurations:
+ * - GCC/Clang for signed and unsigned int, long and long long (not char & short)
+ * - MSVC for unsigned int, long and long long
+ */
+ template <typename T>
+ T add(T summand_1, T summand_2)
+ {
+ T res = 0;
+ if (Internal::builtin_add_overflow<T>::add(summand_1, summand_2, res)) {
+ throw std::overflow_error("Overflow in addition");
+ }
+ return res;
+ }
+
+} // namespace Safe
+
+#endif // SAFE_OP_HPP_
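
Annotation: Safe::add is what the jp2image.cpp change above uses to size the colour-box buffer; on wrap it throws std::overflow_error, which the new handler in actions.cpp reports instead of crashing (see test_CVE_2017_17725.py below). A short usage sketch, assuming only that this header is on the include path:

``` cpp
#include <cstdint>
#include <iostream>
#include <stdexcept>

#include "safe_op.hpp"

int main()
{
    // Normal case: the checked sum equals the plain sum.
    std::cout << Safe::add<uint32_t>(40, 2) << '\n';        // prints 42

    // Overflow case: 0xFFFFFFFF + 8 does not fit into uint32_t.
    try {
        Safe::add<uint32_t>(0xFFFFFFFFu, 8u);
    } catch (const std::overflow_error& e) {
        std::cout << "caught: " << e.what() << '\n';        // "Overflow in addition"
    }
    return 0;
}
```
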
diff --git a/src/tiffvisitor.cpp b/src/tiffvisitor.cpp
index 74f8d078..49fbf961 100644
--- a/src/tiffvisitor.cpp
+++ b/src/tiffvisitor.cpp
@@ -47,6 +47,7 @@ EXIV2_RCSID("@(#) $Id$")
#include <iostream>
#include <iomanip>
#include <cassert>
+#include <limits>
// *****************************************************************************
namespace {
@@ -1294,11 +1295,12 @@ namespace Exiv2 {
}
uint16_t tag = getUShort(p, byteOrder());
TiffComponent::AutoPtr tc = TiffCreator::create(tag, object->group());
- // The assertion typically fails if a component is not configured in
- // the TIFF structure table
- assert(tc.get());
- tc->setStart(p);
- object->addChild(tc);
+ if (tc.get()) {
+ tc->setStart(p);
+ object->addChild(tc);
+ } else {
+ EXV_WARNING << "Unable to handle tag " << tag << ".\n";
+ }
p += 12;
}
@@ -1493,6 +1495,10 @@ namespace Exiv2 {
}
p += 4;
uint32_t isize= 0; // size of Exif.Sony1.PreviewImage
+
+ if (count > std::numeric_limits<uint32_t>::max() / typeSize) {
+ throw Error(59);
+ }
uint32_t size = typeSize * count;
uint32_t offset = getLong(p, byteOrder());
byte* pData = p;
@@ -1516,7 +1522,19 @@ namespace Exiv2 {
size = 0;
}
if (size > 4) {
+ // setting pData to pData_ + baseOffset() + offset can result in pData pointing to invalid memory,
+ // as offset can be arbitrarily large
+ if ((static_cast<uintptr_t>(baseOffset()) > std::numeric_limits<uintptr_t>::max() - static_cast<uintptr_t>(offset))
+ || (static_cast<uintptr_t>(baseOffset() + offset) > std::numeric_limits<uintptr_t>::max() - reinterpret_cast<uintptr_t>(pData_)))
+ {
+ throw Error(59);
+ }
+ if (pData_ + static_cast<uintptr_t>(baseOffset()) + static_cast<uintptr_t>(offset) > pLast_) {
+ throw Error(58);
+ }
pData = const_cast<byte*>(pData_) + baseOffset() + offset;
+
+ // check for size being invalid
if (size > static_cast<uint32_t>(pLast_ - pData)) {
#ifndef SUPPRESS_WARNINGS
EXV_ERROR << "Upper boundary of data for "
@@ -1536,7 +1554,9 @@ namespace Exiv2 {
}
}
Value::AutoPtr v = Value::create(typeId);
- assert(v.get());
+ if (!v.get()) {
+ throw Error(58);
+ }
if ( !isize ) {
v->read(pData, size, byteOrder());
} else {
diff --git a/test/bugfixes-test.sh b/test/bugfixes-test.sh
index f91c6759..b10b839e 100755
--- a/test/bugfixes-test.sh
+++ b/test/bugfixes-test.sh
@@ -602,6 +602,7 @@ source ./functions.source
runTest exiv2 -pX $filename | xmllint --format -
num=1231
+ printf "$num " >&3
for X in a b; do
filename=exiv2-bug$num$X.jpg
echo '------>' Bug $filename '<-------' >&2
@@ -622,6 +623,7 @@ source ./functions.source
runTest exiv2 -pa $filename
num=1252
+ printf "$num " >&3
for X in a b; do
filename=exiv2-bug$num$X.exv
echo '------>' Bug $filename '<-------' >&2
@@ -629,6 +631,118 @@ source ./functions.source
runTest exiv2 -pa --grep lens/i $filename
done
+ num=1305
+ printf "$num " >&3
+ filename=IMGP0006-min.jpg
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g55
+ printf "$num " >&3
+ filename=POC8
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g57
+ printf "$num " >&3
+ filename=POC
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g79
+ printf "$num " >&3
+ filename=POC2
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g52
+ printf "$num " >&3
+ filename=POC5
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g51
+ printf "$num " >&3
+ filename=POC4
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g50
+ printf "$num " >&3
+ filename=POC3
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g53
+ printf "$num " >&3
+ filename=POC6
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g54
+ printf "$num " >&3
+ filename=POC9
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g58
+ printf "$num " >&3
+ filename=POC11
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g59
+ printf "$num " >&3
+ filename=POC12
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g60
+ printf "$num " >&3
+ filename=POC13
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g71
+ printf "$num " >&3
+ filename=003-heap-buffer-over
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g73
+ printf "$num " >&3
+ filename=02-Invalid-mem-def
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g74
+ printf "$num " >&3
+ filename=005-invalid-mem
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
+ num=g75
+ printf "$num " >&3
+ filename=008-invalid-mem
+ echo '------>' Bug $filename '<-------' >&2
+ copyTestFile $filename
+ runTest exiv2 $filename
+
) 3>&1 > $results 2>&1
printf "\n"
diff --git a/test/data/003-heap-buffer-over b/test/data/003-heap-buffer-over
new file mode 100644
index 00000000..2c490f60
Binary files /dev/null and b/test/data/003-heap-buffer-over differ
diff --git a/test/data/005-invalid-mem b/test/data/005-invalid-mem
new file mode 100644
index 00000000..40f0a393
Binary files /dev/null and b/test/data/005-invalid-mem differ
diff --git a/test/data/008-invalid-mem b/test/data/008-invalid-mem
new file mode 100644
index 00000000..8397f174
Binary files /dev/null and b/test/data/008-invalid-mem differ
diff --git a/test/data/02-Invalid-mem-def b/test/data/02-Invalid-mem-def
new file mode 100644
index 00000000..e506eea4
Binary files /dev/null and b/test/data/02-Invalid-mem-def differ
diff --git a/test/data/IMGP0006-min.jpg b/test/data/IMGP0006-min.jpg
new file mode 100644
index 00000000..e2cd107b
Binary files /dev/null and b/test/data/IMGP0006-min.jpg differ
diff --git a/test/data/POC b/test/data/POC
new file mode 100755
index 00000000..c697bf39
Binary files /dev/null and b/test/data/POC differ
diff --git a/test/data/POC11 b/test/data/POC11
new file mode 100644
index 00000000..b7b8a24b
Binary files /dev/null and b/test/data/POC11 differ
diff --git a/test/data/POC12 b/test/data/POC12
new file mode 100644
index 00000000..1355fccf
Binary files /dev/null and b/test/data/POC12 differ
diff --git a/test/data/POC13 b/test/data/POC13
new file mode 100644
index 00000000..fed558f6
Binary files /dev/null and b/test/data/POC13 differ
diff --git a/test/data/POC2 b/test/data/POC2
new file mode 100755
index 00000000..a49d49c1
Binary files /dev/null and b/test/data/POC2 differ
diff --git a/test/data/POC3 b/test/data/POC3
new file mode 100644
index 00000000..70eb1960
Binary files /dev/null and b/test/data/POC3 differ
diff --git a/test/data/POC4 b/test/data/POC4
new file mode 100644
index 00000000..7cb73902
Binary files /dev/null and b/test/data/POC4 differ
diff --git a/test/data/POC5 b/test/data/POC5
new file mode 100644
index 00000000..1eabee74
Binary files /dev/null and b/test/data/POC5 differ
diff --git a/test/data/POC6 b/test/data/POC6
new file mode 100644
index 00000000..04a43ede
Binary files /dev/null and b/test/data/POC6 differ
diff --git a/test/data/POC8 b/test/data/POC8
new file mode 100755
index 00000000..8a1c03b9
Binary files /dev/null and b/test/data/POC8 differ
diff --git a/test/data/POC9 b/test/data/POC9
new file mode 100644
index 00000000..e45c270f
Binary files /dev/null and b/test/data/POC9 differ
diff --git a/test/data/bugfixes-test.out b/test/data/bugfixes-test.out
index d8754025..2192fa1f 100644
Binary files a/test/data/bugfixes-test.out and b/test/data/bugfixes-test.out differ
diff --git a/test/data/issue_170_poc b/test/data/issue_170_poc
new file mode 100644
index 00000000..439b7687
Binary files /dev/null and b/test/data/issue_170_poc differ
diff --git a/test/data/issue_187 b/test/data/issue_187
new file mode 100644
index 00000000..3e05cc9e
Binary files /dev/null and b/test/data/issue_187 differ
diff --git a/test/data/poc_2017-12-12_issue188 b/test/data/poc_2017-12-12_issue188
new file mode 100644
index 00000000..6f91c3e9
Binary files /dev/null and b/test/data/poc_2017-12-12_issue188 differ
diff --git a/test/functions.source b/test/functions.source
index 38c9a8d0..cc8d67ff 100644
--- a/test/functions.source
+++ b/test/functions.source
@@ -109,6 +109,7 @@ reportTest()
echo "all testcases passed."
else
diff $diffargs $lhs $rhs
+ exit 3
fi
}
diff --git a/test/tiff-test.sh b/test/tiff-test.sh
old mode 100644
new mode 100755
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 00000000..40a96afc
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
diff --git a/tests/bugfixes/__init__.py b/tests/bugfixes/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/bugfixes/github/__init__.py b/tests/bugfixes/github/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/bugfixes/github/test_CVE_2017_17669.py b/tests/bugfixes/github/test_CVE_2017_17669.py
new file mode 100644
index 00000000..803bb92a
--- /dev/null
+++ b/tests/bugfixes/github/test_CVE_2017_17669.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+import system_tests
+
+
+class RunPocFile(system_tests.Case):
+
+ filename = "{data_path}/issue_187"
+ commands = ["{exiv2} " + filename]
+ retval = [1]
+ stdout = [""]
+ stderr = [
+ """{exiv2_exception_msg} """ + filename + """:
+{error_14_message}
+"""
+ ]
diff --git a/tests/bugfixes/github/test_CVE_2017_17725.py b/tests/bugfixes/github/test_CVE_2017_17725.py
new file mode 100644
index 00000000..8273a49a
--- /dev/null
+++ b/tests/bugfixes/github/test_CVE_2017_17725.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+
+import system_tests
+
+
+class TestCvePoC(system_tests.Case):
+
+ url = "https://github.com/Exiv2/exiv2/issues/188"
+ found_by = ["Wei You", "@youwei1988"]
+
+ filename = "{data_path}/poc_2017-12-12_issue188"
+ commands = ["{exiv2} " + filename]
+ stdout = [""]
+ stderr = ["""std::overflow_error exception in print action for file """ + filename + """:
+Overflow in addition
+"""]
+ retval = [1]
diff --git a/tests/bugfixes/github/test_issue_170.py b/tests/bugfixes/github/test_issue_170.py
new file mode 100644
index 00000000..77bc7b1b
--- /dev/null
+++ b/tests/bugfixes/github/test_issue_170.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+
+import system_tests
+
+
+class decodeIHDRChunkOutOfBoundsRead(system_tests.Case):
+
+ url = "https://github.com/Exiv2/exiv2/issues/170"
+
+ filename = "{data_path}/issue_170_poc"
+
+ commands = ["{exiv2} " + filename]
+ stdout = [""]
+ stderr = ["""{exiv2_exception_msg} """ + filename + """:
+{error_14_message}
+"""]
+ retval = [1]
diff --git a/tests/doc.md b/tests/doc.md
new file mode 100644
index 00000000..ad31bd57
--- /dev/null
+++ b/tests/doc.md
@@ -0,0 +1,426 @@
+# TL;DR
+
+If you just want to write a simple test case, check out the file
+`writing_tests.md`.
+
+# Introduction
+
+This test suite is intended for system tests, i.e. for running a binary with
+certain parameters and comparing the output against an expected value. This is
+especially useful for a regression test suite, but can be also used for testing
+of new features where unit testing is not feasible, e.g. to test new command
+line parameters.
+
+The test suite is configured via `INI` style files using Python's builtin
+[ConfigParser](https://docs.python.org/3/library/configparser.html)
+module. Such a configuration file looks roughly like this:
+``` ini
+[DEFAULT]
+some_var: some_val
+
+[section 1]
+empty_var:
+multiline_var: this is a multiline string
+ as long as the indentation
+ is present
+# comments can be inserted
+# some_var is implicitly present in this section by the DEFAULT section
+
+[section 2]
+# set some_var for this section to something else than the default
+some_var: some_other_val
+# values from other sections can be inserted
+vars can have whitespaces: ${some_var} ${section 1:multiline_var}
+multiline var: multiline variables can have
+
+ empty lines too
+```
+
+For further details concerning the syntax, please consult the official
+documentation. The `ConfigParser` module is used with the following defaults:
+- Comments are started by `#` only
+- The separator between a variable and the value is `:`
+- Multiline comments can have empty lines
+- Extended Interpolation is used (this allows referring to other sections when
+ inserting values using the `${section:variable}` syntax)
+
+Please keep in mind that leading and trailing whitespaces are **stripped** from
+strings when extracting variable values. So this:
+
+``` ini
+some_var: some value with whitespaces before and after
+```
+is equivalent to this:
+``` ini
+some_var:some value with whitespaces before and after
+```
+
+The test suite itself uses the builtin `unittest` module of Python to discover
+and run the individual test cases. The test cases themselves are implemented in
+Python source files, but the required Python knowledge is minimal.
+
+## Test suite
+
+The test suite is configured via one configuration file whose location
+automatically sets the root directory of the test suite. The `unittest` module
+then recursively searches all sub-directories with a `__init__.py` file for
+files of the form `test_*.py`, which it automatically interprets as test cases
+(more about these in the next section). Python will automatically interpret each
+directory as a module and use this to format the output, e.g. the test case
+`regression/crashes/test_bug_15.py` will be interpreted as the module
+`regression.crashes.test_bug_15`. Thus one can use the directory structure to
+group test cases.
+
+The test suite's configuration file should have the following form:
+
+``` ini
+[General]
+timeout: 0.1
+
+[paths]
+binary: ../build/bin/binary
+important_file: ../conf/main.cfg
+
+[variables]
+abort_error: ERROR
+abort_exit value: 1
+```
+
+The General section only contains the `timeout` parameter, which is actually
+optional (when left out 1.0 is assumed). The timeout sets the maximum time in
+seconds for each command that is run before it is aborted. This allows for test
+driven development with tests that cause infinite loops or similar hangs in the
+test suite.
+
+The paths and variables sections define global variables for the system test
+suite, which every test case can read. Following the DRY principle, one can put
+common outputs of the tested binary in a variable, so that changing an error
+message does not result in an hour long update of the test suite. Both sections
+are merged together before being passed on to the test cases, thus they must not
+contain variables with the same name (doing so results in an error).
+
+While the values in the variables section are simply passed on to the test cases
+the paths section is special as its contents are interpreted as relative paths
+(with respect to the test suite's root) and are expanded to absolute paths
+before being passed to the test cases. This can be used to inform each test case
+about the location of a built binary or a configuration file without having to
+rely on environment variables.
+
+However, sometimes environment variables are very handy to implement variable
+paths or platform differences (like different build directories or file
+extensions). For this, the test suite supports the `ENV` and `ENV fallback`
+sections. In conjunction with the extended interpolation of the `ConfigParser`
+module, these can be quite useful. Consider the following example:
+
+``` ini
+[General]
+timeout: 0.1
+
+[ENV]
+variable_prefix: PREFIX
+file_extension: FILE_EXT
+
+[ENV fallback]
+variable_prefix: ../build
+
+[paths]
+binary: ${ENV:variable_prefix}/bin/binary${ENV:file_extension}
+important_file: ../conf/main.cfg
+
+[variables]
+abort_error: ERROR
+abort_exit value: 1
+```
+
+The `ENV` section is, similarly to the `paths` section, special insofar as the
+variables are extracted from the environment with the given name. E.g. the
+variable `file_extension` would be set to the value of the environment variable
+`FILE_EXT`. If the environment variable is not defined, then the test suite will
+look in the `ENV fallback` section for a fallback. E.g. in the above example
+`variable_prefix` has the fallback or default value of `../build` which will be
+used if the environment variable `PREFIX` is not set. If no fallback is provided
+then an empty string is used instead, which would happen to `file_extension` if
+`FILE_EXT` were unset.
+
+This can be combined with the extended interpolation of Python's `ConfigParser`,
+which allows variables from arbitrary sections to be included in other variables
+using the `${sect:var_name}` syntax. This would be expanded to the value of
+`var_name` from the section `sect`. The above example only utilizes this in the
+`paths` section, but it can also be used in the `variables` section, if that
+makes sense for the use case.
+
+Returning to the example config file, the path `binary` would be inferred in the
+following steps:
+1. extract `PREFIX` & `FILE_EXT` from the environment, if they don't exist use
+ the default values from `ENV fallback` or ""
+2. substitute the strings `${ENV:variable_prefix}` and `${ENV:file_extension}`
+3. expand the relative path to an absolute path
+
+Please note that while the `INI` file allows for variables with whitespaces or
+`-` in their names, such variables will cause errors as they are invalid
+variable names in Python.
+
+
+## Test cases
+
+The test cases are defined in Python source files utilizing the unittest module,
+thus every file must also be a valid Python file. Each file defining a test case
+must start with `test_` and have the file extension `py`. To be discovered by
+the unittest module it must reside in a directory with a (empty) `__init__.py`
+file.
+
+A test case should test one logical unit, e.g. test for regressions of a certain
+bug or check if a command line option works. Each test case can run multiple
+commands which results are compared to an expected standard output, standard
+error and return value. Should differences arise or should one of the commands
+take too long, then an error message with the exact differences is shown to the
+user.
+
+An example test case file would look like this:
+
+``` python
+# -*- coding: utf-8 -*-
+
+import system_tests
+
+
+class AnInformativeName(system_tests.Case):
+
+ filename = "invalid_input_file"
+ commands = [
+ "{binary} -c {import_file} -i {filename}"
+ ]
+ retval = ["{abort_exit_value}"]
+ stdout = ["Reading {filename}"]
+ stderr = [
+ """{abort_error}
+error in {filename}
+"""
+ ]
+```
+
+The first 6 lines are necessary boilerplate to pull in the necessary routines to
+run the actual tests (these are implemented in the module `system_tests` with
+the class `system_tests.Case` extending `unittest.TestCase`). When adding new
+tests one should choose a new class name that briefly summarizes the test. Note
+that the file name (without the extension) with the directory structure is
+interpreted as the module by Python and pre-pended to the class name when
+reporting about the tests. E.g. the file `regression/crashes/test_bug_15.py`
+with the class `OutOfBoundsRead` gets reported as
+`regression.crashes.test_bug_15.OutOfBoundsRead`, already including a brief
+summary of this test.
+
+**Caution:** Always import `system_tests` in the aforementioned syntax and don't
+use `from system_tests import Case`. This will not work, as the `system_tests`
+module stores the suite's config internally which will not be available if you
+perform a `from system_tests import Case` (this causes Python to create a copy
+of the class `system_tests.Case` for your module, without reading the
+configuration file).
+
+In the following lines the lists `commands`, `retval`, `stdout` and `stderr`
+should be defined. These are lists of strings and must all have the same number
+of elements.
+
+The test suite at first takes all these strings and substitutes all values in
+curly braces with variables either defined in this class alongside (like
+`filename` in the above example) or with the values defined in the test suite's
+configuration file. Please note that defining a variable with the same name as a
+variable in the suite's configuration file will result in an error (otherwise
+one of the variables would take precedence leading to unexpected results). The
+substitution of values is performed using Python's string `format()` method and
+more elaborate format strings can be used when necessary.
+
+In the above example the command would thus expand to:
+``` shell
+/path/to/the/dir/build/bin/binary -c /path/to/the/dir/conf/main.cfg -i invalid_input_file
+```
+and similarly for `stdout` and `stderr`.
+
+Once the substitution is performed, each command is run using Python's
+`subprocess` module, its output is compared to the values in `stdout` and
+`stderr` and its return value to `retval`. Please note that for portability
+reasons the subprocess module is run with `shell=False`, thus shell expansions
+or pipes will not work.
+
+As the test cases are implemented in Python, one can take full advantage of
+Python for the construction of the necessary lists. For example when 10 commands
+should be run and all return 0, one can write `retval = 10 * [0]` instead of
+writing 0 ten times. The same is of course possible for strings.
+
+There are however some peculiarities with multiline strings in Python. Normal
+strings start and end with a single `"` but multiline strings start with three
+`"`. Also, while the variable names must be indented, new lines in multiline
+strings must not or additional whitespaces will be added. E.g.:
+
+``` python
+ stderr = [
+ """something
+ else"""
+ ]
+```
+will actually result in the string:
+
+```
+something
+ else
+```
+and not:
+```
+something
+else
+```
+as the indentation might have suggested.
+
+Also note that in this example the string will not be terminated with a newline
+character. To achieve that put the `"""` on the following line.
+
+
+## Advanced test cases
+
+This section describes more advanced features that are probably not necessary for
+the "standard" usage of the test suite.
+
+
+### Creating file copies
+
+For tests that modify their input file it is useful to run them with a
+disposable copy of the input file and not with the original. For this purpose
+the test suite features a decorator which creates a copy of the supplied files
+and deletes the copies after the test ran.
+
+Example:
+``` python
+# -*- coding: utf-8 -*-
+
+import system_tests
+
+
+@system_tests.CopyFiles("{filename}", "{some_path}/another_file.txt")
+class AnInformativeName(system_tests.Case):
+
+ filename = "invalid_input_file"
+ commands = [
+ "{binary} -c {import_file} -i {filename}"
+ ]
+ retval = ["{abort_exit_value}"]
+ stdout = ["Reading {filename}"]
+ stderr = [
+ """{abort_error}
+error in {filename}
+"""
+ ]
+```
+
+In this example, the test suite would automatically create a copy of the files
+`invalid_input_file` and `{some_path}/another_file.txt` (`some_path` would be of
+course expanded too) named `invalid_input_file_copy` and
+`{some_path}/another_file_copy.txt`. After the test ran, the copies are
+deleted. Please note that variable expansion in the filenames is possible.
+
+
+### Customizing the output check
+
+Some tests do not require a "brute-force" comparison of the whole output of a
+program but only a very simple check (e.g. that a string is present). For these
+cases, one can customize how stdout and stderr checked for errors.
+
+The `system_tests.Case` class has two public functions for the check of stdout &
+stderr: `compare_stdout` & `compare_stderr`. They have the following interface:
+``` python
+compare_stdout(self, i, command, got_stdout, expected_stdout)
+compare_stderr(self, i, command, got_stderr, expected_stderr)
+```
+with the parameters:
+- i: index of the command in the `commands` list
+- command: a string of the actually invoked command
+- got_stdout/stderr: the obtained stdout, post-processed depending on the
+ platform so that lines always end with `\n`
+- expected_stdout/stderr: the expected output extracted from
+ `self.stdout`/`self.stderr`
+
+These functions can be overridden in child classes to perform custom checks (or
+to omit them completely). Please note, however, that it is not possible to
+customize how the return value is checked. This is intended, as the return value
+is often used by the OS to indicate segfaults, and ignoring it (in combination
+with flawed checks of the output) could lead to crashes not being noticed.
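+
+A minimal sketch of such an override (the test data are purely hypothetical):
+``` python
+# -*- coding: utf-8 -*-
+
+import system_tests
+
+
+class OnlyCheckForSubstring(system_tests.Case):
+
+    commands = ["{binary} -i some_file"]
+    retval = [0]
+    stdout = ["some_file"]  # only this substring has to occur in stdout
+    stderr = [""]
+
+    def compare_stdout(self, i, command, got_stdout, expected_stdout):
+        # substring check instead of a full comparison of the output
+        self.assertIn(expected_stdout, got_stdout)
+```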
+
+
+### Manually expanding variables in strings
+
+In case completely custom checks have to be run but one still wants to access
+the variables from the test suite, the class `system_tests.Case` provides the
+function `expand_variables(self, string)`. It performs the previously described
+variable substitution using the test suite's configuration file.
+
+Unfortunately, it has to be called from a member function. The `setUp()`
+function can be used for this, as it is run before each test. For example:
+``` python
+class SomeName(system_tests.Case):
+
+ def setUp(self):
+ self.commands = [self.expand_variables("{some_var}/foo.txt")]
+ self.stderr = [""]
+ self.stdout = [self.expand_variables("{success_message}")]
+ self.retval = [0]
+```
+
+This example will work, as the test runner reads the data for `commands`,
+`stderr`, `stdout` and `retval` from the class instance. What will not work,
+however, is creating a new member in `setUp()` and trying to use it as a
+variable for expansion, like this:
+``` python
+class SomeName(system_tests.Case):
+
+ def setUp(self):
+ self.new_var = "foo"
+ self.another_string = self.expand_variables("{new_var}")
+```
+
+This example fails in `self.expand_variables` because the expansion uses only
+static class members (which `new_var` is not). Also, if you modify a static
+class member in `setUp()` the changed version will **not** be used for variable
+expansion, as the variables are saved in a new dictionary **before** `setUp()`
+runs. Thus this:
+``` python
+class SomeName(system_tests.Case):
+
+ new_var = "foo"
+
+ def setUp(self):
+ self.new_var = "bar"
+ self.another_string = self.expand_variables("{new_var}")
+```
+
+will result in `another_string` being "foo" and not "bar".
+
+
+### Possible pitfalls
+
+- Do not provide a custom `setUpClass()` function for the test
+ cases. `setUpClass()` is used by `system_tests.Case` to store the variables
+ for expansion.
+
+- Keep in mind that the variable expansion uses Python's `format()`
+  function. This can make it cumbersome to include formatted strings in
+  variables like `commands`, which will likely contain other variables from the
+  test suite. E.g. `commands = ["{binary} {:s}".format(f) for f in files]` will
+  not work, as `format()` will expect a value for `binary`. This can be worked
+  around using either the old Python formatting via `%` or by formatting first
+  and then concatenating the problematic parts, as sketched below.
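+
+A minimal sketch of both workarounds (the file list is hypothetical):
+``` python
+files = ["a.jpg", "b.jpg"]  # hypothetical input files
+
+# old-style % formatting does not interfere with the {variable} placeholders:
+commands = ["{binary} -i %s" % f for f in files]
+
+# or: format first and concatenate the problematic part afterwards:
+commands = ["{binary} -i " + "{:s}".format(f) for f in files]
+```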
+
+
+## Running the test suite
+
+The test suite is written for Python 3 but is in principle also compatible with
+Python 2. However, Python 2 is not regularly tested, so the suite's
+functionality is not guaranteed with it.
+
+To run the suite, navigate to the `tests/` subdirectory and run:
+``` shell
+python3 runner.py
+```
+
+The runner script also supports the optional argument `--config_file`, which
+allows providing a test suite configuration file other than the default
+`suite.conf`. It also forwards the verbosity setting via the `-v`/`--verbose`
+flags to Python's `unittest` module.
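+
+For example, to run the suite with an alternative configuration file
+(hypothetical name) and increased verbosity:
+``` shell
+python3 runner.py --config_file other_suite.conf -v
+```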
diff --git a/tests/runner.py b/tests/runner.py
new file mode 100644
index 00000000..745dcebd
--- /dev/null
+++ b/tests/runner.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+
+if __name__ == '__main__':
+
+ import argparse
+ import os
+ import unittest
+ import sys
+
+ import system_tests
+
+ parser = argparse.ArgumentParser(description="The system test suite")
+
+ parser.add_argument(
+ "--config_file",
+ type=str,
+ nargs=1,
+ default=['suite.conf']
+ )
+ parser.add_argument(
+ "--verbose", "-v",
+ action='count',
+ default=1
+ )
+ args = parser.parse_args()
+ conf_file = args.config_file[0]
+ discovery_root = os.path.dirname(conf_file)
+
+ system_tests.configure_suite(conf_file)
+
+ discovered_tests = unittest.TestLoader().discover(discovery_root)
+ test_res = unittest.runner.TextTestRunner(verbosity=args.verbose)\
+ .run(discovered_tests)
+
+ sys.exit(0 if len(test_res.failures) + len(test_res.errors) == 0 else 1)
diff --git a/tests/suite.conf b/tests/suite.conf
new file mode 100644
index 00000000..134b1471
--- /dev/null
+++ b/tests/suite.conf
@@ -0,0 +1,21 @@
+[General]
+timeout: 1
+
+[ENV]
+exiv2_path: EXIV2_PATH
+binary_extension: EXIV2_EXT
+
+[ENV fallback]
+exiv2_path: ../build/bin
+
+[paths]
+exiv2: ${ENV:exiv2_path}/exiv2${ENV:binary_extension}
+exiv2json: ${ENV:exiv2_path}/exiv2json${ENV:binary_extension}
+data_path: ../test/data
+tiff-test: ${ENV:exiv2_path}/tiff-test${ENV:binary_extension}
+
+[variables]
+error_14_message: Failed to read image data
+error_58_message: corrupted image metadata
+error_57_message: invalid memory allocation request
+exiv2_exception_msg: Exiv2 exception in print action for file
diff --git a/tests/system_tests.py b/tests/system_tests.py
new file mode 100644
index 00000000..e22c78e5
--- /dev/null
+++ b/tests/system_tests.py
@@ -0,0 +1,331 @@
+# -*- coding: utf-8 -*-
+
+import configparser
+import os
+import inspect
+import subprocess
+import threading
+import shlex
+import sys
+import shutil
+import unittest
+
+
+if sys.platform == 'win32':
+ def _cmd_splitter(cmd):
+ return cmd
+
+ def _process_output_post(output):
+ return output.replace('\r\n', '\n')
+
+else:
+ def _cmd_splitter(cmd):
+ return shlex.split(cmd)
+
+ def _process_output_post(output):
+ return output
+
+
+def _disjoint_dict_merge(d1, d2):
+ """
+ Merges two dictionaries with no common keys together and returns the result.
+
+ >>> d1 = {"a": 1}
+ >>> d2 = {"b": 2, "c": 3}
+ >>> _disjoint_dict_merge(d1, d2) == {"a": 1, "b": 2, "c": 3}
+ True
+
+ Calling this function with dictionaries that share keys raises a ValueError:
+ >>> _disjoint_dict_merge({"a": 1, "b": 6}, {"b": 2, "a": 3})
+ Traceback (most recent call last):
+ ..
+ ValueError: Dictionaries have common keys.
+
+ """
+ inter = set(d1.keys()).intersection(set(d2.keys()))
+ if len(inter) > 0:
+ raise ValueError("Dictionaries have common keys.")
+ res = d1.copy()
+ res.update(d2)
+ return res
+
+
+_parameters = {}
+
+
+def configure_suite(config_file):
+ """
+ Populates a global datastructure with the parameters from the suite's
+ configuration file.
+
+ This function performs the following steps:
+ 1. read in the file ``config_file`` via the ConfigParser module using
+ extended interpolation
+ 2. check that the sections ``variables`` and ``paths`` are disjoint
+ 3. extract the environment variables given in the ``ENV`` section
+ 4. save all entries from the ``variables`` section in the global
+ datastructure
+ 5. interpret all entries in the ``paths`` section as relative paths from the
+ configuration file, expand them to absolute paths and save them in the
+ global datastructure
+
+ For further information concerning the rationale behind this, please consult
+ the documentation in ``doc.md``.
+ """
+
+ if not os.path.exists(config_file):
+ raise ValueError(
+ "Test suite config file {:s} does not exist"
+ .format(os.path.abspath(config_file))
+ )
+
+ config = configparser.ConfigParser(
+ interpolation=configparser.ExtendedInterpolation(),
+ delimiters=(':'),
+ comment_prefixes=('#')
+ )
+ config.read(config_file)
+
+ _parameters["suite_root"] = os.path.split(os.path.abspath(config_file))[0]
+ _parameters["timeout"] = config.getfloat("General", "timeout", fallback=1.0)
+
+ if 'variables' in config and 'paths' in config:
+ intersecting_keys = set(config["paths"].keys())\
+ .intersection(set(config["variables"].keys()))
+ if len(intersecting_keys) > 0:
+ raise ValueError(
+ "The sections 'paths' and 'variables' must not share keys, "
+ "but they have the following common key{:s}: {:s}"
+ .format(
+ 's' if len(intersecting_keys) > 1 else '',
+ ', '.join(k for k in intersecting_keys)
+ )
+ )
+
+ # extract variables from the environment
+ for key in config['ENV']:
+ if key in config['ENV fallback']:
+ fallback = config['ENV fallback'][key]
+ else:
+ fallback = ""
+ config['ENV'][key] = os.getenv(config['ENV'][key]) or fallback
+
+ if 'variables' in config:
+ for key in config['variables']:
+ _parameters[key] = config['variables'][key]
+
+ if 'paths' in config:
+ for key in config['paths']:
+ rel_path = config['paths'][key]
+ abs_path = os.path.abspath(
+ os.path.join(_parameters["suite_root"], rel_path)
+ )
+ if not os.path.exists(abs_path):
+ raise ValueError(
+ "Path replacement for {short}: {abspath} does not exist"
+ " (was expanded from {rel})".format(
+ short=key,
+ abspath=abs_path,
+ rel=rel_path)
+ )
+ _parameters[key] = abs_path
+
+
+def _setUp_factory(old_setUp, *files):
+ """
+    Factory function that returns a setUp function suitable to replace the
+    existing setUp of a unittest.TestCase. The returned setUp first calls
+    old_setUp(self) and then creates a copy of every file in *files, named
+    fname.ext -> fname_copy.ext
+
+    All file names in *files are first expanded using self.expand_variables()
+    and the paths to the copies are saved in self._file_copies
+ """
+ def setUp(self):
+ old_setUp(self)
+ self._file_copies = []
+ for f in files:
+ expanded_fname = self.expand_variables(f)
+ fname, ext = os.path.splitext(expanded_fname)
+ new_name = fname + '_copy' + ext
+ self._file_copies.append(
+ shutil.copyfile(expanded_fname, new_name)
+ )
+ return setUp
+
+
+def _tearDown_factory(old_tearDown):
+ """
+ Factory function that returns a new tearDown method to replace an existing
+    tearDown method. It first deletes all files in self._file_copies and then
+ calls old_tearDown(self).
+ This factory is intended to be used in conjunction with _setUp_factory
+ """
+ def tearDown(self):
+ for f in self._file_copies:
+ os.remove(f)
+ old_tearDown(self)
+ return tearDown
+
+
+def CopyFiles(*files):
+ """
+    Decorator for subclasses of system_tests.Case that automatically creates
+    copies of the files specified as parameters to the decorator.
+
+ Example:
+    >>> @CopyFiles("{some_var}/file.txt", "{another_var}/other_file.png")
+    ... class Foo(Case):
+    ...     pass
+
+    The decorator will inject a new setUp method that first calls the already
+    defined setUp(), then expands all supplied file names using
+    Case.expand_variables and finally creates copies by appending '_copy' before
+    the file extension. The paths to the copies are stored in self._file_copies.
+
+ The decorator also injects a new tearDown method that deletes all files in
+ self._file_copies and then calls the original tearDown method.
+
+    This function will also complain if it is called without arguments or
+    without parentheses, which is valid decorator syntax but is obviously a bug
+    in this case.
+ """
+ if len(files) == 0:
+ raise ValueError("No files to copy supplied.")
+ elif len(files) == 1:
+ if isinstance(files[0], type):
+ raise UserWarning(
+                "Decorator used wrongly, must be called with filenames in parentheses"
+ )
+
+ def wrapper(cls):
+ old_setUp = cls.setUp
+ cls.setUp = _setUp_factory(old_setUp, *files)
+
+ old_tearDown = cls.tearDown
+ cls.tearDown = _tearDown_factory(old_tearDown)
+
+ return cls
+
+ return wrapper
+
+
+class Case(unittest.TestCase):
+ """
+ System test case base class, provides the functionality to interpret static
+ class members as system tests and runs them.
+
+ This class reads in the members commands, retval, stdout, stderr and runs
+ the format function on each, where format is called with the kwargs being a
+ merged dictionary of all variables that were extracted from the suite's
+ configuration file and all static members of the current class.
+
+ The resulting commands are then run using the subprocess module and compared
+ against the expected values that were provided in the static
+ members. Furthermore a threading.Timer is used to abort the execution if a
+ configured timeout is reached.
+
+ The class itself must be inherited from, otherwise it is not useful at all,
+ as it does not provide any static members that could be used to run system
+    tests. However, a class that inherits from this class needn't provide any
+    member functions at all; the inherited test_run() function performs all of
+    the required work in child classes.
+ """
+
+    # maxDiff is set to None so that arbitrarily large diffs will be shown
+ maxDiff = None
+
+ @classmethod
+ def setUpClass(cls):
+ """
+ This function adds the variables variable_dict & work_dir to the class.
+
+ work_dir - set to the file where the current class is defined
+ variable_dict - a merged dictionary of all static members of the current
+ class and all variables extracted from the suite's
+ configuration file
+ """
+ cls.variable_dict = _disjoint_dict_merge(cls.__dict__, _parameters)
+ cls.work_dir = os.path.dirname(inspect.getfile(cls))
+
+ def compare_stdout(self, i, command, got_stdout, expected_stdout):
+ """
+ Function to compare whether the expected & obtained stdout match.
+
+ This function is automatically invoked by test_run with the following
+ parameters:
+ i - the index of the current command that is run in self.commands
+ command - the command that was run
+ got_stdout - the obtained stdout, post-processed depending on the
+ platform so that lines always end with \n
+ expected_stdout - the expected stdout extracted from self.stdout
+
+ The default implementation simply uses assertMultiLineEqual from
+ unittest.TestCase. This function can be overridden in a child class to
+ implement a custom check.
+ """
+ self.assertMultiLineEqual(expected_stdout, got_stdout)
+
+ def compare_stderr(self, i, command, got_stderr, expected_stderr):
+ """
+ Same as compare_stdout only for standard-error.
+ """
+ self.assertMultiLineEqual(expected_stderr, got_stderr)
+
+ def expand_variables(self, string):
+ """
+ Expands all variables in curly braces in the given string using the
+ dictionary variable_dict.
+
+ The expansion itself is performed by the builtin string method format().
+ A KeyError indicates that the supplied string contains a variable
+ in curly braces that is missing from self.variable_dict
+ """
+ return str(string).format(**self.variable_dict)
+
+ def test_run(self):
+ """
+        Actual system test function which runs the provided commands,
+        pre-processes all variables and post-processes the output before passing
+        it on to compare_stderr() & compare_stdout().
+ """
+
+ for i, command, retval, stdout, stderr in zip(range(len(self.commands)),
+ self.commands,
+ self.retval,
+ self.stdout,
+ self.stderr):
+ command, retval, stdout, stderr = map(
+ self.expand_variables, [command, retval, stdout, stderr]
+ )
+ retval = int(retval)
+ timeout = {"flag": False}
+
+ proc = subprocess.Popen(
+ _cmd_splitter(command),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=self.work_dir
+ )
+
+ def timeout_reached(timeout):
+ timeout["flag"] = True
+ proc.kill()
+
+ t = threading.Timer(
+ _parameters["timeout"], timeout_reached, args=[timeout]
+ )
+ t.start()
+ got_stdout, got_stderr = proc.communicate()
+ t.cancel()
+
+ self.assertFalse(timeout["flag"] and "Timeout reached")
+ self.compare_stdout(
+ i, command,
+ _process_output_post(got_stdout.decode('utf-8')), stdout
+ )
+ self.compare_stderr(
+ i, command,
+ _process_output_post(got_stderr.decode('utf-8')), stderr
+ )
+ self.assertEqual(retval, proc.returncode)
diff --git a/tests/writing_tests.md b/tests/writing_tests.md
new file mode 100644
index 00000000..e3e9cfe9
--- /dev/null
+++ b/tests/writing_tests.md
@@ -0,0 +1,42 @@
+## Writing new tests
+
+The test suite is intended to run a binary and compare its standard output,
+standard error and return value against provided values. This is implemented
+using Python's `unittest` module and thus all test files are Python files.
+
+The simplest test has the following structure:
+``` python
+# -*- coding: utf-8 -*-
+
+import system_tests
+
+
+class GoodTestName(system_tests.Case):
+
+ filename = "{data_path}/test_file"
+ commands = ["{exiv2} " + filename, "{exiv2} " + filename + '_2']
+ stdout = [""] * 2
+ stderr = ["""{exiv2_exception_msg} """ + filename + """:
+{error_58_message}
+"""] * 2
+ retval = [1] * 2
+```
+
+The test suite will run the commands provided in `commands`, compare their
+output to the values in `stdout` and `stderr`, and compare their return values
+to `retval`.
+
+The strings in curly braces are variables that are either defined in the test's
+class or taken from the suite's configuration file (see `doc.md` for a complete
+explanation).
+
+When creating new tests, follow roughly these steps:
+
+1. Choose an appropriate subdirectory where the test belongs. If none fits,
+   create a new one and put an empty `__init__.py` file there.
+
+2. Create a new file with a name matching `test_*.py`. Copy the class definition
+ from the above example and choose an appropriate class name.
+
+3. Run the test suite via `python3 runner.py` and ensure that your test case is
+   actually run! Either run the suite with the `-v` option, which will list all
+   test cases that were run, or temporarily add an error and check that it is
+   reported (see the example below).
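+
+For example, to run the whole suite verbosely from the repository root:
+``` shell
+cd tests/
+python3 runner.py -v
+```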
diff --git a/xmpsdk/CMakeLists.txt b/xmpsdk/CMakeLists.txt
index f081d46e..0059a255 100644
--- a/xmpsdk/CMakeLists.txt
+++ b/xmpsdk/CMakeLists.txt
@@ -37,7 +37,10 @@ IF( EXIV2_ENABLE_XMP AND EXIV2_ENABLE_LIBXMP )
TARGET_LINK_LIBRARIES(xmp Threads::Threads ${EXPAT_LIBRARIES})
endif()
# 1119 Install libxmp.a for use by third party applications (Thanks, Emmanuel)
- INSTALL(TARGETS xmp ${INSTALL_TARGET_STANDARD_ARGS} )
+ INSTALL(TARGETS xmp
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+ ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
+ )
ENDIF()
# That's all Folks!