diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..e82fb5df7 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,17 @@ +Dockerfile +tests/test_data +SuperBuild/build +SuperBuild/download +SuperBuild/install +SuperBuild/src +build +opensfm +pmvs +odm_orthophoto +odm_texturing +odm_meshing +odm_georeferencing +images_resize +.git + + diff --git a/.gitignore b/.gitignore index e69de29bb..460fe7037 100644 --- a/.gitignore +++ b/.gitignore @@ -0,0 +1,22 @@ +*~ +bin/ +include/ +lib/ +logs/ +share/ +src/ +download/ + +SuperBuild/build/ +SuperBuild/install/ +build/ + +cmvs.tar.gz +parallel.tar.bz2 +LAStools.zip +pcl.tar.gz +ceres-solver.tar.gz +*.pyc +opencv.zip +settings.yaml +docker.settings.yaml \ No newline at end of file diff --git a/README b/.gitmodules similarity index 100% rename from README rename to .gitmodules diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 000000000..a8a441d8c --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,18 @@ +cmake_minimum_required(VERSION 2.8) + +project(OpenDroneMap C CXX) + +# TODO(edgar): add option in order to point to CMAKE_PREFIX_PATH +# if we want to build SuperBuild in an external directory. +# It is assumed that SuperBuild have been compiled. + +# Set third party libs location +set(CMAKE_PREFIX_PATH "${CMAKE_CURRENT_SOURCE_DIR}/SuperBuild/install") + +# move binaries to the same bin directory +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) + +option(ODM_BUILD_SLAM "Build SLAM module" OFF) + +# Add ODM sub-modules +add_subdirectory(modules) diff --git a/CNAME b/CNAME new file mode 100644 index 000000000..ed563e1e0 --- /dev/null +++ b/CNAME @@ -0,0 +1 @@ +opendronemap.org \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..b47d7ceec --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,77 @@ +# Contributing to OpenDroneMap + +:+1::tada: First off, thanks for taking the time to contribute! 
:tada::+1: + +### Code of Conduct + +This project adheres to the Contributor Covenant [code of conduct](code_of_conduct.md). +By participating, you are expected to uphold this code. +Please report unacceptable behavior to the [Project Maintainer](mailto:svm@clevelandmetroparks.com). + +## How can I contribute? + +### Reporting bugs + +Bugs are tracked as Github issues. Please create an issue in the repository and tag it with the Bug tag. + +Explain the problem and include additional details to help maintainers reproduce the problem: + +* **Use a clear and descriptive title** for the issue to identify the problem. +* **Describe the exact steps which reproduce the problem** in as many details as possible. For example, start by explaining how you run ODM (Docker, Vagrant, etc), e.g. which command exactly you used in the terminal. When listing steps, **don't just say what you did, but explain how you did it**. +* **Provide specific examples to demonstrate the steps**. Include links to files or GitHub projects, or copy/pasteable snippets, which you use in those examples. If you're providing snippets in the issue, use [Markdown code blocks](https://help.github.com/articles/markdown-basics/#multiple-lines). +* **Describe the behavior you observed after following the steps** and point out what exactly is the problem with that behavior. +* **Explain which behavior you expected to see instead and why.** +* **Include screenshots and animated GIFs** which show you following the described steps and clearly demonstrate the problem. If you use the keyboard while following the steps, **record the GIF with the [Keybinding Resolver](https://github.com/atom/keybinding-resolver) shown**. You can use [this tool](http://www.cockos.com/licecap/) to record GIFs on macOS and Windows, and [this tool](https://github.com/colinkeenan/silentcast) or [this tool](https://github.com/GNOME/byzanz) on Linux. 
+* **If the problem is related to performance**, please post your machine's specs (host and guest machine). +* **If the problem wasn't triggered by a specific action**, describe what you were doing before the problem happened and share more information using the guidelines below. + +Include details about your configuration and environment: + +* **Which version of ODM are you using?** A stable release? a clone of master or dev? +* **What's the name and version of the OS you're using**? +* **Are you running ODM in a virtual machine?** If so, which VM software are you using and which operating systems and versions are used for the host and the guest? + +#### Template For Submitting Bug Reports + + [Short description of problem here] + + **Reproduction Steps:** + + 1. [First Step] + 2. [Second Step] + 3. [Other Steps...] + + **Expected behavior:** + + [Describe expected behavior here] + + **Observed behavior:** + + [Describe observed behavior here] + + **Screenshots and GIFs** + + ![Screenshots and GIFs which follow reproduction steps to demonstrate the problem](url) + + **ODM version:** [Enter ODM version here] + **OS and version:** [Enter OS name and version here] + + **Additional information:** + + * Problem started happening recently, didn't happen in an older version of ODM: [Yes/No] + * Problem can be reliably reproduced, doesn't happen randomly: [Yes/No] + * Problem happens with all datasets and projects, not only some datasets or projects: [Yes/No] + +### Pull Requests +* Include screenshots and animated GIFs in your pull request whenever possible. +* Follow the [PEP8 Python Style Guide](https://www.python.org/dev/peps/pep-0008/). +* End files with a newline. +* Avoid platform-dependent code: + * Use `require('fs-plus').getHomeDirectory()` to get the home directory. + * Use `path.join()` to concatenate filenames. + * Use `os.tmpdir()` rather than `/tmp` when you need to reference the + temporary directory. 
+* Using a plain `return` when returning explicitly at the end of a function. + * Not `return null`, `return undefined`, `null`, or `undefined` + + diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..a480be8f1 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,68 @@ +FROM phusion/baseimage + +# Env variables +ENV DEBIAN_FRONTEND noninteractive + +#Install dependencies +RUN apt-get update -y +RUN apt-get install software-properties-common -y +#Required Requisites +RUN add-apt-repository -y ppa:ubuntugis/ppa +RUN add-apt-repository -y ppa:george-edison55/cmake-3.x +RUN apt-get update -y + +# All packages (Will install much faster) +RUN apt-get install --no-install-recommends -y git cmake python-pip build-essential software-properties-common python-software-properties libgdal-dev gdal-bin libgeotiff-dev \ +libgtk2.0-dev libavcodec-dev libavformat-dev libswscale-dev python-dev python-numpy libtbb2 libtbb-dev libjpeg-dev libpng-dev libtiff-dev libjasper-dev libflann-dev \ +libproj-dev libxext-dev liblapack-dev libeigen3-dev libvtk5-dev python-networkx libgoogle-glog-dev libsuitesparse-dev libboost-filesystem-dev libboost-iostreams-dev \ +libboost-regex-dev libboost-python-dev libboost-date-time-dev libboost-thread-dev python-pyproj python-empy python-nose python-pyside python-pyexiv2 python-scipy \ +libexiv2-dev liblas-bin python-matplotlib libatlas-base-dev libgmp-dev libmpfr-dev swig2.0 python-wheel libboost-log-dev libjsoncpp-dev + +RUN apt-get remove libdc1394-22-dev +RUN pip install --upgrade pip +RUN pip install setuptools +RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/OpenDroneMap/gippy/archive/v0.3.9.tar.gz loky + +ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages" +ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm" +ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib" + +# Prepare directories + +RUN mkdir /code +WORKDIR /code + +# Copy repository 
files +COPY ccd_defs_check.py /code/ccd_defs_check.py +COPY CMakeLists.txt /code/CMakeLists.txt +COPY configure.sh /code/configure.sh +COPY /modules/ /code/modules/ +COPY /opendm/ /code/opendm/ +COPY /patched_files/ /code/patched_files/ +COPY run.py /code/run.py +COPY run.sh /code/run.sh +COPY /scripts/ /code/scripts/ +COPY /SuperBuild/cmake/ /code/SuperBuild/cmake/ +COPY /SuperBuild/CMakeLists.txt /code/SuperBuild/CMakeLists.txt +COPY docker.settings.yaml /code/settings.yaml +COPY VERSION /code/VERSION + + +#Compile code in SuperBuild and root directories + +RUN cd SuperBuild && mkdir build && cd build && cmake .. && make -j$(nproc) && cd ../.. && mkdir build && cd build && cmake .. && make -j$(nproc) + +RUN apt-get -y remove libgl1-mesa-dri git cmake python-pip build-essential +RUN apt-get install -y libvtk5-dev + +# Cleanup APT +RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +# Clean Superbuild + +RUN rm -rf /code/SuperBuild/download +RUN rm -rf /code/SuperBuild/src/opencv/samples /code/SuperBuild/src/pcl/test /code/SuperBuild/src/pcl/doc /code/SuperBuild/src/pdal/test /code/SuperBuild/src/pdal/doc + +# Entry point +ENTRYPOINT ["python", "/code/run.py", "code"] + diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..9cecc1d46 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. 
By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. 
+ + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. 
The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. 
This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. 
For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. 
Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. 
+ + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. 
Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + {one line to give the program's name and a brief idea of what it does.} + Copyright (C) {year} {name of author} + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + {project} Copyright (C) {year} {fullname} + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
diff --git a/README.md b/README.md new file mode 100644 index 000000000..4812ffa03 --- /dev/null +++ b/README.md @@ -0,0 +1,211 @@ +# OpenDroneMap + +![](https://raw.githubusercontent.com/OpenDroneMap/OpenDroneMap/master/img/odm_image.png) + +## What is it? + +OpenDroneMap is an open source toolkit for processing aerial drone imagery. Typical drones use simple point-and-shoot cameras, so the images from drones, while from a different perspective, are similar to any pictures taken from point-and-shoot cameras, i.e. non-metric imagery. OpenDroneMap turns those simple images into three dimensional geographic data that can be used in combination with other geographic datasets. + +![](https://raw.githubusercontent.com/OpenDroneMap/OpenDroneMap/master/img/tol_ptcloud.png) + +In a word, OpenDroneMap is a toolchain for processing raw civilian UAS imagery to other useful products. What kind of products? + +1. Point Clouds +2. Digital Surface Models +3. Textured Digital Surface Models +4. Orthorectified Imagery +5. Classified Point Clouds (coming soon) +6. Digital Elevation Models +7. etc. + +Open Drone Map now includes state-of-the-art 3D reconstruction work by Michael Waechter, Nils Moehrle, and Michael Goesele. See their publication at http://www.gcc.tu-darmstadt.de/media/gcc/papers/Waechter-2014-LTB.pdf. + +## QUICKSTART + +### Docker (All platforms) + +The easiest way to run ODM is through Docker. If you don't have it installed, +see the [Docker Ubuntu installation tutorial](https://docs.docker.com/engine/installation/linux/ubuntulinux/) and follow the +instructions through "Create a Docker group". The Docker image workflow +has equivalent procedures for Mac OS X and Windows found at [docs.docker.com](docs.docker.com). Then run the following command which will build a pre-built image and run on images found in `$(pwd)/images` (you can change this if you need to, see the [wiki](https://github.com/OpenDroneMap/OpenDroneMap/wiki/Docker) for more detailed instructions. 
+ +``` +docker run -it --rm -v $(pwd)/images:/code/images -v $(pwd)/odm_orthophoto:/code/odm_orthophoto -v $(pwd)/odm_texturing:/code/odm_texturing opendronemap/opendronemap +``` + +### Native Install (Ubuntu 16.04) + +**Please note that we need help getting ODM updated to work for 16.10+. Look at #659 or drop into the [gitter](https://gitter.im/OpenDroneMap/OpenDroneMap) for more info.** + + +**[Download the latest release here](https://github.com/OpenDroneMap/OpenDroneMap/releases)** +Current version: 0.3.1 (this software is in beta) + +1. Extract and enter the OpenDroneMap directory +2. Run `bash configure.sh install` +3. Edit the `settings.yaml` file in your favorite text editor. Set the `project-path` value to an empty directory (you will place sub-directories containing individual projects inside). You can add many options to this file, [see here](https://github.com/OpenDroneMap/OpenDroneMap/wiki/Run-Time-Parameters) +4. Download a sample dataset from [here](https://github.com/OpenDroneMap/odm_data_aukerman/archive/master.zip) (about 550MB) and extract it as a subdirectory in your project directory. +5. Run `./run.sh odm_data_aukerman` +6. Enter dataset directory to view results: + - orthophoto: odm_orthophoto/odm_orthophoto.tif + - textured mesh model: odm_texturing/odm_textured_model_geo.obj + - point cloud (georeferenced): odm_georeferencing/odm_georeferenced_model.ply + +See below for more detailed installation instructions. + +## Diving Deeper + +### Installation + +Extract and enter the downloaded OpenDroneMap directory and compile all of the code by executing a single configuration script: + + bash configure.sh install + +When updating to a newer version of ODM, it is recommended that you run + + bash configure.sh reinstall + +to ensure all the dependent packages and modules get updated. 
+ +For Ubuntu 15.10 users, this will help you get running: + + sudo apt-get install python-xmltodict + sudo ln -s /usr/lib/x86_64-linux-gnu/libproj.so.9 /usr/lib/libproj.so + +### Environment Variables + +There are some environmental variables that need to be set. Open the ~/.bashrc file on your machine and add the following 3 lines at the end. The file can be opened with ```gedit ~/.bashrc``` if you are using an Ubuntu desktop environment. Be sure to replace the "/your/path/" with the correct path to the location where you extracted OpenDroneMap: + + export PYTHONPATH=$PYTHONPATH:/your/path/OpenDroneMap/SuperBuild/install/lib/python2.7/dist-packages + export PYTHONPATH=$PYTHONPATH:/your/path/OpenDroneMap/SuperBuild/src/opensfm + export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/your/path/OpenDroneMap/SuperBuild/install/lib + +Note that using `run.sh` sets these temporarily in the shell. + +### Run OpenDroneMap + +First you need a set of images, taken from a drone or otherwise. Example data can be obtained from https://github.com/OpenDroneMap/odm_data + +Next, you need to edit the `settings.yaml` file. The only setting you must edit is the `project-path` key. Set this to an empty directory within which projects will be saved. There are many options for tuning your project. See the [wiki](https://github.com/OpenDroneMap/OpenDroneMap/wiki/Run-Time-Parameters) or run `python run.py -h` + + +Then run: + + python run.py -i /path/to/images project-name + +The images will be copied over to the project path so you only need to specify the `-i /path/` once. You can also override any variable from settings.yaml here using the command line arguments. 
If you want to rerun the whole thing, run + + python run.py --rerun-all project-name + +or + + python run.py --rerun-from odm_meshing project-name + +The options for rerunning are: 'resize', 'opensfm', 'slam', 'cmvs', 'pmvs', 'odm_meshing', 'mvs_texturing', 'odm_georeferencing', 'odm_orthophoto' + +### View Results + +When the process finishes, the results will be organized as follows: + + |-- images/ + |-- img-1234.jpg + |-- ... + |-- images_resize/ + |-- img-1234.jpg + |-- ... + |-- opensfm/ + |-- see mapillary/opensfm repository for more info + |-- depthmaps/ + |-- merged.ply # Dense Point cloud (not georeferenced) + |-- odm_meshing/ + |-- odm_mesh.ply # A 3D mesh + |-- odm_meshing_log.txt # Output of the meshing task. May point out errors. + |-- odm_texturing/ + |-- odm_textured_model.obj # Textured mesh + |-- odm_textured_model_geo.obj # Georeferenced textured mesh + |-- texture_N.jpg # Associated textured images used by the model + |-- odm_georeferencing/ + |-- odm_georeferenced_model.ply # A georeferenced dense point cloud + |-- odm_georeferenced_model.ply.laz # LAZ format point cloud + |-- odm_georeferenced_model.csv # XYZ format point cloud + |-- odm_georeferencing_log.txt # Georeferencing log + |-- odm_georeferencing_transform.txt# Transform used for georeferencing + |-- odm_georeferencing_utm_log.txt # Log for the extract_utm portion + |-- odm_orthophoto/ + |-- odm_orthophoto.png # Orthophoto image (no coordinates) + |-- odm_orthophoto.tif # Orthophoto GeoTiff + |-- odm_orthophoto_log.txt # Log file + |-- gdal_translate_log.txt # Log for georeferencing the png file + +Any file ending in .obj or .ply can be opened and viewed in [MeshLab](http://meshlab.sourceforge.net/) or similar software. That includes `opensfm/depthmaps/merged.ply`, `odm_meshing/odm_mesh.ply`, `odm_texturing/odm_textured_model[_geo].obj`, or `odm_georeferencing/odm_georeferenced_model.ply`. 
Below is an example textured mesh: + +![](https://raw.githubusercontent.com/alexhagiopol/OpenDroneMap/feature-better-docker/toledo_dataset_example_mesh.jpg) + +You can also view the orthophoto GeoTIFF in [QGIS](http://www.qgis.org/) or other mapping software: + +![](https://raw.githubusercontent.com/OpenDroneMap/OpenDroneMap/master/img/bellus_map.png) + +## Build and Run Using Docker + +(Instructions below apply to Ubuntu 14.04, but the Docker image workflow +has equivalent procedures for Mac OS X and Windows. See [docs.docker.com](docs.docker.com)) + +OpenDroneMap is Dockerized, meaning you can use containerization to build and run it without tampering with the configuration of libraries and packages already +installed on your machine. Docker software is free to install and use in this context. If you don't have it installed, +see the [Docker Ubuntu installation tutorial](https://docs.docker.com/engine/installation/linux/ubuntulinux/) and follow the +instructions through "Create a Docker group". Once Docker is installed, the fastest way to use OpenDroneMap is to run a pre-built image by typing: + + docker run -it --rm -v $(pwd)/images:/code/images -v $(pwd)/odm_orthophoto:/code/odm_orthophoto -v $(pwd)/odm_texturing:/code/odm_texturing opendronemap/opendronemap + +If you want to build your own Docker image from sources, type: + + docker build -t my_odm_image . + docker run -it --rm -v $(pwd)/images:/code/images -v $(pwd)/odm_orthophoto:/code/odm_orthophoto -v $(pwd)/odm_texturing:/code/odm_texturing my_odm_image + +Using this method, the containerized ODM will process the images in the OpenDroneMap/images directory and output results +to the OpenDroneMap/odm_orthophoto and OpenDroneMap/odm_texturing directories as described in the [Viewing Results](https://github.com/OpenDroneMap/OpenDroneMap/wiki/Output-and-Results) section. 
+If you want to view other results outside the Docker image simply add which directories you're interested in to the run command in the same pattern +established above. For example, if you're interested in the dense cloud results generated by PMVS and in the orthophoto, +simply use the following `docker run` command after building the image: + + docker run -it --rm -v $(pwd)/images:/code/images -v $(pwd)/odm_georeferencing:/code/odm_georeferencing -v $(pwd)/odm_orthophoto:/code/odm_orthophoto my_odm_image + +If you want to get all intermediate outputs, run the following command: + + docker run -it --rm -v $(pwd)/images:/code/images -v $(pwd)/odm_georeferencing:/code/odm_georeferencing -v $(pwd)/odm_meshing:/code/odm_meshing -v $(pwd)/odm_orthophoto:/code/odm_orthophoto -v $(pwd)/odm_texturing:/code/odm_texturing -v $(pwd)/opensfm:/code/opensfm -v $(pwd)/pmvs:/code/pmvs opendronemap/opendronemap + +To pass in custom parameters to the run.py script, simply pass it as arguments to the `docker run` command. For example: + + docker run -it --rm -v $(pwd)/images:/code/images -v $(pwd)/odm_orthophoto:/code/odm_orthophoto -v $(pwd)/odm_texturing:/code/odm_texturing opendronemap/opendronemap --resize-to 1800 --force-ccd 6.16 + +If you want to pass in custom parameters using the settings.yaml file, you can pass it as a -v volume binding: + + docker run -it --rm -v $(pwd)/images:/code/images -v $(pwd)/odm_orthophoto:/code/odm_orthophoto -v $(pwd)/odm_texturing:/code/odm_texturing -v $(pwd)/settings.yaml:/code/settings.yaml opendronemap/opendronemap + + +## User Interface + +A web interface and API to OpenDroneMap is currently under active development in the [WebODM](https://github.com/OpenDroneMap/WebODM) repository. + +## Video Support + +Currently we have an experimental feature that uses ORB_SLAM to render a textured mesh from video. It is only supported on Ubuntu 14.04 on machines with X11 support. 
See the [wiki](https://github.com/OpenDroneMap/OpenDroneMap/wiki/Reconstruction-from-Video) for details on installation and use. + +## Examples + +Coming soon... + +## Documentation: + +For documentation, please take a look at our [wiki](https://github.com/OpenDroneMap/OpenDroneMap/wiki). Check here first if you are having problems. If you still need help, look through the issue queue or create one. There's also a general help chat [here](https://gitter.im/OpenDroneMap/generalhelp). + +## Developers + +Help improve our software! + +[![Join the chat at https://gitter.im/OpenDroneMap/OpenDroneMap](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/OpenDroneMap/OpenDroneMap?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) + +1. Try to keep commits clean and simple +2. Submit a pull request with detailed changes and test results + + diff --git a/SuperBuild/CMakeLists.txt b/SuperBuild/CMakeLists.txt new file mode 100644 index 000000000..453c3fb47 --- /dev/null +++ b/SuperBuild/CMakeLists.txt @@ -0,0 +1,134 @@ +cmake_minimum_required(VERSION 3.1) + +project(ODM-SuperBuild) + +# Setup SuperBuild root location +set(SB_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}) + +# Path to additional CMake modules +set(CMAKE_MODULE_PATH ${SB_ROOT_DIR}/cmake) + +include(ExternalProject) +include(ExternalProject-Setup) + +option(ODM_BUILD_SLAM "Build SLAM module" OFF) + + +################################ +# Setup SuperBuild directories # +################################ + +# Setup location where source tar-balls are downloaded +set(SB_DOWNLOAD_DIR "${SB_ROOT_DIR}/download" + CACHE PATH "Location where source tar-balls are (to be) downloaded.") +mark_as_advanced(SB_DOWNLOAD_DIR) + +message(STATUS "SuperBuild files will be downloaded to: ${SB_DOWNLOAD_DIR}") + + +# Setup location where source tar-balls are located +set(SB_SOURCE_DIR "${SB_ROOT_DIR}/src" + CACHE PATH "Location where source tar-balls are (will be).") +mark_as_advanced(SB_SOURCE_DIR) + 
+message(STATUS "SuperBuild source files will be extracted to: ${SB_SOURCE_DIR}") + + +# Setup location where source tar-balls are located +set(SB_INSTALL_DIR "${SB_ROOT_DIR}/install" + CACHE PATH "Location where source tar-balls are (will be) installed.") +mark_as_advanced(SB_SOURCE_DIR) + +message(STATUS "SuperBuild source files will be installed to: ${SB_INSTALL_DIR}") + + +# Setup location where binary files are located +set(SB_BINARY_DIR "${SB_ROOT_DIR}/build" + CACHE PATH "Location where files are (will be) located.") +mark_as_advanced(SB_BINARY_DIR) + +message(STATUS "SuperBuild binary files will be located to: ${SB_BINARY_DIR}") + + +######################################### +# Download and install third party libs # +######################################### + +# --------------------------------------------------------------------------------------------- +# Open Source Computer Vision (OpenCV) +# +set(ODM_OpenCV_Version 2.4.11) +option(ODM_BUILD_OpenCV "Force to build OpenCV library" OFF) + +SETUP_EXTERNAL_PROJECT(OpenCV ${ODM_OpenCV_Version} ${ODM_BUILD_OpenCV}) + + +# --------------------------------------------------------------------------------------------- +# Point Cloud Library (PCL) +# +set(ODM_PCL_Version 1.7.2) +option(ODM_BUILD_PCL "Force to build PCL library" OFF) + +SETUP_EXTERNAL_PROJECT(PCL ${ODM_PCL_Version} ${ODM_BUILD_PCL}) + + +# --------------------------------------------------------------------------------------------- +# Google Flags library (GFlags) +# +set(ODM_GFlags_Version 2.1.2) +option(ODM_BUILD_GFlags "Force to build GFlags library" OFF) + +SETUP_EXTERNAL_PROJECT(GFlags ${ODM_GFlags_Version} ${ODM_BUILD_GFlags}) + + +# --------------------------------------------------------------------------------------------- +# Ceres Solver +# +set(ODM_Ceres_Version 1.10.0) +option(ODM_BUILD_Ceres "Force to build Ceres library" OFF) + +SETUP_EXTERNAL_PROJECT(Ceres ${ODM_Ceres_Version} ${ODM_BUILD_Ceres}) + + +# 
--------------------------------------------------------------------------------------------- +# CGAL +# +set(ODM_CGAL_Version 4.9) +option(ODM_BUILD_CGAL "Force to build CGAL library" OFF) + +SETUP_EXTERNAL_PROJECT(CGAL ${ODM_CGAL_Version} ${ODM_BUILD_CGAL}) + +# --------------------------------------------------------------------------------------------- +# Hexer +# +SETUP_EXTERNAL_PROJECT(Hexer 1.4 ON) + +# --------------------------------------------------------------------------------------------- +# Open Geometric Vision (OpenGV) +# Open Structure from Motion (OpenSfM) +# Clustering Views for Multi-view Stereo (CMVS) +# Catkin +# Ecto +# + +set(custom_libs OpenGV + OpenSfM + CMVS + Catkin + Ecto + PDAL + MvsTexturing + Lidar2dems +) + +# Dependencies of the SLAM module +if(ODM_BUILD_SLAM) + list(APPEND custom_libs + Pangolin + ORB_SLAM2) +endif() + +foreach(lib ${custom_libs}) + SETUP_EXTERNAL_PROJECT_CUSTOM(${lib}) +endforeach() + diff --git a/SuperBuild/cmake/External-CGAL.cmake b/SuperBuild/cmake/External-CGAL.cmake new file mode 100644 index 000000000..51e09c4b9 --- /dev/null +++ b/SuperBuild/cmake/External-CGAL.cmake @@ -0,0 +1,26 @@ +set(_proj_name cgal) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name} + URL https://github.com/CGAL/cgal/releases/download/releases%2FCGAL-4.9/CGAL-4.9.zip + URL_MD5 31c08d762a72fda785df194c89b833df + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) diff 
--git a/SuperBuild/cmake/External-CMVS.cmake b/SuperBuild/cmake/External-CMVS.cmake new file mode 100644 index 000000000..a58beb5fd --- /dev/null +++ b/SuperBuild/cmake/External-CMVS.cmake @@ -0,0 +1,28 @@ +set(_proj_name cmvs) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name} + URL https://github.com/edgarriba/CMVS-PMVS/archive/master.zip + URL_MD5 dbb1493f49ca099b4208381bd20d1435 + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CONFIGURE_COMMAND cmake /program + -DCMAKE_RUNTIME_OUTPUT_DIRECTORY:PATH=${SB_INSTALL_DIR}/bin + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) + diff --git a/SuperBuild/cmake/External-Catkin.cmake b/SuperBuild/cmake/External-Catkin.cmake new file mode 100644 index 000000000..5a0914c17 --- /dev/null +++ b/SuperBuild/cmake/External-Catkin.cmake @@ -0,0 +1,27 @@ +set(_proj_name catkin) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/ros/catkin/archive/0.6.16.zip + URL_MD5 F5D45AE68709CE6E3346FB8C019416F8 + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DCATKIN_ENABLE_TESTING=OFF + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install 
step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) diff --git a/SuperBuild/cmake/External-Ceres.cmake b/SuperBuild/cmake/External-Ceres.cmake new file mode 100644 index 000000000..4b3eaf109 --- /dev/null +++ b/SuperBuild/cmake/External-Ceres.cmake @@ -0,0 +1,31 @@ +set(_proj_name ceres) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + DEPENDS gflags + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL http://ceres-solver.org/ceres-solver-1.10.0.tar.gz + URL_MD5 dbf9f452bd46e052925b835efea9ab16 + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DCMAKE_C_FLAGS=-fPIC + -DCMAKE_CXX_FLAGS=-fPIC + -DBUILD_EXAMPLES=OFF + -DBUILD_TESTING=OFF + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) \ No newline at end of file diff --git a/SuperBuild/cmake/External-Ecto.cmake b/SuperBuild/cmake/External-Ecto.cmake new file mode 100644 index 000000000..e031cb82c --- /dev/null +++ b/SuperBuild/cmake/External-Ecto.cmake @@ -0,0 +1,30 @@ +set(_proj_name ecto) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + DEPENDS catkin + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name} + URL https://github.com/plasmodic/ecto/archive/c6178ed0102a66cebf503a4213c27b0f60cfca69.zip + URL_MD5 A5C4757B656D536D3E3CC1DC240EC158 + #--Update/Patch step---------- + UPDATE_COMMAND "" + 
#--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DBUILD_DOC=OFF + -DBUILD_SAMPLES=OFF + -DCATKIN_ENABLE_TESTING=OFF + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) diff --git a/SuperBuild/cmake/External-GFlags.cmake b/SuperBuild/cmake/External-GFlags.cmake new file mode 100644 index 000000000..2c9b26792 --- /dev/null +++ b/SuperBuild/cmake/External-GFlags.cmake @@ -0,0 +1,27 @@ +set(_proj_name gflags) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/gflags/gflags/archive/v2.1.2.zip + URL_MD5 5cb0a1b38740ed596edb7f86cd5b3bd8 + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DCMAKE_BUILD_TYPE:STRING=Release + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) diff --git a/SuperBuild/cmake/External-Hexer.cmake b/SuperBuild/cmake/External-Hexer.cmake new file mode 100644 index 000000000..64de1ae21 --- /dev/null +++ b/SuperBuild/cmake/External-Hexer.cmake @@ -0,0 +1,27 @@ +set(_proj_name hexer) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + DEPENDS + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL 
https://github.com/hobu/hexer/archive/2898b96b1105991e151696391b9111610276258f.tar.gz + URL_MD5 e8f2788332ad212cf78efa81a82e95dd + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) diff --git a/SuperBuild/cmake/External-Lidar2dems.cmake b/SuperBuild/cmake/External-Lidar2dems.cmake new file mode 100644 index 000000000..4772a2462 --- /dev/null +++ b/SuperBuild/cmake/External-Lidar2dems.cmake @@ -0,0 +1,24 @@ +set(_proj_name lidar2dems) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name} + URL https://github.com/OpenDroneMap/lidar2dems/archive/master.zip + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CONFIGURE_COMMAND "" + #--Build step----------------- + BUILD_COMMAND "" + #--Install step--------------- + INSTALL_COMMAND "${SB_SOURCE_DIR}/${_proj_name}/install.sh" "${SB_INSTALL_DIR}" + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) diff --git a/SuperBuild/cmake/External-MvsTexturing.cmake b/SuperBuild/cmake/External-MvsTexturing.cmake new file mode 100644 index 000000000..d637f682b --- /dev/null +++ b/SuperBuild/cmake/External-MvsTexturing.cmake @@ -0,0 +1,29 @@ +set(_proj_name mvstexturing) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + DEPENDS + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR 
${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/OpenDroneMap/mvs-texturing/archive/4f885aff1d92fb20a7d72d320be5b935397c81c9.zip + URL_MD5 cbcccceba4693c6c882eb4aa618a2227 + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DRESEARCH=OFF + -DCMAKE_BUILD_TYPE:STRING=Release + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) diff --git a/SuperBuild/cmake/External-ORB_SLAM2.cmake b/SuperBuild/cmake/External-ORB_SLAM2.cmake new file mode 100644 index 000000000..9e7047844 --- /dev/null +++ b/SuperBuild/cmake/External-ORB_SLAM2.cmake @@ -0,0 +1,78 @@ +set(_proj_name orb_slam2) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + DEPENDS opencv pangolin + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/paulinus/ORB_SLAM2/archive/7c11f186a53a75560cd17352d327b0bc127a82de.zip + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_COMMAND "" + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) + +# DBoW2 +set(DBoW2_BINARY_DIR "${SB_BINARY_DIR}/DBoW2") +file(MAKE_DIRECTORY "${DBoW2_BINARY_DIR}") + +ExternalProject_Add_Step(${_proj_name} build_DBoW2 + COMMAND make -j2 + DEPENDEES configure_DBoW2 + DEPENDERS configure + WORKING_DIRECTORY ${DBoW2_BINARY_DIR} + 
ALWAYS 1 +) + +ExternalProject_Add_Step(${_proj_name} configure_DBoW2 + COMMAND ${CMAKE_COMMAND} /Thirdparty/DBoW2 + -DOpenCV_DIR=${SB_INSTALL_DIR}/share/OpenCV + -DCMAKE_BUILD_TYPE=Release + DEPENDEES download + DEPENDERS build_DBoW2 + WORKING_DIRECTORY ${DBoW2_BINARY_DIR} + ALWAYS 1 +) + +# g2o +set(g2o_BINARY_DIR "${SB_BINARY_DIR}/g2o") +file(MAKE_DIRECTORY "${g2o_BINARY_DIR}") + +ExternalProject_Add_Step(${_proj_name} build_g2o + COMMAND make -j2 + DEPENDEES configure_g2o + DEPENDERS configure + WORKING_DIRECTORY ${g2o_BINARY_DIR} + ALWAYS 1 +) + +ExternalProject_Add_Step(${_proj_name} configure_g2o + COMMAND ${CMAKE_COMMAND} /Thirdparty/g2o + -DCMAKE_BUILD_TYPE=Release + DEPENDEES download + DEPENDERS build_g2o + WORKING_DIRECTORY ${g2o_BINARY_DIR} + ALWAYS 1 +) + +# Uncompress Vocabulary +ExternalProject_Add_Step(${_proj_name} uncompress_vocabulary + COMMAND tar -xf ORBvoc.txt.tar.gz + DEPENDEES download + DEPENDERS configure + WORKING_DIRECTORY /Vocabulary + ALWAYS 1 +) diff --git a/SuperBuild/cmake/External-OpenCV.cmake b/SuperBuild/cmake/External-OpenCV.cmake new file mode 100644 index 000000000..232059c9e --- /dev/null +++ b/SuperBuild/cmake/External-OpenCV.cmake @@ -0,0 +1,60 @@ +set(_proj_name opencv) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/Itseez/opencv/archive/2.4.11.zip + URL_MD5 b517e83489c709eee1d8be76b16976a7 + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DBUILD_opencv_core=ON + -DBUILD_opencv_imgproc=ON + -DBUILD_opencv_highgui=ON + -DBUILD_opencv_video=ON + -DBUILD_opencv_ml=ON + -DBUILD_opencv_features2d=ON + -DBUILD_opencv_calib3d=ON + -DBUILD_opencv_contrib=ON + -DBUILD_opencv_flann=ON + 
-DBUILD_opencv_objdetect=ON + -DBUILD_opencv_photo=ON + -DBUILD_opencv_legacy=ON + -DBUILD_opencv_python=ON + -DWITH_FFMPEG=${ODM_BUILD_SLAM} + -DWITH_CUDA=OFF + -DWITH_GTK=${ODM_BUILD_SLAM} + -DWITH_VTK=OFF + -DWITH_EIGEN=OFF + -DWITH_OPENNI=OFF + -DBUILD_EXAMPLES=OFF + -DBUILD_TESTS=OFF + -DBUILD_PERF_TESTS=OFF + -DBUILD_DOCS=OFF + -DBUILD_opencv_apps=OFF + -DBUILD_opencv_gpu=OFF + -DBUILD_opencv_videostab=OFF + -DBUILD_opencv_nonfree=OFF + -DBUILD_opencv_stitching=OFF + -DBUILD_opencv_world=OFF + -DBUILD_opencv_superres=OFF + -DBUILD_opencv_java=OFF + -DBUILD_opencv_ocl=OFF + -DBUILD_opencv_ts=OFF + -DCMAKE_BUILD_TYPE:STRING=Release + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) diff --git a/SuperBuild/cmake/External-OpenGV.cmake b/SuperBuild/cmake/External-OpenGV.cmake new file mode 100644 index 000000000..cd1ac76de --- /dev/null +++ b/SuperBuild/cmake/External-OpenGV.cmake @@ -0,0 +1,29 @@ +set(_proj_name opengv) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/paulinus/opengv/archive/7436794df04d85433a966395088e38b107e69fc2.zip + URL_MD5 9B303C3AB9F210B242941E851572D2C8 + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DBUILD_TESTS=OFF + -DBUILD_PYTHON=ON + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) + 
diff --git a/SuperBuild/cmake/External-OpenSfM.cmake b/SuperBuild/cmake/External-OpenSfM.cmake new file mode 100644 index 000000000..9fa30f422 --- /dev/null +++ b/SuperBuild/cmake/External-OpenSfM.cmake @@ -0,0 +1,31 @@ +set(_proj_name opensfm) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + DEPENDS ceres opencv opengv + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/mapillary/OpenSfM/archive/93be3a1bfe46482345ddd57bc1f3a62f63169b86.zip + URL_MD5 2b310420a5c7c2297294a39183fb8b1a + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CONFIGURE_COMMAND cmake /${_proj_name}/src + -DCERES_ROOT_DIR=${SB_INSTALL_DIR} + -DOpenCV_DIR=${SB_INSTALL_DIR}/share/OpenCV + -DOPENSFM_BUILD_TESTS=off + + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_COMMAND "" + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) + diff --git a/SuperBuild/cmake/External-PCL.cmake b/SuperBuild/cmake/External-PCL.cmake new file mode 100644 index 000000000..50d7c1df6 --- /dev/null +++ b/SuperBuild/cmake/External-PCL.cmake @@ -0,0 +1,52 @@ +set(_proj_name pcl) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/PointCloudLibrary/pcl/archive/pcl-1.8.0.tar.gz + URL_MD5 8c1308be2c13106e237e4a4204a32cca + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DBUILD_features=OFF + -DBUILD_filters=OFF + -DBUILD_geometry=OFF + -DBUILD_keypoints=OFF + 
-DBUILD_outofcore=OFF + -DBUILD_people=OFF + -DBUILD_recognition=OFF + -DBUILD_registration=OFF + -DBUILD_sample_consensus=OFF + -DBUILD_segmentation=OFF + -DBUILD_features=OFF + -DBUILD_surface_on_nurbs=OFF + -DBUILD_tools=OFF + -DBUILD_tracking=OFF + -DBUILD_visualization=OFF + -DWITH_QT=OFF + -DBUILD_OPENNI=OFF + -DBUILD_OPENNI2=OFF + -DWITH_OPENNI=OFF + -DWITH_OPENNI2=OFF + -DWITH_FZAPI=OFF + -DWITH_LIBUSB=OFF + -DWITH_PCAP=OFF + -DWITH_PXCAPI=OFF + -DCMAKE_BUILD_TYPE=Release + -DPCL_VERBOSITY_LEVEL=Error + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) \ No newline at end of file diff --git a/SuperBuild/cmake/External-PDAL.cmake b/SuperBuild/cmake/External-PDAL.cmake new file mode 100644 index 000000000..d1fb938d3 --- /dev/null +++ b/SuperBuild/cmake/External-PDAL.cmake @@ -0,0 +1,47 @@ +set(_proj_name pdal) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + DEPENDS hexer + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/PDAL/PDAL/archive/e881b581e3b91a928105d67db44c567f3b6d1afe.tar.gz + URL_MD5 cadbadf1c83d69d6525cfffd41473323 + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -BUILD_PGPOINTCLOUD_TESTS=OFF + -BUILD_PLUGIN_PCL=ON + -BUILD_PLUGIN_PGPOINTCLOUD=ON + -DBUILD_PLUGIN_CPD=OFF + -DBUILD_PLUGIN_GREYHOUND=OFF + -DBUILD_PLUGIN_HEXBIN=ON + -DBUILD_PLUGIN_ICEBRIDGE=OFF + -DBUILD_PLUGIN_MRSID=OFF + -DBUILD_PLUGIN_NITF=OFF + -DBUILD_PLUGIN_OCI=OFF + -DBUILD_PLUGIN_P2G=OFF + -DBUILD_PLUGIN_SQLITE=OFF + -DBUILD_PLUGIN_RIVLIB=OFF + -DBUILD_PLUGIN_PYTHON=OFF + -DENABLE_CTEST=OFF 
+ -DWITH_APPS=ON + -DWITH_LAZPERF=OFF + -DWITH_GEOTIFF=ON + -DWITH_LASZIP=ON + -DWITH_TESTS=OFF + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) diff --git a/SuperBuild/cmake/External-Pangolin.cmake b/SuperBuild/cmake/External-Pangolin.cmake new file mode 100644 index 000000000..f328c89b4 --- /dev/null +++ b/SuperBuild/cmake/External-Pangolin.cmake @@ -0,0 +1,29 @@ +set(_proj_name pangolin) +set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}") + +ExternalProject_Add(${_proj_name} + PREFIX ${_SB_BINARY_DIR} + TMP_DIR ${_SB_BINARY_DIR}/tmp + STAMP_DIR ${_SB_BINARY_DIR}/stamp + #--Download step-------------- + DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} + URL https://github.com/paulinus/Pangolin/archive/b7c66570b336e012bf3124e2a7411d417a1d35f7.zip + URL_MD5 9b7938d1045d26b27a637b663e647aef + #--Update/Patch step---------- + UPDATE_COMMAND "" + #--Configure step------------- + SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name} + CMAKE_ARGS + -DCPP11_NO_BOOST=1 + -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} + + #--Build step----------------- + BINARY_DIR ${_SB_BINARY_DIR} + #--Install step--------------- + INSTALL_DIR ${SB_INSTALL_DIR} + #--Output logging------------- + LOG_DOWNLOAD OFF + LOG_CONFIGURE OFF + LOG_BUILD OFF +) + diff --git a/SuperBuild/cmake/ExternalProject-Setup.cmake b/SuperBuild/cmake/ExternalProject-Setup.cmake new file mode 100644 index 000000000..eea0b5339 --- /dev/null +++ b/SuperBuild/cmake/ExternalProject-Setup.cmake @@ -0,0 +1,27 @@ +set(ADD_INTERNAL_LIB_MSG "--- Adding internal version") +set(FORCE_BUILD_LIB_MSG "force build ${ADD_INTERNAL_LIB_MSG}") + +macro(SETUP_EXTERNAL_PROJECT name version force_build) + + if(NOT ${force_build}) + + find_package(${name} ${version} EXACT QUIET) + + if(${${name}_FOUND}) + message(STATUS "${name} 
${${name}_VERSION} found") + set(${name}_DIR ${${name}_DIR}) + else() + message(STATUS "${name} ${version} not found ${ADD_INTERNAL_LIB_MSG}") + include(External-${name}) + endif() + else() + message(STATUS "${name} ${version} ${FORCE_BUILD_LIB_MSG}") + include(External-${name}) + endif() + +endmacro() + +macro(SETUP_EXTERNAL_PROJECT_CUSTOM name) + message(STATUS "${name} ${FORCE_BUILD_LIB_MSG}") + include(External-${name}) +endmacro() \ No newline at end of file diff --git a/VERSION b/VERSION new file mode 100644 index 000000000..9e11b32fc --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +0.3.1 diff --git a/ccd_defs.pl b/ccd_defs.pl deleted file mode 100644 index edb5508e8..000000000 --- a/ccd_defs.pl +++ /dev/null @@ -1,277 +0,0 @@ -#!/usr/bin/perl - -%ccdWidths = ( - "Asahi Optical Co.,Ltd. PENTAX Optio330RS" => 7.176, # 1/1.8" - "Canon Canon DIGITAL IXUS 400" => 7.176, # 1/1.8" - "Canon Canon DIGITAL IXUS 40" => 5.76, # 1/2.5" - "Canon Canon DIGITAL IXUS 430" => 7.176, # 1/1.8" - "Canon Canon DIGITAL IXUS 500" => 7.176, # 1/1.8" - "Canon Canon DIGITAL IXUS 50" => 5.76, # 1/2.5" - "Canon Canon DIGITAL IXUS 55" => 5.76, # 1/2.5" - "Canon Canon DIGITAL IXUS 60" => 5.76, # 1/2.5" - "Canon Canon DIGITAL IXUS 65" => 5.76, # 1/2.5" - "Canon Canon DIGITAL IXUS 700" => 7.176, # 1/1.8" - "Canon Canon DIGITAL IXUS 750" => 7.176, # 1/1.8" - "Canon Canon DIGITAL IXUS 800 IS" => 5.76, # 1/2.5" - "Canon Canon DIGITAL IXUS II" => 5.27, # 1/2.7" - "Canon Canon DIGITAL IXUS 430" => 6.18, # 1/2.3" - "Canon Canon EOS 10D" => 22.7, - "Canon Canon EOS-1D Mark II" => 28.7, - "Canon Canon EOS-1Ds Mark II" => 35.95, - "Canon Canon EOS 20D" => 22.5, - "Canon Canon EOS 20D" => 22.5, - "Canon Canon EOS 300D DIGITAL" => 22.66, - "Canon Canon EOS 30D" => 22.5, - "Canon Canon EOS 350D DIGITAL" => 22.2, - "Canon Canon EOS 400D DIGITAL" => 22.2, - "Canon Canon EOS 40D" => 22.2, - "Canon Canon EOS 5D" => 35.8, - "Canon Canon EOS DIGITAL REBEL" => 22.66, - "Canon Canon EOS DIGITAL REBEL XT" => 22.2, - 
"Canon Canon EOS DIGITAL REBEL XTi" => 22.2, - "Canon Canon EOS Kiss Digital" => 22.66, - "Canon Canon IXY DIGITAL 600" => 7.176, # 1/1.8" - "Canon Canon PowerShot A10" => 5.23, # 1/1.8" - "Canon Canon PowerShot A20" => 7.176, # 1/1.8" - "Canon Canon PowerShot A400" => 4.54, # 1/3.2" - "Canon Canon PowerShot A40" => 5.27, # 1/2.7" - "Canon Canon PowerShot A510" => 5.76, # 1/2.5" - "Canon Canon PowerShot A520" => 5.76, # 1/2.5" - "Canon Canon PowerShot A530" => 5.76, # 1/2.5" - "Canon Canon PowerShot A60" => 5.27, # 1/2.7" - "Canon Canon PowerShot A620" => 7.176, # 1/1.8" - "Canon Canon PowerShot A630" => 7.176, # 1/1.8" - "Canon Canon PowerShot A640" => 7.176, # 1/1.8" - "Canon Canon PowerShot A700" => 5.76, # 1/2.5" - "Canon Canon PowerShot A70" => 5.27, # 1/2.7" - "Canon Canon PowerShot A710 IS" => 5.76, # 1/2.5" - "Canon Canon PowerShot A75" => 5.27, # 1/2.7" - "Canon Canon PowerShot A80" => 7.176, # 1/1.8" - "Canon Canon PowerShot A85" => 5.27, # 1/2.7" - "Canon Canon PowerShot A95" => 7.176, # 1/1.8" - "Canon Canon PowerShot G1" => 7.176, # 1/1.8" - "Canon Canon PowerShot G2" => 7.176, # 1/1.8" - "Canon Canon PowerShot G3" => 7.176, # 1/1.8" - "Canon Canon PowerShot G5" => 7.176, # 1/1.8" - "Canon Canon PowerShot G6" => 7.176, # 1/1.8" - "Canon Canon PowerShot G7" => 7.176, # 1/1.8" - "Canon Canon PowerShot G9" => 7.600, # 1/1.7" - "Canon Canon PowerShot Pro1" => 8.8, # 2/3" - "Canon Canon PowerShot S110" => 5.27, # 1/2.7" - "Canon Canon PowerShot S1 IS" => 5.27, # 1/2.7" - "Canon Canon PowerShot S200" => 5.27, # 1/2.7" - "Canon Canon PowerShot S2 IS" => 5.76, # 1/2.5" - "Canon Canon PowerShot S30" => 7.176, # 1/1.8" - "Canon Canon PowerShot S3 IS" => 5.76, # 1/2.5" - "Canon Canon PowerShot S400" => 7.176, # 1/1.8" - "Canon Canon PowerShot S40" => 7.176, # 1/1.8" - "Canon Canon PowerShot S410" => 7.176, # 1/1.8" - "Canon Canon PowerShot S45" => 7.176, # 1/1.8" - "Canon Canon PowerShot S500" => 7.176, # 1/1.8" - "Canon Canon PowerShot S50" => 7.176, # 1/1.8" - 
"Canon Canon PowerShot S60" => 7.176, # 1/1.8" - "Canon Canon PowerShot S70" => 7.176, # 1/1.8" - "Canon Canon PowerShot S80" => 7.176, # 1/1.8" - "Canon Canon PowerShot SD1000" => 5.75, # 1/2.5" - "Canon Canon PowerShot SD100" => 5.27, # 1/2.7" - "Canon Canon PowerShot SD10" => 5.75, # 1/2.5" - "Canon Canon PowerShot SD110" => 5.27, # 1/2.7" - "Canon Canon PowerShot SD200" => 5.76, # 1/2.5" - "Canon Canon PowerShot SD300" => 5.76, # 1/2.5" - "Canon Canon PowerShot SD400" => 5.76, # 1/2.5" - "Canon Canon PowerShot SD450" => 5.76, # 1/2.5" - "Canon Canon PowerShot SD500" => 7.176, # 1/1.8" - "Canon Canon PowerShot SD550" => 7.176, # 1/1.8" - "Canon Canon PowerShot SD600" => 5.76, # 1/2.5" - "Canon Canon PowerShot SD630" => 5.76, # 1/2.5" - "Canon Canon PowerShot SD700 IS" => 5.76, # 1/2.5" - "Canon Canon PowerShot SD750" => 5.75, # 1/2.5" - "Canon Canon PowerShot SD800 IS" => 5.76, # 1/2.5" - "Canon EOS 300D DIGITAL" => 22.66, - "Canon EOS DIGITAL REBEL" => 22.66, - "Canon PowerShot A510" => 5.76, # 1/2.5" ??? - "Canon PowerShot S30" => 7.176, # 1/1.8" - "CASIO COMPUTER CO.,LTD. EX-S500" => 5.76, # 1/2.5" - "CASIO COMPUTER CO.,LTD. EX-Z1000" => 7.716, # 1/1.8" - "CASIO COMPUTER CO.,LTD EX-Z30" => 5.76, # 1/2.5 " - "CASIO COMPUTER CO.,LTD. EX-Z600" => 5.76, # 1/2.5" - "CASIO COMPUTER CO.,LTD. EX-Z60" => 7.176, # 1/1.8" - "CASIO COMPUTER CO.,LTD EX-Z750" => 7.176, # 1/1.8" - "CASIO COMPUTER CO.,LTD. 
EX-Z850" => 7.176, - "EASTMAN KODAK COMPANY KODAK CX7330 ZOOM DIGITAL CAMERA" => 5.27, # 1/2.7" - "EASTMAN KODAK COMPANY KODAK CX7530 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - "EASTMAN KODAK COMPANY KODAK DX3900 ZOOM DIGITAL CAMERA" => 7.176, # 1/1.8" - "EASTMAN KODAK COMPANY KODAK DX4900 ZOOM DIGITAL CAMERA" => 7.176, # 1/1.8" - "EASTMAN KODAK COMPANY KODAK DX6340 ZOOM DIGITAL CAMERA" => 5.27, # 1/2.7" - "EASTMAN KODAK COMPANY KODAK DX6490 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - "EASTMAN KODAK COMPANY KODAK DX7630 ZOOM DIGITAL CAMERA" => 7.176, # 1/1.8" - "EASTMAN KODAK COMPANY KODAK Z650 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - "EASTMAN KODAK COMPANY KODAK Z700 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - "EASTMAN KODAK COMPANY KODAK Z740 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - "EASTMAN KODAK COMPANY KODAK Z740 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" ? - "FUJIFILM FinePix2600Zoom" => 5.27, # 1/2.7" - "FUJIFILM FinePix40i" => 7.600, # 1/1.7" - "FUJIFILM FinePix A310" => 5.27, # 1/2.7" - "FUJIFILM FinePix A330" => 5.27, # 1/2.7" - "FUJIFILM FinePix A600" => 7.600, # 1/1.7" - "FUJIFILM FinePix E500" => 5.76, # 1/2.5" - "FUJIFILM FinePix E510" => 5.76, # 1/2.5" - "FUJIFILM FinePix E550" => 7.600, # 1/1.7" - "FUJIFILM FinePix E900" => 7.78, # 1/1.6" - "FUJIFILM FinePix F10" => 7.600, # 1/1.7" - "FUJIFILM FinePix F30" => 7.600, # 1/1.7" - "FUJIFILM FinePix F450" => 5.76, # 1/2.5" - "FUJIFILM FinePix F601 ZOOM" => 7.600, # 1/1.7" - "FUJIFILM FinePix S3Pro" => 23.0, - "FUJIFILM FinePix S5000" => 5.27, # 1/2.7" - "FUJIFILM FinePix S5200" => 5.76, # 1/2.5" - "FUJIFILM FinePix S5500" => 5.27, # 1/2.7" - "FUJIFILM FinePix S6500fd" => 7.600, # 1/1.7" - "FUJIFILM FinePix S7000" => 7.600, # 1/1.7" - "FUJIFILM FinePix Z2" => 5.76, # 1/2.5" - "Hewlett-Packard hp 635 Digital Camera" => 4.54, # 1/3.2" - "Hewlett-Packard hp PhotoSmart 43x series" => 5.27, # 1/2.7" - "Hewlett-Packard HP PhotoSmart 618 (V1.1)" => 5.27, # 1/2.7" - "Hewlett-Packard HP PhotoSmart C945 (V01.61)" => 7.176, # 
1/1.8" - "Hewlett-Packard HP PhotoSmart R707 (V01.00)" => 7.176, # 1/1.8" - "KONICA MILOLTA DYNAX 5D" => 23.5, - "Konica Minolta Camera, Inc. DiMAGE A2" => 8.80, # 2/3" - "KONICA MINOLTA CAMERA, Inc. DiMAGE G400" => 5.76, # 1/2.5" - "Konica Minolta Camera, Inc. DiMAGE Z2" => 5.76, # 1/2.5" - "KONICA MINOLTA DiMAGE A200" => 8.80, # 2/3" - "KONICA MINOLTA DiMAGE X1" => 7.176, # 1/1.8" - "KONICA MINOLTA DYNAX 5D" => 23.5, - "Minolta Co., Ltd. DiMAGE F100" => 7.176, # 1/2.7" - "Minolta Co., Ltd. DiMAGE Xi" => 5.27, # 1/2.7" - "Minolta Co., Ltd. DiMAGE Xt" => 5.27, # 1/2.7" - "Minolta Co., Ltd. DiMAGE Z1" => 5.27, # 1/2.7" - "NIKON COOLPIX L3" => 5.76, # 1/2.5" - "NIKON COOLPIX P2" => 7.176, # 1/1.8" - "NIKON COOLPIX S4" => 5.76, # 1/2.5" - "NIKON COOLPIX S7c" => 5.76, # 1/2.5" - "NIKON CORPORATION NIKON D100" => 23.7, - "NIKON CORPORATION NIKON D1" => 23.7, - "NIKON CORPORATION NIKON D1H" => 23.7, - "NIKON CORPORATION NIKON D200" => 23.6, - "NIKON CORPORATION NIKON D2H" => 23.3, - "NIKON CORPORATION NIKON D2X" => 23.7, - "NIKON CORPORATION NIKON D40" => 23.7, - "NIKON CORPORATION NIKON D50" => 23.7, - "NIKON CORPORATION NIKON D60" => 23.6, - "NIKON CORPORATION NIKON D70" => 23.7, - "NIKON CORPORATION NIKON D70s" => 23.7, - "NIKON CORPORATION NIKON D80" => 23.6, - "NIKON CORPORATION NIKON D700" => 36.0, - "NIKON E2500" => 5.27, # 1/2.7" - "NIKON E2500" => 5.27, # 1/2.7" - "NIKON E3100" => 5.27, # 1/2.7" - "NIKON E3200" => 5.27, - "NIKON E3700" => 5.27, # 1/2.7" - "NIKON E4200" => 7.176, # 1/1.8" - "NIKON E4300" => 7.18, - "NIKON E4500" => 7.176, # 1/1.8" - "NIKON E4600" => 5.76, # 1/2.5" - "NIKON E5000" => 8.80, # 2/3" - "NIKON E5200" => 7.176, # 1/1.8" - "NIKON E5400" => 7.176, # 1/1.8" - "NIKON E5600" => 5.76, # 1/2.5" - "NIKON E5700" => 8.80, # 2/3" - "NIKON E5900" => 7.176, # 1/1.8" - "NIKON E7600" => 7.176, # 1/1.8" - "NIKON E775" => 5.27, # 1/2.7" - "NIKON E7900" => 7.176, # 1/1.8" - "NIKON E7900" => 7.176, # 1/1.8" - "NIKON E8800" => 8.80, # 2/3" - "NIKON E990" 
=> 7.176, # 1/1.8" - "NIKON E995" => 7.176, # 1/1.8" - "NIKON S1" => 5.76, # 1/2.5" - "Nokia N80" => 5.27, # 1/2.7" - "Nokia N80" => 5.27, # 1/2.7" - "Nokia N93" => 4.536, # 1/3.1" - "Nokia N95" => 5.7, # 1/2.7" - "OLYMPUS CORPORATION C-5000Z" => 7.176, # 1/1.8" - "OLYMPUS CORPORATION C5060WZ" => 7.176, # 1/1.8" - "OLYMPUS CORPORATION C750UZ" => 5.27, # 1/2.7" - "OLYMPUS CORPORATION C765UZ" => 5.76, # 1//2.5" - "OLYMPUS CORPORATION C8080WZ" => 8.80, # 2/3" - "OLYMPUS CORPORATION X250,D560Z,C350Z" => 5.76, # 1/2.5" - "OLYMPUS CORPORATION X-3,C-60Z" => 7.176, # 1.8" - "OLYMPUS CORPORATION X400,D580Z,C460Z" => 5.27, # 1/2.7" - "OLYMPUS IMAGING CORP. E-500" => 17.3, # 4/3? - "OLYMPUS IMAGING CORP. E-510" => 17.3, - "OLYMPUS IMAGING CORP. FE115,X715" => 5.76, # 1/2.5" - "OLYMPUS IMAGING CORP. SP310" => 7.176, # 1/1.8" - "OLYMPUS IMAGING CORP. SP510UZ" => 5.75, # 1/2.5" - "OLYMPUS IMAGING CORP. SP550UZ" => 5.76, # 1/2.5" - "OLYMPUS IMAGING CORP. uD600,S600" => 5.75, # 1/2.5" - "OLYMPUS_IMAGING_CORP. X450,D535Z,C370Z" => 5.27, # 1/2.7" - "OLYMPUS IMAGING CORP. 
X550,D545Z,C480Z" => 5.76, # 1/2.5" - "OLYMPUS OPTICAL CO.,LTD C2040Z" => 6.40, # 1/2" - "OLYMPUS OPTICAL CO.,LTD C211Z" => 5.27, # 1/2.7" - "OLYMPUS OPTICAL CO.,LTD C2Z,D520Z,C220Z" => 4.54, # 1/3.2" - "OLYMPUS OPTICAL CO.,LTD C3000Z" => 7.176, # 1/1.8" - "OLYMPUS OPTICAL CO.,LTD C300Z,D550Z" => 5.4, - "OLYMPUS OPTICAL CO.,LTD C4100Z,C4000Z" => 7.176, # 1/1.8" - "OLYMPUS OPTICAL CO.,LTD C750UZ" => 5.27, # 1/2.7" - "OLYMPUS OPTICAL CO.,LTD X-2,C-50Z" => 7.176, # 1/1.8" - "OLYMPUS SP550UZ" => 5.76, # 1/2.5" - "OLYMPUS X100,D540Z,C310Z" => 5.27, # 1/2.7" - "Panasonic DMC-FX01" => 5.76, # 1/2.5" - "Panasonic DMC-FX07" => 5.75, # 1/2.5" - "Panasonic DMC-FX9" => 5.76, # 1/2.5" - "Panasonic DMC-FZ20" => 5.760, # 1/2.5" - "Panasonic DMC-FZ2" => 4.54, # 1/3.2" - "Panasonic DMC-FZ30" => 7.176, # 1/1.8" - "Panasonic DMC-FZ50" => 7.176, # 1/1.8" - "Panasonic DMC-FZ5" => 5.760, # 1/2.5" - "Panasonic DMC-FZ7" => 5.76, # 1/2.5" - "Panasonic DMC-LC1" => 8.80, # 2/3" - "Panasonic DMC-LC33" => 5.760, # 1/2.5" - "Panasonic DMC-LX1" => 8.50, # 1/6.5" - "Panasonic DMC-LZ2" => 5.76, # 1/2.5" - "Panasonic DMC-TZ1" => 5.75, # 1/2.5" - "Panasonic DMC-TZ3" => 5.68, # 1/2.35" - "Panasonic DMC-TZ5" => 6.12, # 1/2.33" - "PENTAX Corporation PENTAX *ist DL" => 23.5, - "PENTAX Corporation PENTAX *ist DS2" => 23.5, - "PENTAX Corporation PENTAX *ist DS" => 23.5, - "PENTAX Corporation PENTAX K100D" => 23.5, - "PENTAX Corporation PENTAX Optio 450" => 7.176, # 1/1.8" - "PENTAX Corporation PENTAX Optio 550" => 7.176, # 1/1.8" - "PENTAX Corporation PENTAX Optio E10" => 5.76, # 1/2.5" - "PENTAX Corporation PENTAX Optio S40" => 5.76, # 1/2.5" - "PENTAX Corporation PENTAX Optio S4" => 5.76, # 1/2.5" - "PENTAX Corporation PENTAX Optio S50" => 5.76, # 1/2.5" - "PENTAX Corporation PENTAX Optio S5i" => 5.76, # 1/2.5" - "PENTAX Corporation PENTAX Optio S5z" => 5.76, # 1/2.5" - "PENTAX Corporation PENTAX Optio SV" => 5.76, # 1/2.5" - "PENTAX Corporation PENTAX Optio WP" => 5.75, # 1/2.5" - "RICOH CaplioG3 
modelM" => 5.27, # 1/2.7" - "RICOH Caplio GX" => 7.176, # 1/1.8" - "RICOH Caplio R30" => 5.75, # 1/2.5" - "Samsung Digimax 301" => 5.27, # 1/2.7" - "Samsung Techwin " => 5.76, # 1/2.5" - "SAMSUNG TECHWIN Pro 815" => 8.80, # 2/3" - "SONY DSC-F828" => 8.80, # 2/3" - "SONY DSC-N12" => 7.176, # 1/1.8" - "SONY DSC-P100" => 7.176, # 1/1.8" - "SONY DSC-P10" => 7.176, # 1/1.8" - "SONY DSC-P12" => 7.176, # 1/1.8" - "SONY DSC-P150" => 7.176, # 1/1.8" - "SONY DSC-P200" => 7.176, # 1/1.8"); - "SONY DSC-P52" => 5.27, # 1/2.7" - "SONY DSC-P72" => 5.27, # 1/2.7" - "SONY DSC-P73" => 5.27, - "SONY DSC-P8" => 5.27, # 1/2.7" - "SONY DSC-R1" => 21.5, - "SONY DSC-S40" => 5.27, # 1/2.7" - "SONY DSC-S600" => 5.760, # 1/2.5" - "SONY DSC-T9" => 7.18, - "SONY DSC-V1" => 7.176, # 1/1.8" - "SONY DSC-W1" => 7.176, # 1/1.8" - "SONY DSC-W30" => 5.760, # 1/2.5" - "SONY DSC-W50" => 5.75, # 1/2.5" - "SONY DSC-W5" => 7.176, # 1/1.8" - "SONY DSC-W7" => 7.176, # 1/1.8" - "SONY DSC-W80" => 5.75, # 1/2.5" -); diff --git a/ccd_defs.rb b/ccd_defs.rb deleted file mode 100644 index 3e7f1486b..000000000 --- a/ccd_defs.rb +++ /dev/null @@ -1,278 +0,0 @@ -#!/usr/bin/ruby - -$ccd_widths = { - :"Asahi Optical Co.,Ltd. 
PENTAX Optio330RS" => 7.176, # 1/1.8" - :"Canon Canon DIGITAL IXUS 400" => 7.176, # 1/1.8" - :"Canon Canon DIGITAL IXUS 40" => 5.76, # 1/2.5" - :"Canon Canon DIGITAL IXUS 430" => 7.176, # 1/1.8" - :"Canon Canon DIGITAL IXUS 500" => 7.176, # 1/1.8" - :"Canon Canon DIGITAL IXUS 50" => 5.76, # 1/2.5" - :"Canon Canon DIGITAL IXUS 55" => 5.76, # 1/2.5" - :"Canon Canon DIGITAL IXUS 60" => 5.76, # 1/2.5" - :"Canon Canon DIGITAL IXUS 65" => 5.76, # 1/2.5" - :"Canon Canon DIGITAL IXUS 700" => 7.176, # 1/1.8" - :"Canon Canon DIGITAL IXUS 750" => 7.176, # 1/1.8" - :"Canon Canon DIGITAL IXUS 800 IS" => 5.76, # 1/2.5" - :"Canon Canon DIGITAL IXUS II" => 5.27, # 1/2.7" - :"Canon Canon DIGITAL IXUS 430" => 6.18, # 1/2.3" - :"Canon Canon EOS 10D" => 22.7, - :"Canon Canon EOS-1D Mark II" => 28.7, - :"Canon Canon EOS-1Ds Mark II" => 35.95, - :"Canon Canon EOS 20D" => 22.5, - :"Canon Canon EOS 20D" => 22.5, - :"Canon Canon EOS 300D DIGITAL" => 22.66, - :"Canon Canon EOS 30D" => 22.5, - :"Canon Canon EOS 350D DIGITAL" => 22.2, - :"Canon Canon EOS 400D DIGITAL" => 22.2, - :"Canon Canon EOS 40D" => 22.2, - :"Canon Canon EOS 5D" => 35.8, - :"Canon Canon EOS DIGITAL REBEL" => 22.66, - :"Canon Canon EOS DIGITAL REBEL XT" => 22.2, - :"Canon Canon EOS DIGITAL REBEL XTi" => 22.2, - :"Canon Canon EOS Kiss Digital" => 22.66, - :"Canon Canon IXY DIGITAL 600" => 7.176, # 1/1.8" - :"Canon Canon PowerShot A10" => 5.23, # 1/1.8" - :"Canon Canon PowerShot A20" => 7.176, # 1/1.8" - :"Canon Canon PowerShot A400" => 4.54, # 1/3.2" - :"Canon Canon PowerShot A40" => 5.27, # 1/2.7" - :"Canon Canon PowerShot A510" => 5.76, # 1/2.5" - :"Canon Canon PowerShot A520" => 5.76, # 1/2.5" - :"Canon Canon PowerShot A530" => 5.76, # 1/2.5" - :"Canon Canon PowerShot A60" => 5.27, # 1/2.7" - :"Canon Canon PowerShot A620" => 7.176, # 1/1.8" - :"Canon Canon PowerShot A630" => 7.176, # 1/1.8" - :"Canon Canon PowerShot A640" => 7.176, # 1/1.8" - :"Canon Canon PowerShot A700" => 5.76, # 1/2.5" - :"Canon Canon PowerShot A70" 
=> 5.27, # 1/2.7" - :"Canon Canon PowerShot A710 IS" => 5.76, # 1/2.5" - :"Canon Canon PowerShot A75" => 5.27, # 1/2.7" - :"Canon Canon PowerShot A80" => 7.176, # 1/1.8" - :"Canon Canon PowerShot A85" => 5.27, # 1/2.7" - :"Canon Canon PowerShot A95" => 7.176, # 1/1.8" - :"Canon Canon PowerShot G1" => 7.176, # 1/1.8" - :"Canon Canon PowerShot G2" => 7.176, # 1/1.8" - :"Canon Canon PowerShot G3" => 7.176, # 1/1.8" - :"Canon Canon PowerShot G5" => 7.176, # 1/1.8" - :"Canon Canon PowerShot G6" => 7.176, # 1/1.8" - :"Canon Canon PowerShot G7" => 7.176, # 1/1.8" - :"Canon Canon PowerShot G9" => 7.600, # 1/1.7" - :"Canon Canon PowerShot Pro1" => 8.8, # 2/3" - :"Canon Canon PowerShot S110" => 5.27, # 1/2.7" - :"Canon Canon PowerShot S1 IS" => 5.27, # 1/2.7" - :"Canon Canon PowerShot S200" => 5.27, # 1/2.7" - :"Canon Canon PowerShot S2 IS" => 5.76, # 1/2.5" - :"Canon Canon PowerShot S30" => 7.176, # 1/1.8" - :"Canon Canon PowerShot S3 IS" => 5.76, # 1/2.5" - :"Canon Canon PowerShot S400" => 7.176, # 1/1.8" - :"Canon Canon PowerShot S40" => 7.176, # 1/1.8" - :"Canon Canon PowerShot S410" => 7.176, # 1/1.8" - :"Canon Canon PowerShot S45" => 7.176, # 1/1.8" - :"Canon Canon PowerShot S500" => 7.176, # 1/1.8" - :"Canon Canon PowerShot S50" => 7.176, # 1/1.8" - :"Canon Canon PowerShot S60" => 7.176, # 1/1.8" - :"Canon Canon PowerShot S70" => 7.176, # 1/1.8" - :"Canon Canon PowerShot S80" => 7.176, # 1/1.8" - :"Canon Canon PowerShot SD1000" => 5.75, # 1/2.5" - :"Canon Canon PowerShot SD100" => 5.27, # 1/2.7" - :"Canon Canon PowerShot SD10" => 5.75, # 1/2.5" - :"Canon Canon PowerShot SD110" => 5.27, # 1/2.7" - :"Canon Canon PowerShot SD200" => 5.76, # 1/2.5" - :"Canon Canon PowerShot SD300" => 5.76, # 1/2.5" - :"Canon Canon PowerShot SD400" => 5.76, # 1/2.5" - :"Canon Canon PowerShot SD450" => 5.76, # 1/2.5" - :"Canon Canon PowerShot SD500" => 7.176, # 1/1.8" - :"Canon Canon PowerShot SD550" => 7.176, # 1/1.8" - :"Canon Canon PowerShot SD600" => 5.76, # 1/2.5" - :"Canon Canon 
PowerShot SD630" => 5.76, # 1/2.5" - :"Canon Canon PowerShot SD700 IS" => 5.76, # 1/2.5" - :"Canon Canon PowerShot SD750" => 5.75, # 1/2.5" - :"Canon Canon PowerShot SD800 IS" => 5.76, # 1/2.5" - :"Canon EOS 300D DIGITAL" => 22.66, - :"Canon EOS DIGITAL REBEL" => 22.66, - :"Canon PowerShot A510" => 5.76, # 1/2.5" ??? - :"Canon PowerShot S30" => 7.176, # 1/1.8" - :"CASIO COMPUTER CO.,LTD. EX-S500" => 5.76, # 1/2.5" - :"CASIO COMPUTER CO.,LTD. EX-Z1000" => 7.716, # 1/1.8" - :"CASIO COMPUTER CO.,LTD EX-Z30" => 5.76, # 1/2.5 " - :"CASIO COMPUTER CO.,LTD. EX-Z600" => 5.76, # 1/2.5" - :"CASIO COMPUTER CO.,LTD. EX-Z60" => 7.176, # 1/1.8" - :"CASIO COMPUTER CO.,LTD EX-Z750" => 7.176, # 1/1.8" - :"CASIO COMPUTER CO.,LTD. EX-Z850" => 7.176, - :"EASTMAN KODAK COMPANY KODAK CX7330 ZOOM DIGITAL CAMERA" => 5.27, # 1/2.7" - :"EASTMAN KODAK COMPANY KODAK CX7530 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - :"EASTMAN KODAK COMPANY KODAK DX3900 ZOOM DIGITAL CAMERA" => 7.176, # 1/1.8" - :"EASTMAN KODAK COMPANY KODAK DX4900 ZOOM DIGITAL CAMERA" => 7.176, # 1/1.8" - :"EASTMAN KODAK COMPANY KODAK DX6340 ZOOM DIGITAL CAMERA" => 5.27, # 1/2.7" - :"EASTMAN KODAK COMPANY KODAK DX6490 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - :"EASTMAN KODAK COMPANY KODAK DX7630 ZOOM DIGITAL CAMERA" => 7.176, # 1/1.8" - :"EASTMAN KODAK COMPANY KODAK Z650 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - :"EASTMAN KODAK COMPANY KODAK Z700 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - :"EASTMAN KODAK COMPANY KODAK Z740 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" - :"EASTMAN KODAK COMPANY KODAK Z740 ZOOM DIGITAL CAMERA" => 5.76, # 1/2.5" ? 
- :"FUJIFILM FinePix2600Zoom" => 5.27, # 1/2.7" - :"FUJIFILM FinePix40i" => 7.600, # 1/1.7" - :"FUJIFILM FinePix A310" => 5.27, # 1/2.7" - :"FUJIFILM FinePix A330" => 5.27, # 1/2.7" - :"FUJIFILM FinePix A600" => 7.600, # 1/1.7" - :"FUJIFILM FinePix E500" => 5.76, # 1/2.5" - :"FUJIFILM FinePix E510" => 5.76, # 1/2.5" - :"FUJIFILM FinePix E550" => 7.600, # 1/1.7" - :"FUJIFILM FinePix E900" => 7.78, # 1/1.6" - :"FUJIFILM FinePix F10" => 7.600, # 1/1.7" - :"FUJIFILM FinePix F30" => 7.600, # 1/1.7" - :"FUJIFILM FinePix F450" => 5.76, # 1/2.5" - :"FUJIFILM FinePix F601 ZOOM" => 7.600, # 1/1.7" - :"FUJIFILM FinePix S3Pro" => 23.0, - :"FUJIFILM FinePix S5000" => 5.27, # 1/2.7" - :"FUJIFILM FinePix S5200" => 5.76, # 1/2.5" - :"FUJIFILM FinePix S5500" => 5.27, # 1/2.7" - :"FUJIFILM FinePix S6500fd" => 7.600, # 1/1.7" - :"FUJIFILM FinePix S7000" => 7.600, # 1/1.7" - :"FUJIFILM FinePix Z2" => 5.76, # 1/2.5" - :"Hewlett-Packard hp 635 Digital Camera" => 4.54, # 1/3.2" - :"Hewlett-Packard hp PhotoSmart 43x series" => 5.27, # 1/2.7" - :"Hewlett-Packard HP PhotoSmart 618 (V1.1)" => 5.27, # 1/2.7" - :"Hewlett-Packard HP PhotoSmart C945 (V01.61)" => 7.176, # 1/1.8" - :"Hewlett-Packard HP PhotoSmart R707 (V01.00)" => 7.176, # 1/1.8" - :"KONICA MILOLTA DYNAX 5D" => 23.5, - :"Konica Minolta Camera, Inc. DiMAGE A2" => 8.80, # 2/3" - :"KONICA MINOLTA CAMERA, Inc. DiMAGE G400" => 5.76, # 1/2.5" - :"Konica Minolta Camera, Inc. DiMAGE Z2" => 5.76, # 1/2.5" - :"KONICA MINOLTA DiMAGE A200" => 8.80, # 2/3" - :"KONICA MINOLTA DiMAGE X1" => 7.176, # 1/1.8" - :"KONICA MINOLTA DYNAX 5D" => 23.5, - :"Minolta Co., Ltd. DiMAGE F100" => 7.176, # 1/2.7" - :"Minolta Co., Ltd. DiMAGE Xi" => 5.27, # 1/2.7" - :"Minolta Co., Ltd. DiMAGE Xt" => 5.27, # 1/2.7" - :"Minolta Co., Ltd. 
DiMAGE Z1" => 5.27, # 1/2.7" - :"NIKON COOLPIX L3" => 5.76, # 1/2.5" - :"NIKON COOLPIX P2" => 7.176, # 1/1.8" - :"NIKON COOLPIX S4" => 5.76, # 1/2.5" - :"NIKON COOLPIX S7c" => 5.76, # 1/2.5" - :"NIKON CORPORATION NIKON D100" => 23.7, - :"NIKON CORPORATION NIKON D1" => 23.7, - :"NIKON CORPORATION NIKON D1H" => 23.7, - :"NIKON CORPORATION NIKON D200" => 23.6, - :"NIKON CORPORATION NIKON D2H" => 23.3, - :"NIKON CORPORATION NIKON D2X" => 23.7, - :"NIKON CORPORATION NIKON D40" => 23.7, - :"NIKON CORPORATION NIKON D50" => 23.7, - :"NIKON CORPORATION NIKON D60" => 23.6, - :"NIKON CORPORATION NIKON D70" => 23.7, - :"NIKON CORPORATION NIKON D70s" => 23.7, - :"NIKON CORPORATION NIKON D80" => 23.6, - :"NIKON CORPORATION NIKON D700" => 36.0, - :"NIKON E2500" => 5.27, # 1/2.7" - :"NIKON E2500" => 5.27, # 1/2.7" - :"NIKON E3100" => 5.27, # 1/2.7" - :"NIKON E3200" => 5.27, - :"NIKON E3700" => 5.27, # 1/2.7" - :"NIKON E4200" => 7.176, # 1/1.8" - :"NIKON E4300" => 7.18, - :"NIKON E4500" => 7.176, # 1/1.8" - :"NIKON E4600" => 5.76, # 1/2.5" - :"NIKON E5000" => 8.80, # 2/3" - :"NIKON E5200" => 7.176, # 1/1.8" - :"NIKON E5400" => 7.176, # 1/1.8" - :"NIKON E5600" => 5.76, # 1/2.5" - :"NIKON E5700" => 8.80, # 2/3" - :"NIKON E5900" => 7.176, # 1/1.8" - :"NIKON E7600" => 7.176, # 1/1.8" - :"NIKON E775" => 5.27, # 1/2.7" - :"NIKON E7900" => 7.176, # 1/1.8" - :"NIKON E7900" => 7.176, # 1/1.8" - :"NIKON E8800" => 8.80, # 2/3" - :"NIKON E990" => 7.176, # 1/1.8" - :"NIKON E995" => 7.176, # 1/1.8" - :"NIKON S1" => 5.76, # 1/2.5" - :"Nokia N80" => 5.27, # 1/2.7" - :"Nokia N80" => 5.27, # 1/2.7" - :"Nokia N93" => 4.536, # 1/3.1" - :"Nokia N95" => 5.7, # 1/2.7" - :"OLYMPUS CORPORATION C-5000Z" => 7.176, # 1/1.8" - :"OLYMPUS CORPORATION C5060WZ" => 7.176, # 1/1.8" - :"OLYMPUS CORPORATION C750UZ" => 5.27, # 1/2.7" - :"OLYMPUS CORPORATION C765UZ" => 5.76, # 1//2.5" - :"OLYMPUS CORPORATION C8080WZ" => 8.80, # 2/3" - :"OLYMPUS CORPORATION X250,D560Z,C350Z" => 5.76, # 1/2.5" - :"OLYMPUS CORPORATION 
X-3,C-60Z" => 7.176, # 1.8" - :"OLYMPUS CORPORATION X400,D580Z,C460Z" => 5.27, # 1/2.7" - :"OLYMPUS IMAGING CORP. E-500" => 17.3, # 4/3? - :"OLYMPUS IMAGING CORP. E-510" => 17.3, - :"OLYMPUS IMAGING CORP. FE115,X715" => 5.76, # 1/2.5" - :"OLYMPUS IMAGING CORP. SP310" => 7.176, # 1/1.8" - :"OLYMPUS IMAGING CORP. SP510UZ" => 5.75, # 1/2.5" - :"OLYMPUS IMAGING CORP. SP550UZ" => 5.76, # 1/2.5" - :"OLYMPUS IMAGING CORP. uD600,S600" => 5.75, # 1/2.5" - :"OLYMPUS_IMAGING_CORP. X450,D535Z,C370Z" => 5.27, # 1/2.7" - :"OLYMPUS IMAGING CORP. X550,D545Z,C480Z" => 5.76, # 1/2.5" - :"OLYMPUS OPTICAL CO.,LTD C2040Z" => 6.40, # 1/2" - :"OLYMPUS OPTICAL CO.,LTD C211Z" => 5.27, # 1/2.7" - :"OLYMPUS OPTICAL CO.,LTD C2Z,D520Z,C220Z" => 4.54, # 1/3.2" - :"OLYMPUS OPTICAL CO.,LTD C3000Z" => 7.176, # 1/1.8" - :"OLYMPUS OPTICAL CO.,LTD C300Z,D550Z" => 5.4, - :"OLYMPUS OPTICAL CO.,LTD C4100Z,C4000Z" => 7.176, # 1/1.8" - :"OLYMPUS OPTICAL CO.,LTD C750UZ" => 5.27, # 1/2.7" - :"OLYMPUS OPTICAL CO.,LTD X-2,C-50Z" => 7.176, # 1/1.8" - :"OLYMPUS SP550UZ" => 5.76, # 1/2.5" - :"OLYMPUS X100,D540Z,C310Z" => 5.27, # 1/2.7" - :"Panasonic DMC-FX01" => 5.76, # 1/2.5" - :"Panasonic DMC-FX07" => 5.75, # 1/2.5" - :"Panasonic DMC-FX9" => 5.76, # 1/2.5" - :"Panasonic DMC-FS6" => 5.76, # 1/2.5" - :"Panasonic DMC-FZ20" => 5.760, # 1/2.5" - :"Panasonic DMC-FZ2" => 4.54, # 1/3.2" - :"Panasonic DMC-FZ30" => 7.176, # 1/1.8" - :"Panasonic DMC-FZ50" => 7.176, # 1/1.8" - :"Panasonic DMC-FZ5" => 5.760, # 1/2.5" - :"Panasonic DMC-FZ7" => 5.76, # 1/2.5" - :"Panasonic DMC-LC1" => 8.80, # 2/3" - :"Panasonic DMC-LC33" => 5.760, # 1/2.5" - :"Panasonic DMC-LX1" => 8.50, # 1/6.5" - :"Panasonic DMC-LZ2" => 5.76, # 1/2.5" - :"Panasonic DMC-TZ1" => 5.75, # 1/2.5" - :"Panasonic DMC-TZ3" => 5.68, # 1/2.35" - :"Panasonic DMC-TZ5" => 6.12, # 1/2.33" - :"PENTAX Corporation PENTAX *ist DL" => 23.5, - :"PENTAX Corporation PENTAX *ist DS2" => 23.5, - :"PENTAX Corporation PENTAX *ist DS" => 23.5, - :"PENTAX Corporation PENTAX K100D" => 
23.5, - :"PENTAX Corporation PENTAX Optio 450" => 7.176, # 1/1.8" - :"PENTAX Corporation PENTAX Optio 550" => 7.176, # 1/1.8" - :"PENTAX Corporation PENTAX Optio E10" => 5.76, # 1/2.5" - :"PENTAX Corporation PENTAX Optio S40" => 5.76, # 1/2.5" - :"PENTAX Corporation PENTAX Optio S4" => 5.76, # 1/2.5" - :"PENTAX Corporation PENTAX Optio S50" => 5.76, # 1/2.5" - :"PENTAX Corporation PENTAX Optio S5i" => 5.76, # 1/2.5" - :"PENTAX Corporation PENTAX Optio S5z" => 5.76, # 1/2.5" - :"PENTAX Corporation PENTAX Optio SV" => 5.76, # 1/2.5" - :"PENTAX Corporation PENTAX Optio WP" => 5.75, # 1/2.5" - :"RICOH CaplioG3 modelM" => 5.27, # 1/2.7" - :"RICOH Caplio GX" => 7.176, # 1/1.8" - :"RICOH Caplio R30" => 5.75, # 1/2.5" - :"Samsung Digimax 301" => 5.27, # 1/2.7" - :"Samsung Techwin " => 5.76, # 1/2.5" - :"SAMSUNG TECHWIN Pro 815" => 8.80, # 2/3" - :"SONY DSC-F828" => 8.80, # 2/3" - :"SONY DSC-N12" => 7.176, # 1/1.8" - :"SONY DSC-P100" => 7.176, # 1/1.8" - :"SONY DSC-P10" => 7.176, # 1/1.8" - :"SONY DSC-P12" => 7.176, # 1/1.8" - :"SONY DSC-P150" => 7.176, # 1/1.8" - :"SONY DSC-P200" => 7.176, # 1/1.8"); - :"SONY DSC-P52" => 5.27, # 1/2.7" - :"SONY DSC-P72" => 5.27, # 1/2.7" - :"SONY DSC-P73" => 5.27, - :"SONY DSC-P8" => 5.27, # 1/2.7" - :"SONY DSC-R1" => 21.5, - :"SONY DSC-S40" => 5.27, # 1/2.7" - :"SONY DSC-S600" => 5.760, # 1/2.5" - :"SONY DSC-T9" => 7.18, - :"SONY DSC-V1" => 7.176, # 1/1.8" - :"SONY DSC-W1" => 7.176, # 1/1.8" - :"SONY DSC-W30" => 5.760, # 1/2.5" - :"SONY DSC-W50" => 5.75, # 1/2.5" - :"SONY DSC-W5" => 7.176, # 1/1.8" - :"SONY DSC-W7" => 7.176, # 1/1.8" - :"SONY DSC-W80" => 5.75, # 1/2.5" -} diff --git a/ccd_defs_check.py b/ccd_defs_check.py new file mode 100644 index 000000000..efacfee6b --- /dev/null +++ b/ccd_defs_check.py @@ -0,0 +1,25 @@ +#!/usr/bin/python +import sys +import os +import json + +BIN_PATH_ABS = os.path.abspath(os.path.dirname(os.path.abspath(__file__))) + +def get_ccd_widths(): + """Return the CCD Width of the camera listed in the JSON 
defs file.""" + with open(BIN_PATH_ABS + '/data/ccd_defs.json') as jsonFile: + return json.load(jsonFile) + +try: + ccd_defs = get_ccd_widths() + print "CCD_DEFS compiles OK" + print "Definitions in file: {0}".format(len(ccd_defs)) + exit_code=0 +except IOError as e: + print "I/O error with CCD_DEFS file: {0}".format(e.strerror) + exit_code=255 +except: + print "Error with CCD_DEFS file: {0}".format(sys.exc_info()[1]) + exit_code=255 + +sys.exit(exit_code) diff --git a/code_of_conduct.md b/code_of_conduct.md new file mode 100644 index 000000000..9dd416e9b --- /dev/null +++ b/code_of_conduct.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at `svm at clevelandmetroparks dot com`. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/configure.sh b/configure.sh new file mode 100755 index 000000000..e9ddb6785 --- /dev/null +++ b/configure.sh @@ -0,0 +1,153 @@ +#!/bin/bash + +install() { + ## Set up library paths + + export PYTHONPATH=$RUNPATH/SuperBuild/install/lib/python2.7/dist-packages:$RUNPATH/SuperBuild/src/opensfm:$PYTHONPATH + export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$RUNPATH/SuperBuild/install/lib + + if [[ $2 =~ ^[0-9]+$ ]] ; then + processes=$2 + else + processes=$(nproc) + fi + + ## Before installing + echo "Updating the system" + sudo apt-get update + + sudo add-apt-repository -y ppa:ubuntugis/ppa + sudo apt-get update + + echo "Installing Required Requisites" + sudo apt-get install -y -qq build-essential \ + git \ + cmake \ + python-pip \ + libgdal-dev \ + gdal-bin \ + libgeotiff-dev \ + pkg-config \ + libjsoncpp-dev + + echo "Getting CMake 3.1 for MVS-Texturing" + sudo apt-get install -y software-properties-common python-software-properties + sudo add-apt-repository -y ppa:george-edison55/cmake-3.x + sudo 
apt-get update -y + sudo apt-get install -y --only-upgrade cmake + + echo "Installing OpenCV Dependencies" + sudo apt-get install -y -qq libgtk2.0-dev \ + libavcodec-dev \ + libavformat-dev \ + libswscale-dev \ + python-dev \ + python-numpy \ + libtbb2 \ + libtbb-dev \ + libjpeg-dev \ + libpng-dev \ + libtiff-dev \ + libjasper-dev \ + libflann-dev \ + libproj-dev \ + libxext-dev \ + liblapack-dev \ + libeigen3-dev \ + libvtk6-dev + + echo "Removing libdc1394-22-dev due to python opencv issue" + sudo apt-get remove libdc1394-22-dev + + ## Installing OpenSfM Requisites + echo "Installing OpenSfM Dependencies" + sudo apt-get install -y -qq python-networkx \ + libgoogle-glog-dev \ + libsuitesparse-dev \ + libboost-filesystem-dev \ + libboost-iostreams-dev \ + libboost-regex-dev \ + libboost-python-dev \ + libboost-date-time-dev \ + libboost-thread-dev \ + python-pyproj + + sudo pip install -U PyYAML \ + exifread \ + gpxpy \ + xmltodict \ + appsettings \ + loky + + echo "Installing CGAL dependencies" + sudo apt-get install -y -qq libgmp-dev libmpfr-dev + + echo "Installing Ecto Dependencies" + sudo pip install -U catkin-pkg + sudo apt-get install -y -qq python-empy \ + python-nose \ + python-pyside + + echo "Installing OpenDroneMap Dependencies" + sudo apt-get install -y -qq python-pyexiv2 \ + python-scipy \ + libexiv2-dev \ + liblas-bin + + echo "Installing lidar2dems Dependencies" + sudo apt-get install -y -qq swig2.0 \ + python-wheel \ + libboost-log-dev + + sudo pip install -U https://github.com/OpenDroneMap/gippy/archive/v0.3.9.tar.gz + + echo "Compiling SuperBuild" + cd ${RUNPATH}/SuperBuild + mkdir -p build && cd build + cmake .. && make -j$processes + + echo "Compiling build" + cd ${RUNPATH} + mkdir -p build && cd build + cmake .. 
&& make -j$processes + + echo "Configuration Finished" +} + +uninstall() { + echo "Removing SuperBuild and build directories" + cd ${RUNPATH}/SuperBuild + rm -rfv build src download install + cd ../ + rm -rfv build +} + +reinstall() { + echo "Reinstalling ODM modules" + uninstall + install +} + +usage() { + echo "Usage:" + echo "bash configure.sh [nproc]" + echo "Subcommands:" + echo " install" + echo " Installs all dependencies and modules for running OpenDroneMap" + echo " reinstall" + echo " Removes SuperBuild and build modules, then re-installs them. Note this does not update OpenDroneMap to the latest version. " + echo " uninstall" + echo " Removes SuperBuild and build modules. Does not uninstall dependencies" + echo " help" + echo " Displays this message" + echo "[nproc] is an optional argument that can set the number of processes for the make -j tag. By default it uses $(nproc)" +} + +if [[ $1 =~ ^(install|reinstall|uninstall|usage)$ ]]; then + RUNPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + "$1" +else + echo "Invalid instructions." >&2 + usage + exit 1 +fi diff --git a/contrib/blender/README.md b/contrib/blender/README.md new file mode 100644 index 000000000..74b93008f --- /dev/null +++ b/contrib/blender/README.md @@ -0,0 +1,41 @@ +# Blender scripts +# odm_photo +Renders photos from ODM generated texture models. +Currently can produce 360 panoramic photos and 360 3D panoramic (VR) photos. +NB: the default resolution for 360 photos is 6000x3000 (maximum supported by Facebook). + +## Requirements +* Blender +* ExifTool (must be on your PATH) + +## Usage +To generate a 360 panoramic photo: + + blender -b photo_360.blend --python odm_photo.py -- + +Output is `/odm_photo/odm_photo_360.jpg`. + +To generate a 360 3D panoramic photo: + + blender -b photo_vr.blend --python odm_photo.py -- + +Output is `/odm_photo/odm_photo_vr_L.jpg` and `/odm_photo/odm_photo_vr_R.jpg`. 
+ +**NB: argument order matters!** + +# odm_video +Renders videos from ODM generated texture models. +Currently can produce 360 panoramic videos. +NB: the default resolution is 4096x2048 (maximum supported by Facebook). + +## Requirements +* Blender +* Python 2.7 (must be on your PATH) +* Spatial Media Metadata Injector (https://github.com/google/spatial-media/tree/master/spatialmedia) (place in `spatialmedia` subdirectory) + +## Usage +To generate a 360 panoramic video: + + blender -b photo_360.blend --python odm_video.py -- + +Output is `/odm_video/odm_video_360.mp4`. diff --git a/contrib/blender/common.py b/contrib/blender/common.py new file mode 100644 index 000000000..e37749828 --- /dev/null +++ b/contrib/blender/common.py @@ -0,0 +1,45 @@ +import bpy +import materials_utils + +def loadMesh(file): + + bpy.utils.register_module('materials_utils') + + bpy.ops.import_scene.obj(filepath=file, + axis_forward='Y', + axis_up='Z') + + bpy.ops.xps_tools.convert_to_cycles_all() + + model = bpy.data.objects[-1] + minX = float('inf') + maxX = float('-inf') + minY = float('inf') + maxY = float('-inf') + minZ = float('inf') + maxZ = float('-inf') + for coord in model.bound_box: + x = coord[0] + y = coord[1] + z = coord[2] + minX = min(x, minX) + maxX = max(x, maxX) + minY = min(y, minY) + maxY = max(y, maxY) + minZ = min(z, minZ) + maxZ = max(z, maxZ) + + model.location[2] += (maxZ - minZ)/2 + + surfaceShaderType = 'ShaderNodeEmission' + surfaceShaderName = 'Emission' + + for m in bpy.data.materials: + nt = m.node_tree + nt.nodes.remove(nt.nodes['Color Mult']) + nt.nodes.remove(nt.nodes['Diffuse BSDF']) + nt.nodes.new(surfaceShaderType) + nt.links.new(nt.nodes['Material Output'].inputs[0], + nt.nodes[surfaceShaderName].outputs[0]) + nt.links.new(nt.nodes[surfaceShaderName].inputs[0], + nt.nodes['Diffuse Texture'].outputs[0]) diff --git a/contrib/blender/odm_photo.py b/contrib/blender/odm_photo.py new file mode 100644 index 000000000..b63cdcf37 --- /dev/null +++ 
b/contrib/blender/odm_photo.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python + +# Renders a photo. +# ExifTool must be on your PATH. +# To generate a 360 panoramic photo: +# blender -b photo_360.blend --python odm_photo.py -- +# To generate a 360 3D panoramic photo: +# blender -b photo_vr.blend --python odm_photo.py -- +# NB: argument order matters! + +import sys +import bpy +import subprocess +from common import loadMesh + + +def main(): + + if len(sys.argv) < 5 or sys.argv[-2] != '--': + sys.exit('Please provide the ODM project path.') + + projectHome = sys.argv[-1] + + loadMesh(projectHome + + '/odm_texturing/odm_textured_model_geo.obj') + + blendName = bpy.path.display_name_from_filepath(bpy.data.filepath) + fileName = projectHome + '/odm_photo/odm_' + blendName + render = bpy.data.scenes['Scene'].render + render.filepath = fileName + bpy.ops.render.render(write_still=True) + + width = render.resolution_x + height = render.resolution_y + if(render.use_multiview): + writeExif(fileName+render.views[0].file_suffix+'.jpg', width, height) + writeExif(fileName+render.views[1].file_suffix+'.jpg', width, height) + else: + writeExif(fileName+'.jpg', width, height) + + +def writeExif(fileName, width, height): + w = str(width) + h = str(height) + + subprocess.run(['exiftool', + '-overwrite_original', + '-CroppedAreaImageWidthPixels=' + w, + '-CroppedAreaImageHeightPixels=' + h, + '-FullPanoWidthPixels=' + w, + '-FullPanoHeightPixels=' + h, + '-CroppedAreaLeftPixels=0', + '-CroppedAreaTopPixels=0', + '-ProjectionType=equirectangular', + '-UsePanoramaViewer=True', + '-PoseHeadingDegrees=0', + '-LargestValidInteriorRectLeft=0', + '-LargestValidInteriorRectTop=0', + '-LargestValidInteriorRectWidth=' + w, + '-LargestValidInteriorRectHeight=' + h, + fileName]) + + +if __name__ == '__main__': + main() diff --git a/contrib/blender/odm_video.py b/contrib/blender/odm_video.py new file mode 100644 index 000000000..6faf8b15f --- /dev/null +++ b/contrib/blender/odm_video.py @@ -0,0 +1,113 
@@ +#!/usr/bin/env python + +# Renders a video. +# To generate a 360 panoramic video: +# blender -b photo_360.blend --python odm_video.py -- + +import sys +import subprocess +import os +import bpy +from common import loadMesh + + +def main(): + + if len(sys.argv) < 7 or sys.argv[-4] != '--': + sys.exit('Please provide the ODM project path, camera waypoints (xyz format), and number of frames.') + + projectHome = sys.argv[-3] + waypointFile = sys.argv[-2] + numFrames = int(sys.argv[-1]) + + loadMesh(projectHome + + '/odm_texturing/odm_textured_model_geo.obj') + + waypoints = loadWaypoints(waypointFile) + numWaypoints = len(waypoints) + + scene = bpy.data.scenes['Scene'] + + # create path thru waypoints + curve = bpy.data.curves.new(name='CameraPath', type='CURVE') + curve.dimensions = '3D' + curve.twist_mode = 'Z_UP' + nurbs = curve.splines.new('NURBS') + nurbs.points.add(numWaypoints-1) + weight = 1 + for i in range(numWaypoints): + nurbs.points[i].co[0] = waypoints[i][0] + nurbs.points[i].co[1] = waypoints[i][1] + nurbs.points[i].co[2] = waypoints[i][2] + nurbs.points[i].co[3] = weight + nurbs.use_endpoint_u = True + path = bpy.data.objects.new(name='CameraPath', object_data=curve) + scene.objects.link(path) + + camera = bpy.data.objects['Camera'] + camera.location[0] = 0 + camera.location[1] = 0 + camera.location[2] = 0 + followPath = camera.constraints.new(type='FOLLOW_PATH') + followPath.name = 'CameraFollowPath' + followPath.target = path + followPath.use_curve_follow = True + animateContext = bpy.context.copy() + animateContext['constraint'] = followPath + bpy.ops.constraint.followpath_path_animate(animateContext, + constraint='CameraFollowPath', + frame_start=0, + length=numFrames) + + blendName = bpy.path.display_name_from_filepath(bpy.data.filepath) + fileName = projectHome + '/odm_video/odm_' + blendName.replace('photo', 'video') + scene.frame_start = 0 + scene.frame_end = numFrames + render = scene.render + render.filepath = fileName + '.mp4' + 
render.image_settings.file_format = 'FFMPEG' + if(render.use_multiview): + render.image_settings.stereo_3d_format.display_mode = 'TOPBOTTOM' + render.image_settings.views_format = 'STEREO_3D' + render.views[0].file_suffix = '' + format3d = 'top-bottom' + else: + width = render.resolution_x + height = render.resolution_y + format3d = 'none' + render.resolution_x = 4096 + render.resolution_y = 2048 + + render.ffmpeg.audio_codec = 'AAC' + render.ffmpeg.codec = 'H264' + render.ffmpeg.format = 'MPEG4' + render.ffmpeg.video_bitrate = 45000 + bpy.ops.render.render(animation=True) + + writeMetadata(fileName+'.mp4', format3d) + + +def loadWaypoints(filename): + waypoints = [] + with open(filename) as f: + for line in f: + xyz = line.split() + waypoints.append((float(xyz[0]), float(xyz[1]), float(xyz[2]))) + return waypoints + + +def writeMetadata(filename, format3d): + subprocess.run(['python', + 'spatialmedia', + '-i', + '--stereo='+format3d, + filename, + filename+'.injected']) + # check metadata injector was successful + if os.path.exists(filename+'.injected'): + os.remove(filename) + os.rename(filename+'.injected', filename) + + +if __name__ == '__main__': + main() diff --git a/contrib/blender/photo_360.blend b/contrib/blender/photo_360.blend new file mode 100644 index 000000000..6e2769479 Binary files /dev/null and b/contrib/blender/photo_360.blend differ diff --git a/contrib/blender/photo_vr.blend b/contrib/blender/photo_vr.blend new file mode 100644 index 000000000..00f8e7ff4 Binary files /dev/null and b/contrib/blender/photo_vr.blend differ diff --git a/contrib/grass/README.md b/contrib/grass/README.md new file mode 100644 index 000000000..9700f8adf --- /dev/null +++ b/contrib/grass/README.md @@ -0,0 +1,16 @@ +# GRASS scripts +# odm_grass +Generates contour and textured relief maps. 
+ +## Requirements +* Compile and install GRASS 7 version or higher, https://grasswiki.osgeo.org/wiki/Compile_and_Install +* Environment variables: + * PYTHONHOME set to the location of Python + * PYTHONPATH set to the location of GRASS Python libs + * PATH includes GRASS bin and lib directories + * GISBASE set to the location of GRASS + +## Usage + python odm_grass.py + +Output is `/odm_georeferencing/odm_contour.shp` and `/odm_orthophoto/odm_relief.tif`. diff --git a/contrib/grass/odm_grass.py b/contrib/grass/odm_grass.py new file mode 100644 index 000000000..78bb61137 --- /dev/null +++ b/contrib/grass/odm_grass.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python + +# To run, set the following env variables: +# PYTHONHOME location of Python +# PYTHONPATH location of GRASS Python libs +# PATH include GRASS bin and lib +# GISBASE location of GRASS + +import os +import sys +import grass.script as gscript +import grass.script.core +import grass.script.setup + +rsurfName = 'odm_rsurf' +contourName = 'odm_contour' +orthophotoName = 'odm_orthophoto' +reliefName = 'odm_relief' +shadedReliefName = reliefName + '_shaded' + +overwrite = True + + +def main(): + if len(sys.argv) < 2: + sys.exit('Please provide the ODM project path.') + + projectHome = sys.argv[1] + + gisdb = projectHome+'/grassdata' + location = 'odm' + gisrc = gscript.setup.init(os.environ['GISBASE'], gisdb, location) + + # get srs and initial extents + with open(projectHome+'/odm_georeferencing/coords.txt') as f: + srs = f.readline().split() + mean = f.readline().split() + meanX = float(mean[0]) + meanY = float(mean[1]) + minX = float('inf') + maxX = float('-inf') + minY = float('inf') + maxY = float('-inf') + for line in f: + xy = line.split() + x = float(xy[0]) + y = float(xy[1]) + minX = min(x, minX) + maxX = max(x, maxX) + minY = min(y, minY) + maxY = max(y, maxY) + + datum = srs[0] + proj = srs[1] + zone = srs[2] + gscript.core.create_location(gisdb, location, datum=datum, + proj4='+proj='+proj+' +zone='+zone, 
+ overwrite=overwrite) + + n = meanY + maxY + s = meanY + minY + e = meanX + maxX + w = meanX + minX + gscript.run_command('g.region', flags='s', n=n, s=s, e=e, w=w, res=0.01, + res3=0.01, overwrite=overwrite) + + contour(projectHome) + relief(projectHome) + + os.remove(gisrc) + + +def contour(projectHome): + """ + Creates a contour map based on the ODM project DEM model. + """ + print 'Creating contour map' + + step = 0.25 + + gscript.run_command('r.in.gdal', flags='o', + input=projectHome+'/odm_georeferencing/odm_georeferencing_model_dem.tif', + output=rsurfName, memory=2047, + overwrite=overwrite) + + gscript.run_command('r.contour', input=rsurfName, output=contourName, + step=step, overwrite=overwrite) + + gscript.run_command('v.out.ogr', input=contourName, + output=projectHome + + '/odm_georeferencing/odm_contour.shp', + overwrite=overwrite) + + +def relief(projectHome): + """ + Creates a textured relief map in GeoTIFF format. + NB: this is an RGBA raster and so is readable by image software. 
+ """ + print 'Creating relief map' + + gscript.run_command('r.in.gdal', flags='o', + input=projectHome+'/odm_orthophoto/odm_orthophoto.tif', + output=orthophotoName, memory=2047, + overwrite=overwrite) + + gscript.run_command('r.composite', red=orthophotoName+'.red', + green=orthophotoName+'.green', + blue=orthophotoName+'.blue', + output=orthophotoName+'.rgb', + overwrite=overwrite) + + gscript.run_command('r.relief', input=rsurfName, output=reliefName, + overwrite=overwrite) + + gscript.run_command('r.shade', shade=reliefName, + color=orthophotoName+'.rgb', output=shadedReliefName, + overwrite=overwrite) + + calc = ';'.join([ + '$shadedRelief.red = ' + + 'if(isnull($orthophoto.red), 0, r#$shadedRelief)', + '$shadedRelief.green = ' + + 'if(isnull($orthophoto.green), 0, g#$shadedRelief)', + '$shadedRelief.blue = ' + + 'if(isnull($orthophoto.blue), 0, b#$shadedRelief)', + '$shadedRelief.alpha = ' + + 'if(isnull($orthophoto.alpha), 0, 255)' + ]) + gscript.mapcalc(calc, shadedRelief=shadedReliefName, + orthophoto=orthophotoName, overwrite=overwrite) + + gscript.run_command('i.group', group=shadedReliefName+'.group', + input=shadedReliefName+'.red,' + + shadedReliefName+'.green,' + + shadedReliefName+'.blue,' + + shadedReliefName+'.alpha') + + gscript.run_command('r.out.gdal', flags='cm', + input=shadedReliefName+'.group', + output=projectHome+'/odm_orthophoto/odm_relief.tif', + format='GTiff', type='Byte', + createopt='TILED=yes,COMPRESS=DEFLATE,PREDICTOR=2,' + + 'BLOCKXSIZE=512,BLOCKYSIZE=512', + nodata=0, overwrite=overwrite) + + +if __name__ == '__main__': + main() diff --git a/contrib/ndvi/README.md b/contrib/ndvi/README.md new file mode 100644 index 000000000..2b5df267c --- /dev/null +++ b/contrib/ndvi/README.md @@ -0,0 +1,31 @@ +# NDVI + +This script produces a NDVI raster from a CIR orthophoto (odm_orthophoto.tif in your project) + +## Requirements +* python_gdal package from apt +* numpy python package (included in ODM build) + +## Usage +``` +ndvi.py [-h] 
[--overwrite] N N + +positional arguments: + The CIR orthophoto. Must be a GeoTiff. + N NIR band number + N Vis band number + The output file. Also must be in GeoTiff format + +optional arguments: + -h, --help show this help message and exit + --overwrite, -o Will overwrite output file if it exists. +``` + +**Argument order matters! NIR first, then VIS** + +## Examples: +Use the [Seneca](https://github.com/OpenDroneMap/odm_data_seneca) dataset for a good working CIR. The band order for that set is NIR-G-B, so you will want to use bands 1 and 2 for this script. After running ODM, the command goes as follows: + +`python ndvi.py /path/to/odm_orthophoto.tif 1 2 /path/to/ndvi.tif` + +The output in QGIS (with a spectral pseudocolor): ![](http://i.imgur.com/TdLECII.png) \ No newline at end of file diff --git a/contrib/ndvi/ndvi.py b/contrib/ndvi/ndvi.py new file mode 100644 index 000000000..ab457f2df --- /dev/null +++ b/contrib/ndvi/ndvi.py @@ -0,0 +1,82 @@ +# A script to calculate the NDVI from a color-infrared orthophoto. +# requires python-gdal + +import numpy +import argparse +import os.path +try: + from osgeo import gdal + from osgeo import osr +except ImportError: + raise ImportError("You need to install python-gdal. run `apt-get install python-gdal`") + exit() + + +def parse_args(): + p = argparse.ArgumentParser("A script that calculates the NDVI of a CIR orthophoto") + + p.add_argument("orthophoto", metavar="", + type=argparse.FileType('r'), + help="The CIR orthophoto. Must be a GeoTiff.") + p.add_argument("nir", metavar="N", type=int, + help="NIR band number") + p.add_argument("vis", metavar="N", type=int, + help="Vis band number") + p.add_argument("out", metavar="", + type=argparse.FileType('w'), + help="The output file. Also must be in GeoTiff format") + p.add_argument("--overwrite", "-o", + action='http://github.com/store_true', + default=False, + help="Will overwrite output file if it exists. 
") + return p.parse_args() + + +def calc_ndvi(nir, vis): + """ + Calculates the NDVI of an orthophoto using nir and vis bands. + :param nir: An array containing the nir band + :param vis: An array containing the vis band + :return: An array that will be exported as a tif + """ + + # Take the orthophoto and do nir - vis / nir + vis + # for each cell, calculate ndvi (masking out where divide by 0) + ndvi = numpy.empty(nir.shape, dtype=float) + mask = numpy.not_equal((nirb + visb), 0.0) + return numpy.choose(mask, (-1.0, numpy.true_divide(numpy.subtract(nirb, visb), numpy.add(nirb, visb)))) + + +if __name__ == "__main__": + + rootdir = os.path.dirname(os.path.abspath(__file__)) + + # Parse args + args = parse_args() + + if not args.overwrite and os.path.isfile(os.path.join(rootdir, args.out.name)): + print("File exists, rename or use -o to overwrite.") + exit() + + # import raster + raster = gdal.Open(args.orthophoto.name) + orthophoto = raster.ReadAsArray() + # parse out bands + nirb = orthophoto[args.nir - 1].astype(float) + visb = orthophoto[args.vis - 1].astype(float) + + outfile = args.out + + # Do ndvi calc + ndvi = calc_ndvi(nirb, visb) + + # export raster + out_driver = gdal.GetDriverByName('GTiff')\ + .Create(outfile.name, int(ndvi.shape[1]), int(ndvi.shape[0]), 1, gdal.GDT_Float32) + outband = out_driver.GetRasterBand(1) + outband.WriteArray(ndvi) + outcrs = osr.SpatialReference() + outcrs.ImportFromWkt(raster.GetProjectionRef()) + out_driver.SetProjection(outcrs.ExportToWkt()) + out_driver.SetGeoTransform(raster.GetGeoTransform()) + outband.FlushCache() diff --git a/contrib/visveg/readme.md b/contrib/visveg/readme.md new file mode 100644 index 000000000..1d883be43 --- /dev/null +++ b/contrib/visveg/readme.md @@ -0,0 +1,31 @@ +# Visible Vegetation Indexes + +This script produces a Vegetation Index raster from a RGB orthophoto (odm_orthophoto.tif in your project) + +## Requirements +* rasterio (pip install rasterio) +* numpy python package (included in ODM 
build) + +## Usage +``` +vegind.py index + +positional arguments: + The RGB orthophoto. Must be a GeoTiff. + index Index identifier. Allowed values: ngrdi, tgi, vari +``` +Output will be generated with index suffix in the same directory as input. + +## Examples + +`python vegind.py /path/to/odm_orthophoto.tif tgi` + +Orthophoto photo of Koniaków grass field and forest in QGIS: ![](http://imgur.com/K6x3nB2.jpg) +The Triangular Greenness Index output in QGIS (with a spectral pseudocolor): ![](http://i.imgur.com/f9TzISU.jpg) +Visible Atmospheric Resistant Index: ![](http://imgur.com/Y7BHzLs.jpg) +Normalized green-red difference index: ![](http://imgur.com/v8cmaPS.jpg) + +## Bibliography + +1. Hunt, E. Raymond, et al. "A Visible Band Index for Remote Sensing Leaf Chlorophyll Content At the Canopy Scale." ITC journal 21(2013): 103-112. doi: 10.1016/j.jag.2012.07.020 +(https://doi.org/10.1016/j.jag.2012.07.020) diff --git a/contrib/visveg/vegind.py b/contrib/visveg/vegind.py new file mode 100644 index 000000000..625546572 --- /dev/null +++ b/contrib/visveg/vegind.py @@ -0,0 +1,95 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +import rasterio, os, sys +import numpy as np + +class bcolors: + OKBLUE = '\033[94m' + OKGREEN = '\033[92m' + WARNING = '\033[93m' + FAIL = '\033[91m' + ENDC = '\033[0m' + BOLD = '\033[1m' + UNDERLINE = '\033[4m' + +try: + file = sys.argv[1] + typ = sys.argv[2] + (fileRoot, fileExt) = os.path.splitext(file) + outFileName = fileRoot + "_" + typ + fileExt + if typ not in ['vari', 'tgi', 'ngrdi']: + raise IndexError +except (TypeError, IndexError, NameError): + print bcolors.FAIL + 'Arguments messed up. 
Check arguments order and index name' + bcolors.ENDC + print 'Usage: ./vegind.py orto index' + print ' orto - filepath to RGB orthophoto' + print ' index - Vegetation Index' + print bcolors.OKGREEN + 'Available indexes: vari, ngrdi, tgi' + bcolors.ENDC + sys.exit() + + +def calcNgrdi(red, green): + """ + Normalized green red difference index + Tucker,C.J.,1979. + Red and photographic infrared linear combinations for monitoring vegetation. + Remote Sensing of Environment 8, 127–150 + :param red: red visible channel + :param green: green visible channel + :return: ngrdi index array + """ + mask = np.not_equal(np.add(red,green), 0.0) + return np.choose(mask, (-9999.0, np.true_divide( + np.subtract(green,red), + np.add(red,green)))) + +def calcVari(red,green,blue): + """ + Calculates Visible Atmospheric Resistant Index + Gitelson, A.A., Kaufman, Y.J., Stark, R., Rundquist, D., 2002. + Novel algorithms for remote estimation of vegetation fraction. + Remote Sensing of Environment 80, 76–87. + :param red: red visible channel + :param green: green visible channel + :param blue: blue visible channel + :return: vari index array, that will be saved to tiff + """ + mask = np.not_equal(np.subtract(np.add(green,red),blue), 0.0) + return np.choose(mask, (-9999.0, np.true_divide(np.subtract(green,red),np.subtract(np.add(green,red),blue)))) + +def calcTgi(red,green,blue): + """ + Calculates Triangular Greenness Index + Hunt, E. Raymond Jr.; Doraiswamy, Paul C.; McMurtrey, James E.; Daughtry, Craig S.T.; Perry, Eileen M.; and Akhmedov, Bakhyt, + A visible band index for remote sensing leaf chlorophyll content at the canopy scale (2013). + Publications from USDA-ARS / UNL Faculty. Paper 1156. 
+ http://digitalcommons.unl.edu/usdaarsfacpub/1156 + :param red: red channel + :param green: green channel + :param blue: blue channel + :return: tgi index array, that will be saved to tiff + """ + mask = np.not_equal(green-red+blue-255.0, 0.0) + return np.choose(mask, (-9999.0, np.subtract(green, np.multiply(0.39,red), np.multiply(0.61, blue)))) + +try: + with rasterio.Env(): + ds = rasterio.open(file) + profile = ds.profile + profile.update(dtype=rasterio.float32, count=1, nodata=-9999) + red = np.float32(ds.read(1)) + green = np.float32(ds.read(2)) + blue = np.float32(ds.read(3)) + np.seterr(divide='ignore', invalid='ignore') + if typ == 'ngrdi': + indeks = calcNgrdi(red,green) + elif typ == 'vari': + indeks = calcVari(red, green, blue) + elif typ == 'tgi': + indeks = calcTgi(red, green, blue) + + with rasterio.open(outFileName, 'w', **profile) as dst: + dst.write(indeks.astype(rasterio.float32), 1) +except rasterio.errors.RasterioIOError: + print bcolors.FAIL + 'Orthophoto file not found or access denied' + bcolors.ENDC + sys.exit() diff --git a/convert_vlsift_to_lowesift.pl b/convert_vlsift_to_lowesift.pl deleted file mode 100644 index dc72a1f4c..000000000 --- a/convert_vlsift_to_lowesift.pl +++ /dev/null @@ -1,112 +0,0 @@ -#!/usr/local/bin/perl - -$filename_base = $ARGV[0]; - -$write_binary = 1; - -$filename_src = $filename_base.".key.sift"; -$filename_dest_bin = $filename_base.".key.bin"; -$filename_dest_key = $filename_base.".key"; -$filename_image = $filename_base.".pgm"; - -open (DEST_BIN, ">$filename_dest_bin"); -open (DEST_KEY, ">$filename_dest_key"); - -open (SRC, "$filename_src"); - -$linecount = 0; -$linecount += tr/\n/\n/ while sysread(SRC, $_, 2 ** 16); - -printf ("%d", $linecount); - -if($write_binary){ - seek(SRC, 0, 0); - - print DEST_BIN pack("L", $linecount); - - while ($record = ) { - @parts = split(/ /, $record); - - if(@parts[3] > 3.141){ - @parts[3] -= 6.282; - } - - @parts[3] *= -1; - - @tmp = @parts[0]; - @parts[0] = @parts[1]; - 
@parts[1] = @tmp; - - for ($count = 4; $count < 132; $count += 8) { - @tmp = @parts[$count+7]; - @parts[$count+7] = @parts[$count+1]; - @parts[$count+1] = @tmp; - - @tmp = @parts[$count+6]; - @parts[$count+6] = @parts[$count+2]; - @parts[$count+2] = @tmp; - - @tmp = @parts[$count+3]; - @parts[$count+3] = @parts[$count+5]; - @parts[$count+5] = @tmp; - } - - print DEST_BIN pack("f4 C128", @parts); - } -} - - seek(SRC, 0, 0); - - print DEST_KEY $linecount, " 128\n"; - - while ($record = ) { - @parts = split(/ /, $record); - - $counter = 0; - - if(@parts[3] > 3.141){ - @parts[3] -= 6.282; - } - - @parts[3] *= -1; - - printf (DEST_KEY "%.3f %.3f %.3f %.3f", @parts[1], @parts[0], @parts[2], @parts[3]); - - shift(@parts); - shift(@parts); - shift(@parts); - shift(@parts); - - for ($count = 0; $count < 128; $count += 8) { - @tmp = @parts[$count+7]; - @parts[$count+7] = @parts[$count+1]; - @parts[$count+1] = @tmp; - - @tmp = @parts[$count+6]; - @parts[$count+6] = @parts[$count+2]; - @parts[$count+2] = @tmp; - - @tmp = @parts[$count+3]; - @parts[$count+3] = @parts[$count+5]; - @parts[$count+5] = @tmp; - } - - foreach (@parts) { - if((($counter) % 20) == 0) { - print DEST_KEY "\n "; - } else { - if($counter != 0){ - print DEST_KEY " "; - } - } - - print DEST_KEY $_; - - $counter++; - } - } - - -close(DEST_BIN); -close(DEST_KEY); -close(SRC); \ No newline at end of file diff --git a/core2.Dockerfile b/core2.Dockerfile new file mode 100644 index 000000000..e300fbce3 --- /dev/null +++ b/core2.Dockerfile @@ -0,0 +1,69 @@ +FROM phusion/baseimage + +# Env variables +ENV DEBIAN_FRONTEND noninteractive + +#Install dependencies +#Required Requisites +RUN add-apt-repository -y ppa:ubuntugis/ppa +RUN add-apt-repository -y ppa:george-edison55/cmake-3.x +RUN apt-get update -y + +# All packages (Will install much faster) +RUN apt-get install --no-install-recommends -y git cmake python-pip build-essential software-properties-common python-software-properties libgdal-dev gdal-bin 
libgeotiff-dev \ +libgtk2.0-dev libavcodec-dev libavformat-dev libswscale-dev python-dev python-numpy libtbb2 libtbb-dev libjpeg-dev libpng-dev libtiff-dev libjasper-dev libflann-dev \ +libproj-dev libxext-dev liblapack-dev libeigen3-dev libvtk5-dev python-networkx libgoogle-glog-dev libsuitesparse-dev libboost-filesystem-dev libboost-iostreams-dev \ +libboost-regex-dev libboost-python-dev libboost-date-time-dev libboost-thread-dev python-pyproj python-empy python-nose python-pyside python-pyexiv2 python-scipy \ +libexiv2-dev liblas-bin python-matplotlib libatlas-base-dev libgmp-dev libmpfr-dev swig2.0 python-wheel libboost-log-dev libjsoncpp-dev + +RUN apt-get remove libdc1394-22-dev +RUN pip install --upgrade pip +RUN pip install setuptools +RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/OpenDroneMap/gippy/archive/v0.3.9.tar.gz loky + +ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages" +ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm" +ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib" + +# Prepare directories + +RUN mkdir /code +WORKDIR /code + +# Copy repository files +COPY ccd_defs_check.py /code/ccd_defs_check.py +COPY CMakeLists.txt /code/CMakeLists.txt +COPY configure.sh /code/configure.sh +COPY /modules/ /code/modules/ +COPY /opendm/ /code/opendm/ +COPY /patched_files/ /code/patched_files/ +COPY run.py /code/run.py +COPY run.sh /code/run.sh +COPY /scripts/ /code/scripts/ +COPY /SuperBuild/cmake/ /code/SuperBuild/cmake/ +COPY /SuperBuild/CMakeLists.txt /code/SuperBuild/CMakeLists.txt +COPY docker.settings.yaml /code/settings.yaml +COPY VERSION /code/VERSION + +# Replace g++ and gcc with our own scripts +COPY /docker/ /code/docker/ +RUN mv -v /usr/bin/gcc /usr/bin/gcc_real && mv -v /usr/bin/g++ /usr/bin/g++_real && cp -v /code/docker/gcc /usr/bin/gcc && cp -v /code/docker/g++ /usr/bin/g++ + +#Compile code in SuperBuild and root directories + +RUN cd 
SuperBuild && mkdir build && cd build && cmake .. && make -j$(nproc) && cd ../.. && mkdir build && cd build && cmake .. && make -j$(nproc) + +RUN apt-get -y remove libgl1-mesa-dri git cmake python-pip build-essential +RUN apt-get install -y libvtk5-dev + +# Cleanup APT +RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +# Clean Superbuild + +RUN rm -rf /code/SuperBuild/download +RUN rm -rf /code/SuperBuild/src/opencv/samples /code/SuperBuild/src/pcl/test /code/SuperBuild/src/pcl/doc /code/SuperBuild/src/pdal/test /code/SuperBuild/src/pdal/doc + +# Entry point +ENTRYPOINT ["python", "/code/run.py", "code"] + diff --git a/docker.settings.yaml b/docker.settings.yaml new file mode 100644 index 000000000..2a2e4092a --- /dev/null +++ b/docker.settings.yaml @@ -0,0 +1,62 @@ +--- +# A list of global configuration variables +# Uncomment lines as needed to edit default settings. +# Note this only works for settings with default values. Some commands like --rerun +# or --force-ccd n will have to be set in the command line (if you need to) + +# This line is really important to set up properly +project_path: '/' #DO NOT CHANGE THIS OR DOCKER WILL NOT WORK. 
It should be '/' + +# The rest of the settings will default to the values set unless you uncomment and change them +#resize_to: 2048 +#start_with: 'resize' +#end_with: 'odm_orthophoto' +#rerun_all: False +#zip_results: False +#verbose: False +#time: False +#use_fixed_camera_params: False +#use_hybrid_bundle_adjustment: False +#opensfm_processes: 4 # by default this is set to $(nproc) +#min_num_features: 4000 +#matcher_threshold: 2.0 +#matcher_ratio: 0.6 +#matcher_neighbors: 8 +#matcher_distance: 0 +#use_pmvs: False # The cmvs/pmvs settings only matter if 'Enabled' is set to True +#cmvs_maximages: 500 +#pmvs_level: 1 +#pmvs_csize: 2 +#pmvs_threshold: 0.7 +#pmvs_wsize: 7 +#pmvs_min_images: 3 +#pmvs_num_cores: 4 # by default this is set to $(nproc) +#mesh_size: 100000 +#mesh_octree_depth: 9 +#mesh_samples: 1.0 +#mesh_solver_divide: 9 +#texturing_data_term: 'gmi' +#texturing_outlier_removal_type: 'gauss_clamping' +#texturing_skip_visibility_test: False +#texturing_skip_global_seam_leveling: False +#texturing_skip_local_seam_leveling: False +#texturing_skip_hole_filling: False +#texturing_keep_unseen_faces: False +#texturing_tone_mapping: 'none' +#gcp: !!null # YAML tag for None +#use_exif: False # Set to True if you have a GCP file (it auto-detects) and want to use EXIF +#dtm: False # Use this tag to build a DTM (Digital Terrain Model +#dsm: False # Use this tag to build a DSM (Digital Surface Model +#dem-gapfill-steps: 4 +#dem-resolution: 0.1 +#dem-maxangle:20 +#dem-maxsd: 2.5 +#dem-approximate: False +#dem-decimation: 1 +#dem-terrain-type: ComplexForest +#orthophoto_resolution: 20.0 # Pixels/meter +#orthophoto_target_srs: !!null # Currently does nothing +#orthophoto_no_tiled: False +#orthophoto_compression: DEFLATE # Options are [JPEG, LZW, PACKBITS, DEFLATE, LZMA, NONE] Don't change unless you know what you are doing +#orthophoto_bigtiff: IF_SAFER # Options are [YES, NO, IF_NEEDED, IF_SAFER] +#build_overviews: FALSE diff --git a/docker/README b/docker/README new 
file mode 100644 index 000000000..f96e85d88 --- /dev/null +++ b/docker/README @@ -0,0 +1,3 @@ +The g++ and gcc scripts in this directory are used to replace the real g++ and gcc programs so that compilation across all projects (including dependencies) uses the -march=core2 flag, which allows us to build a docker image compatible with most Intel based CPUs. + +Without the -march=core2 flag, a docker image will contain binaries that are optimized for the machine that built the image, and will not run on older machines. diff --git a/docker/g++ b/docker/g++ new file mode 100755 index 000000000..0206851b3 --- /dev/null +++ b/docker/g++ @@ -0,0 +1,12 @@ +#!/bin/bash + +args="" + +for i in "$@" +do + if [[ $i != -march* ]]; then + args="$args $i" + fi +done + +/usr/bin/g++_real -march=core2 $args diff --git a/docker/gcc b/docker/gcc new file mode 100755 index 000000000..d72824c66 --- /dev/null +++ b/docker/gcc @@ -0,0 +1,12 @@ +#!/bin/bash + +args="" + +for i in "$@" +do + if [[ $i != -march* ]]; then + args="$args $i" + fi +done + +/usr/bin/gcc_real -march=core2 $args diff --git a/hooks/pre-commit b/hooks/pre-commit new file mode 100644 index 000000000..0a85afe45 --- /dev/null +++ b/hooks/pre-commit @@ -0,0 +1,23 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +exec 1>&2 + +echo "RUNNING PRE-COMMIT" +EXIT_CODE=0 +# Get list of files about to be committed +if git diff --cached --name-only --diff-filter=ACM | grep 'ccd_defs.json'; then + echo "We changed ccd_defs.json" + GIT_ROOT=$(git rev-parse --show-toplevel) + python $GIT_ROOT/ccd_defs_check.py + EXIT_CODE=$(echo $?) 
+fi + +# non-zero exit fails the commit +exit $EXIT_CODE diff --git a/img/bellus_map.png b/img/bellus_map.png new file mode 100644 index 000000000..305acb70d Binary files /dev/null and b/img/bellus_map.png differ diff --git a/img/mvs-text-orthphoto.png b/img/mvs-text-orthphoto.png new file mode 100644 index 000000000..db8d210f5 Binary files /dev/null and b/img/mvs-text-orthphoto.png differ diff --git a/img/odm_image.png b/img/odm_image.png new file mode 100644 index 000000000..04817f567 Binary files /dev/null and b/img/odm_image.png differ diff --git a/img/odm_orthophoto_test.png b/img/odm_orthophoto_test.png new file mode 100644 index 000000000..113f8182f Binary files /dev/null and b/img/odm_orthophoto_test.png differ diff --git a/img/sm_seneca_orthophoto.png b/img/sm_seneca_orthophoto.png new file mode 100644 index 000000000..2d1bc1543 Binary files /dev/null and b/img/sm_seneca_orthophoto.png differ diff --git a/img/sm_tol_odm_orthophoto.png b/img/sm_tol_odm_orthophoto.png new file mode 100644 index 000000000..fd7c40614 Binary files /dev/null and b/img/sm_tol_odm_orthophoto.png differ diff --git a/img/tol_ptcloud.png b/img/tol_ptcloud.png new file mode 100644 index 000000000..911fe9179 Binary files /dev/null and b/img/tol_ptcloud.png differ diff --git a/img/tol_text.png b/img/tol_text.png new file mode 100644 index 000000000..715b57485 Binary files /dev/null and b/img/tol_text.png differ diff --git a/install-centos.sh b/install-centos.sh deleted file mode 100644 index cd7e1167c..000000000 --- a/install-centos.sh +++ /dev/null @@ -1,342 +0,0 @@ -#!/bin/bash/bin/bash - -set -o nounset -set -o errexit - -echo -echo " created by Daniel Schwarz/daniel.schwarz@topoi.org" -echo " released under Creative Commons/CC-BY" -echo " Attribution" -echo -echo " if the script doesn't finish properly" -echo " (i.e. 
it doesn't print \"script finished\" at the end)" -echo " please email me the content of the logs folder" -echo -echo -echo " - script started - `date`" - - ## dest base path - TOOLS_PATH="$PWD" - - ## paths for the tools - TOOLS_BIN_PATH="$TOOLS_PATH/bin" - TOOLS_INC_PATH="$TOOLS_PATH/include" - TOOLS_LIB_PATH="$TOOLS_PATH/lib" - TOOLS_SRC_PATH="$TOOLS_PATH/src" - TOOLS_LOG_PATH="$TOOLS_PATH/logs" - TOOLS_PATCHED_PATH="$TOOLS_PATH/patched_files" - -## get sys vars -ARCH=`uname -m` -CORES=`grep -c processor /proc/cpuinfo` - - ## loacal dest paths -if [ "$ARCH" = "i686" ]; then - LIB_PATH="/lib" -fi - -if [ "$ARCH" = "x86_64" ]; then - LIB_PATH="/lib64" -fi -INC_PATH="/usr/local/include" - - ## source paths - BUNDLER_PATH="$TOOLS_SRC_PATH/bundler" - CMVS_PATH="$TOOLS_SRC_PATH/cmvs" - PMVS_PATH="$TOOLS_SRC_PATH/pmvs" - CLAPACK_PATH="$TOOLS_SRC_PATH/clapack" - VLFEAT_PATH="$TOOLS_SRC_PATH/vlfeat" - PARALLEL_PATH="$TOOLS_SRC_PATH/parallel" - PSR_PATH="$TOOLS_SRC_PATH/PoissonRecon" - GRACLUS_PATH="$TOOLS_SRC_PATH/graclus" - - ## executables - EXTRACT_FOCAL="$TOOLS_BIN_PATH/extract_focal.pl" - MATCHKEYS="$TOOLS_BIN_PATH/KeyMatch" - MATCHKEYSFULL="$TOOLS_BIN_PATH/KeyMatchFull" - BUNDLER="$TOOLS_BIN_PATH/bundler" - BUNDLE2PVMS="$TOOLS_BIN_PATH/Bundle2PMVS" - CMVS="$TOOLS_BIN_PATH/cmvs" - PMVS="$TOOLS_BIN_PATH/pmvs2" - GENOPTION="$TOOLS_BIN_PATH/genOption" - VLSIFT="$TOOLS_BIN_PATH/vlsift" - PARALLEL="$TOOLS_BIN_PATH/parallel" - PSR="$TOOLS_BIN_PATH/PoissonRecon" - VLSIFT_TO_LOWESIFT="$TOOLS_BIN_PATH/convert_vlsift_to_lowesift.pl" - -## prevents different (localized) output -LC_ALL=C - -## removing old stuff -sudo rm -Rf "$TOOLS_BIN_PATH" -sudo rm -Rf "$TOOLS_INC_PATH" -sudo rm -Rf "$TOOLS_LIB_PATH" -sudo rm -Rf "$TOOLS_SRC_PATH" -sudo rm -Rf "$TOOLS_LOG_PATH" - -## create needed directories -mkdir -p "$TOOLS_BIN_PATH" -mkdir -p "$TOOLS_INC_PATH" -mkdir -p "$TOOLS_LIB_PATH" -mkdir -p "$TOOLS_SRC_PATH" -mkdir -p "$TOOLS_LOG_PATH" - -## output sys info -echo "System info:" > 
"$TOOLS_LOG_PATH/sysinfo.txt" -uname -a > "$TOOLS_LOG_PATH/sysinfo.txt" - -## install packages -echo -echo " > installing required packages" - -echo " - updating" -sudo yum update -y > "$TOOLS_LOG_PATH/apt-get_get.log" 2>&1 - -echo " - installing" -yum install -y \ - cmake gcc gcc-c++ compat-gcc-32 compat-gcc-32-c++ gcc-gfortran perl ruby rubygems git \ - curl wget \ - unzip \ - ImageMagick jhead \ - libjpeg-devel boost-devel gsl-devel libX11-devel libXext-devel lapack-devel blas-devel \ - zlib-devel \ - opencv-devel \ - > "$TOOLS_LOG_PATH/apt-get_install.log" 2>&1 - -sudo gem install parallel > /dev/null 2>&1 - -echo " < done - `date`" - -## downloading sources -echo -echo " > getting the sources" - -## getting all archives if not already present; save them to .tmp and rename them after download -while read target source -do - if [ ! -f "$target" ] ; then - echo " - getting $source" - - curl --progress-bar --insecure --location -o "$target.tmp" "$source" - mv "$target.tmp" "$target" - echo " - finished $target" - echo - else - echo " - already downloaded $source" - fi -done < "$TOOLS_LOG_PATH/extract_$i.log" 2>&1 & -done -for i in *.tgz *.tar.gz ; do - tar xzf "$i" > "$TOOLS_LOG_PATH/extract_$i.log" 2>&1 & -done -for i in *.zip ; do - unzip "$i" > "$TOOLS_LOG_PATH/extract_$i.log" 2>&1 & -done - -wait - -mv -f graclus1.2 "$GRACLUS_PATH" -mv -f clapack-3.2.1-CMAKE "$CLAPACK_PATH" -mv -f vlfeat-0.9.13 "$VLFEAT_PATH" -mv -f bundler-v0.4-source "$BUNDLER_PATH" -mv -f parallel-20100922 "$PARALLEL_PATH" -mv -f PoissonRecon "$PSR_PATH" -mv -f cmvs "$CMVS_PATH" - -echo " < done - `date`" - - -## copying patches -echo -echo " - copying patches" -echo - -for file in `find $TOOLS_PATCHED_PATH -type f -print` ; do - cp $file $TOOLS_PATH/${file/$TOOLS_PATCHED_PATH/.} -done - -echo " < done - `date`" - - -# building -echo -echo " - building" -echo - -sudo chown -R `id -u`:`id -g` * -sudo chmod -R 777 * - - -echo " > graclus" - cd "$GRACLUS_PATH" - - if [ "$ARCH" = "i686" ]; then 
- sed -i "$GRACLUS_PATH/Makefile.in" -e "11c\COPTIONS = -DNUMBITS=32" - fi - - if [ "$ARCH" = "x86_64" ]; then - sed -i "$GRACLUS_PATH/Makefile.in" -e "11c\COPTIONS = -DNUMBITS=64" - fi - - echo " - cleaning graclus" - make clean > "$TOOLS_LOG_PATH/graclus_1_clean.log" 2>&1 - - echo " - building graclus" - make -j > "$TOOLS_LOG_PATH/graclus_2_build.log" 2>&1 - - mkdir "$TOOLS_INC_PATH/metisLib" - cp -f "$GRACLUS_PATH/metisLib/"*.h "$TOOLS_INC_PATH/metisLib/" - - cp -f lib* "$TOOLS_LIB_PATH/" -echo " < done - `date`" -echo - -echo " > poisson surface reconstruction " - cd "$PSR_PATH" - - sed -i "$PSR_PATH/Makefile" -e "21c\BIN = ./" - - echo " - building poisson surface reconstruction" - make -j > "$TOOLS_LOG_PATH/poisson_1_build.log" 2>&1 - - cp -f "$PSR_PATH/PoissonRecon" "$TOOLS_BIN_PATH/PoissonRecon" - -echo " < done - `date`" -echo - - -echo " > parallel" - cd "$PARALLEL_PATH" - - echo " - configuring parallel" - ./configure > "$TOOLS_LOG_PATH/parallel_1_build.log" 2>&1 - - echo " - building paralel" - make -j > "$TOOLS_LOG_PATH/parallel_2_build.log" 2>&1 - - cp -f src/parallel "$TOOLS_BIN_PATH/" - -echo " < done - `date`" -echo - - -echo " > clapack" - cd "$CLAPACK_PATH" - cp make.inc.example make.inc - - set +e - echo " - building clapack" - make all -j > "$TOOLS_LOG_PATH/clapack_1_build.log" 2>&1 - set -e - - echo " - installing clapack" - make lapack_install > "$TOOLS_LOG_PATH/clapack_2_install.log" 2>&1 - - cp -Rf INCLUDE "$INC_PATH/clapack" - -echo " < done - `date`" -echo - - -echo " > vlfeat" - cd "$VLFEAT_PATH" - - echo " - installing vlfeat" - - if [ "$ARCH" = "i686" ]; then - cp -f "$VLFEAT_PATH/bin/glnx86/sift" "$TOOLS_BIN_PATH/vlsift" - cp -f "$VLFEAT_PATH/bin/glnx86/libvl.so" "$TOOLS_LIB_PATH/" - fi - - if [ "$ARCH" = "x86_64" ]; then - cp -f "$VLFEAT_PATH/bin/glnxa64/sift" "$TOOLS_BIN_PATH/vlsift" - cp -f "$VLFEAT_PATH/bin/glnxa64/libvl.so" "$TOOLS_LIB_PATH/" - fi -echo " < done - `date`" -echo - - -echo " > cmvs" - cd "$CMVS_PATH/program/main" - 
- sed -i "$CMVS_PATH/program/main/genOption.cc" -e "5c\#include \n" - sed -i "$CMVS_PATH/program/base/cmvs/bundle.cc" -e "3c\#include \n" - - sed -i "$CMVS_PATH/program/main/Makefile" -e "10c\#Your INCLUDE path (e.g., -I\/usr\/include)" - sed -i "$CMVS_PATH/program/main/Makefile" -e "11c\YOUR_INCLUDE_PATH =-I$INC_PATH -I$TOOLS_INC_PATH" - sed -i "$CMVS_PATH/program/main/Makefile" -e "13c\#Your metis directory (contains header files under graclus1.2/metisLib/)" - sed -i "$CMVS_PATH/program/main/Makefile" -e "14c\YOUR_INCLUDE_METIS_PATH = -I$TOOLS_INC_PATH/metisLib/" - sed -i "$CMVS_PATH/program/main/Makefile" -e "16c\#Your LDLIBRARY path (e.g., -L/usr/lib)" - sed -i "$CMVS_PATH/program/main/Makefile" -e "17c\YOUR_LDLIB_PATH = -L$LIB_PATH -L$TOOLS_LIB_PATH" - - if [ "$ARCH" = "i686" ]; then - sed -i "$CMVS_PATH/program/main/Makefile" -e "22c\CXXFLAGS_CMVS = -O2 -Wall -Wno-deprecated -DNUMBITS=32 \\\\" - sed -i "$CMVS_PATH/program/main/Makefile" -e '24c\ -fopenmp -DNUMBITS=32 ${OPENMP_FLAG}' - fi - - if [ "$ARCH" = "x86_64" ]; then - sed -i "$CMVS_PATH/program/main/Makefile" -e "22c\CXXFLAGS_CMVS = -O2 -Wall -Wno-deprecated -DNUMBITS=64 \\\\" - sed -i "$CMVS_PATH/program/main/Makefile" -e '24c\ -fopenmp -DNUMBITS=64 ${OPENMP_FLAG}' - fi - - echo " - cleaning cmvs" - make clean > "$TOOLS_LOG_PATH/cmvs_1_clean.log" 2>&1 - - echo " - building cmvs" - make -j > "$TOOLS_LOG_PATH/cmvs_2_build.log" 2>&1 - - echo " - make depend cmvs" - sudo make depend > "$TOOLS_LOG_PATH/cmvs_3_depend.log" 2>&1 - - cp -f "$CMVS_PATH/program/main/cmvs" "$CMVS_PATH/program/main/pmvs2" "$CMVS_PATH/program/main/genOption" "$TOOLS_BIN_PATH/" - cp -f "$CMVS_PATH/program/main/"*so* "$TOOLS_LIB_PATH/" -echo " < done - `date`" -echo - - -echo " > bundler" - cd "$BUNDLER_PATH" - - sed -i "$BUNDLER_PATH/src/BundlerApp.h" -e "620c\ BundlerApp();" - - echo " - cleaning bundler" - make clean > "$TOOLS_LOG_PATH/bundler_1_clean.log" 2>&1 - - echo " - building bundler" - make -j > 
"$TOOLS_LOG_PATH/bundler_2_build.log" 2>&1 - - cp -f "$BUNDLER_PATH/bin/Bundle2PMVS" "$BUNDLER_PATH/bin/Bundle2Vis" "$BUNDLER_PATH/bin/KeyMatchFull" "$BUNDLER_PATH/bin/KeyMatch" "$BUNDLER_PATH/bin/bundler" "$BUNDLER_PATH/bin/RadialUndistort" "$TOOLS_BIN_PATH/" - - cp -f "$BUNDLER_PATH/lib/libANN_char.so" "$TOOLS_LIB_PATH/" -echo " < done - `date`" -echo - - -cd "$TOOLS_PATH" - -sudo install -o `id -u` -g `id -g` -m 644 -t "$LIB_PATH" lib/*.so -sudo /sbin/ldconfig -v > "$TOOLS_LOG_PATH/ldconfig.log" 2>&1 - -sudo chown -R `id -u`:`id -g` * -sudo chmod -R 777 * - -echo " - script finished - `date`" - -exit diff --git a/install.sh b/install.sh deleted file mode 100755 index 35ddbed1c..000000000 --- a/install.sh +++ /dev/null @@ -1,336 +0,0 @@ -#!/bin/bash - -set -o nounset -set -o errexit - -echo -echo " created by Daniel Schwarz/daniel.schwarz@topoi.org" -echo " released under Creative Commons/CC-BY" -echo " Attribution" -echo -echo " if the script doesn't finish properly" -echo " (i.e. it doesn't print \"script finished\" at the end)" -echo " please email me the content of the logs folder" -echo -echo -echo " - script started - `date`" - - ## dest base path - TOOLS_PATH="$PWD" - - ## paths for the tools - TOOLS_BIN_PATH="$TOOLS_PATH/bin" - TOOLS_INC_PATH="$TOOLS_PATH/include" - TOOLS_LIB_PATH="$TOOLS_PATH/lib" - TOOLS_SRC_PATH="$TOOLS_PATH/src" - TOOLS_LOG_PATH="$TOOLS_PATH/logs" - TOOLS_PATCHED_PATH="$TOOLS_PATH/patched_files" - - ## loacal dest paths - LIB_PATH="/usr/local/lib" - INC_PATH="/usr/local/include" - - ## source paths - BUNDLER_PATH="$TOOLS_SRC_PATH/bundler" - CMVS_PATH="$TOOLS_SRC_PATH/cmvs" - PMVS_PATH="$TOOLS_SRC_PATH/pmvs" - CLAPACK_PATH="$TOOLS_SRC_PATH/clapack" - VLFEAT_PATH="$TOOLS_SRC_PATH/vlfeat" - PARALLEL_PATH="$TOOLS_SRC_PATH/parallel" - PSR_PATH="$TOOLS_SRC_PATH/PoissonRecon" - GRACLUS_PATH="$TOOLS_SRC_PATH/graclus" - - ## executables - EXTRACT_FOCAL="$TOOLS_BIN_PATH/extract_focal.pl" - MATCHKEYS="$TOOLS_BIN_PATH/KeyMatch" - 
MATCHKEYSFULL="$TOOLS_BIN_PATH/KeyMatchFull" - BUNDLER="$TOOLS_BIN_PATH/bundler" - BUNDLE2PVMS="$TOOLS_BIN_PATH/Bundle2PMVS" - CMVS="$TOOLS_BIN_PATH/cmvs" - PMVS="$TOOLS_BIN_PATH/pmvs2" - GENOPTION="$TOOLS_BIN_PATH/genOption" - VLSIFT="$TOOLS_BIN_PATH/vlsift" - PARALLEL="$TOOLS_BIN_PATH/parallel" - PSR="$TOOLS_BIN_PATH/PoissonRecon" - VLSIFT_TO_LOWESIFT="$TOOLS_BIN_PATH/convert_vlsift_to_lowesift.pl" - -## get sys vars -ARCH=`uname -m` -CORES=`grep -c processor /proc/cpuinfo` - -## prevents different (localized) output -LC_ALL=C - -## removing old stuff -sudo rm -Rf "$TOOLS_BIN_PATH" -sudo rm -Rf "$TOOLS_INC_PATH" -sudo rm -Rf "$TOOLS_LIB_PATH" -sudo rm -Rf "$TOOLS_SRC_PATH" -sudo rm -Rf "$TOOLS_LOG_PATH" - -## create needed directories -mkdir -p "$TOOLS_BIN_PATH" -mkdir -p "$TOOLS_INC_PATH" -mkdir -p "$TOOLS_LIB_PATH" -mkdir -p "$TOOLS_SRC_PATH" -mkdir -p "$TOOLS_LOG_PATH" - -## output sys info -echo "System info:" > "$TOOLS_LOG_PATH/sysinfo.txt" -uname -a > "$TOOLS_LOG_PATH/sysinfo.txt" - -## install packages -echo -echo " > installing required packages" - -echo " - updating" -sudo apt-get update --assume-yes > "$TOOLS_LOG_PATH/apt-get_get.log" 2>&1 - -echo " - installing" -sudo apt-get install --assume-yes --install-recommends \ - build-essential cmake g++ gcc gFortran perl ruby rubygems git \ - curl wget \ - unzip \ - imagemagick jhead \ - libjpeg-dev libboost-dev libgsl0-dev libx11-dev libxext-dev liblapack-dev \ - libzip-dev \ - libcv-dev libcvaux-dev \ - > "$TOOLS_LOG_PATH/apt-get_install.log" 2>&1 - -sudo gem install parallel > /dev/null 2>&1 - -echo " < done - `date`" - -## downloading sources -echo -echo " > getting the sources" - -## getting all archives if not already present; save them to .tmp and rename them after download -while read target source -do - if [ ! 
-f "$target" ] ; then - echo " - getting $source" - - curl --progress-bar --location -o "$target.tmp" "$source" - mv "$target.tmp" "$target" - echo " - finished $target" - echo - else - echo " - already downloaded $source" - fi -done < "$TOOLS_LOG_PATH/extract_$i.log" 2>&1 & -done -for i in *.tgz *.tar.gz ; do - tar xzf "$i" > "$TOOLS_LOG_PATH/extract_$i.log" 2>&1 & -done -for i in *.zip ; do - unzip "$i" > "$TOOLS_LOG_PATH/extract_$i.log" 2>&1 & -done - -wait - -mv -f graclus1.2 "$GRACLUS_PATH" -mv -f clapack-3.2.1-CMAKE "$CLAPACK_PATH" -mv -f vlfeat-0.9.13 "$VLFEAT_PATH" -mv -f bundler-v0.4-source "$BUNDLER_PATH" -mv -f parallel-20100922 "$PARALLEL_PATH" -mv -f PoissonRecon "$PSR_PATH" -mv -f cmvs "$CMVS_PATH" - -echo " < done - `date`" - - -## copying patches -echo -echo " - copying patches" -echo - -for file in `find $TOOLS_PATCHED_PATH -type f -print` ; do - cp $file $TOOLS_PATH/${file/$TOOLS_PATCHED_PATH/.} -done - -echo " < done - `date`" - - -# building -echo -echo " - building" -echo - -sudo chown -R `id -u`:`id -g` * -#sudo chmod -R 777 * - - -echo " > graclus" - cd "$GRACLUS_PATH" - - if [ "$ARCH" = "i686" ]; then - sed -i "$GRACLUS_PATH/Makefile.in" -e "11c\COPTIONS = -DNUMBITS=32" - fi - - if [ "$ARCH" = "x86_64" ]; then - sed -i "$GRACLUS_PATH/Makefile.in" -e "11c\COPTIONS = -DNUMBITS=64" - fi - - echo " - cleaning graclus" - make clean > "$TOOLS_LOG_PATH/graclus_1_clean.log" 2>&1 - - echo " - building graclus" - make -j > "$TOOLS_LOG_PATH/graclus_2_build.log" 2>&1 - - mkdir "$TOOLS_INC_PATH/metisLib" - cp -f "$GRACLUS_PATH/metisLib/"*.h "$TOOLS_INC_PATH/metisLib/" - - cp -f lib* "$TOOLS_LIB_PATH/" -echo " < done - `date`" -echo - -echo " > poisson surface reconstruction " - cd "$PSR_PATH" - - sed -i "$PSR_PATH/Makefile" -e "21c\BIN = ./" - - echo " - building poisson surface reconstruction" - make -j > "$TOOLS_LOG_PATH/poisson_1_build.log" 2>&1 - - cp -f "$PSR_PATH/PoissonRecon" "$TOOLS_BIN_PATH/PoissonRecon" - -echo " < done - `date`" -echo - - 
-echo " > parallel" - cd "$PARALLEL_PATH" - - echo " - configuring parallel" - ./configure > "$TOOLS_LOG_PATH/parallel_1_build.log" 2>&1 - - echo " - building paralel" - make -j > "$TOOLS_LOG_PATH/parallel_2_build.log" 2>&1 - - cp -f src/parallel "$TOOLS_BIN_PATH/" - -echo " < done - `date`" -echo - - -echo " > clapack" - cd "$CLAPACK_PATH" - cp make.inc.example make.inc - - set +e - echo " - building clapack" - make all -j > "$TOOLS_LOG_PATH/clapack_1_build.log" 2>&1 - set -e - - echo " - installing clapack" - make lapack_install > "$TOOLS_LOG_PATH/clapack_2_install.log" 2>&1 - - sudo cp -Rf INCLUDE "$INC_PATH/clapack" - -echo " < done - `date`" -echo - - -echo " > vlfeat" - cd "$VLFEAT_PATH" - - echo " - installing vlfeat" - - if [ "$ARCH" = "i686" ]; then - cp -f "$VLFEAT_PATH/bin/glnx86/sift" "$TOOLS_BIN_PATH/vlsift" - cp -f "$VLFEAT_PATH/bin/glnx86/libvl.so" "$TOOLS_LIB_PATH/" - fi - - if [ "$ARCH" = "x86_64" ]; then - cp -f "$VLFEAT_PATH/bin/glnxa64/sift" "$TOOLS_BIN_PATH/vlsift" - cp -f "$VLFEAT_PATH/bin/glnxa64/libvl.so" "$TOOLS_LIB_PATH/" - fi -echo " < done - `date`" -echo - - -echo " > cmvs/pmvs" - cd "$CMVS_PATH/program/main" - - sed -i "$CMVS_PATH/program/main/genOption.cc" -e "5c\#include \n" - sed -i "$CMVS_PATH/program/base/cmvs/bundle.cc" -e "3c\#include \n" - - sed -i "$CMVS_PATH/program/main/Makefile" -e "10c\#Your INCLUDE path (e.g., -I\/usr\/include)" - sed -i "$CMVS_PATH/program/main/Makefile" -e "11c\YOUR_INCLUDE_PATH =-I$INC_PATH -I$TOOLS_INC_PATH" - sed -i "$CMVS_PATH/program/main/Makefile" -e "13c\#Your metis directory (contains header files under graclus1.2/metisLib/)" - sed -i "$CMVS_PATH/program/main/Makefile" -e "14c\YOUR_INCLUDE_METIS_PATH = -I$TOOLS_INC_PATH/metisLib/" - sed -i "$CMVS_PATH/program/main/Makefile" -e "16c\#Your LDLIBRARY path (e.g., -L/usr/lib)" - sed -i "$CMVS_PATH/program/main/Makefile" -e "17c\YOUR_LDLIB_PATH = -L$LIB_PATH -L$TOOLS_LIB_PATH" - - if [ "$ARCH" = "i686" ]; then - sed -i 
"$CMVS_PATH/program/main/Makefile" -e "22c\CXXFLAGS_CMVS = -O2 -Wall -Wno-deprecated -DNUMBITS=32 \\\\" - sed -i "$CMVS_PATH/program/main/Makefile" -e '24c\ -fopenmp -DNUMBITS=32 ${OPENMP_FLAG}' - fi - - if [ "$ARCH" = "x86_64" ]; then - sed -i "$CMVS_PATH/program/main/Makefile" -e "22c\CXXFLAGS_CMVS = -O2 -Wall -Wno-deprecated -DNUMBITS=64 \\\\" - sed -i "$CMVS_PATH/program/main/Makefile" -e '24c\ -fopenmp -DNUMBITS=64 ${OPENMP_FLAG}' - fi - - echo " - cleaning cmvs" - make clean > "$TOOLS_LOG_PATH/cmvs_1_clean.log" 2>&1 - - echo " - building cmvs" - make -j > "$TOOLS_LOG_PATH/cmvs_2_build.log" 2>&1 - - echo " - make depend cmvs" - sudo make depend > "$TOOLS_LOG_PATH/cmvs_3_depend.log" 2>&1 - - cp -f "$CMVS_PATH/program/main/cmvs" "$CMVS_PATH/program/main/pmvs2" "$CMVS_PATH/program/main/genOption" "$TOOLS_BIN_PATH/" - cp -f "$CMVS_PATH/program/main/"*so* "$TOOLS_LIB_PATH/" -echo " < done - `date`" -echo - - -echo " > bundler" - cd "$BUNDLER_PATH" - - sed -i "$BUNDLER_PATH/src/BundlerApp.h" -e "620c\ BundlerApp();" - - echo " - cleaning bundler" - make clean > "$TOOLS_LOG_PATH/bundler_1_clean.log" 2>&1 - - echo " - building bundler" - make -j > "$TOOLS_LOG_PATH/bundler_2_build.log" 2>&1 - - cp -f "$BUNDLER_PATH/bin/Bundle2PMVS" "$BUNDLER_PATH/bin/Bundle2Vis" "$BUNDLER_PATH/bin/KeyMatchFull" "$BUNDLER_PATH/bin/KeyMatch" "$BUNDLER_PATH/bin/bundler" "$BUNDLER_PATH/bin/RadialUndistort" "$TOOLS_BIN_PATH/" - - cp -f "$BUNDLER_PATH/lib/libANN_char.so" "$TOOLS_LIB_PATH/" -echo " < done - `date`" -echo - - -cd "$TOOLS_PATH" - -sudo install -o `id -u` -g `id -g` -m 644 -t "$LIB_PATH" lib/*.so -sudo ldconfig -v > "$TOOLS_LOG_PATH/ldconfig.log" 2>&1 - -sudo chown -R `id -u`:`id -g` * -#sudo chmod -R 777 * - -echo " - script finished - `date`" - -exit diff --git a/licenses/libext_copyright.txt b/licenses/libext_copyright.txt new file mode 100644 index 000000000..60cc18d54 --- /dev/null +++ b/licenses/libext_copyright.txt @@ -0,0 +1,199 @@ +This package was downloaded from 
+http://xorg.freedesktop.org/releases/individual/lib/ + +Copyright 1986, 1987, 1988, 1989, 1994, 1998 The Open Group + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation. + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of The Open Group shall not be +used in advertising or otherwise to promote the sale, use or other dealings +in this Software without prior written authorization from The Open Group. + +Copyright (c) 1996 Digital Equipment Corporation, Maynard, Massachusetts. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software. + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +DIGITAL EQUIPMENT CORPORATION BE LIABLE FOR ANY CLAIM, DAMAGES, INCLUDING, +BUT NOT LIMITED TO CONSEQUENTIAL OR INCIDENTAL DAMAGES, OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of Digital Equipment Corporation +shall not be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization from Digital +Equipment Corporation. + +Copyright (c) 1997 by Silicon Graphics Computer Systems, Inc. +Permission to use, copy, modify, and distribute this +software and its documentation for any purpose and without +fee is hereby granted, provided that the above copyright +notice appear in all copies and that both that copyright +notice and this permission notice appear in supporting +documentation, and that the name of Silicon Graphics not be +used in advertising or publicity pertaining to distribution +of the software without specific prior written permission. +Silicon Graphics makes no representation about the suitability +of this software for any purpose. It is provided "as is" +without any express or implied warranty. +SILICON GRAPHICS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS FOR A PARTICULAR PURPOSE. IN NO EVENT SHALL SILICON +GRAPHICS BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL +DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH +THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +Copyright 1992 Network Computing Devices + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the name of NCD. not be used in advertising or +publicity pertaining to distribution of the software without specific, +written prior permission. NCD. makes no representations about the +suitability of this software for any purpose. It is provided "as is" +without express or implied warranty. + +NCD. DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL NCD. +BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION +OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Copyright 1991,1993 by Digital Equipment Corporation, Maynard, Massachusetts, +and Olivetti Research Limited, Cambridge, England. + + All Rights Reserved + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the names of Digital or Olivetti +not be used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. 
+ +DIGITAL AND OLIVETTI DISCLAIM ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL THEY BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF +USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + +Copyright 1986, 1987, 1988 by Hewlett-Packard Corporation + +Permission to use, copy, modify, and distribute this +software and its documentation for any purpose and without +fee is hereby granted, provided that the above copyright +notice appear in all copies and that both that copyright +notice and this permission notice appear in supporting +documentation, and that the name of Hewlett-Packard not be used in +advertising or publicity pertaining to distribution of the +software without specific, written prior permission. + +Hewlett-Packard makes no representations about the +suitability of this software for any purpose. It is provided +"as is" without express or implied warranty. + +This software is not subject to any license of the American +Telephone and Telegraph Company or of the Regents of the +University of California. + +Copyright (c) 1994, 1995 Hewlett-Packard Company + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL HEWLETT-PACKARD COMPANY BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of the Hewlett-Packard +Company shall not be used in advertising or otherwise to promote the +sale, use or other dealings in this Software without prior written +authorization from the Hewlett-Packard Company. + +Copyright Digital Equipment Corporation, 1996 + +Permission to use, copy, modify, distribute, and sell this +documentation for any purpose is hereby granted without fee, +provided that the above copyright notice and this permission +notice appear in all copies. Digital Equipment Corporation +makes no representations about the suitability for any purpose +of the information in this document. This documentation is +provided ``as is'' without express or implied warranty. + +Copyright (c) 1999, 2005, 2006, Oracle and/or its affiliates. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next +paragraph) shall be included in all copies or substantial portions of the +Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + +Copyright (c) 1989 X Consortium, Inc. and Digital Equipment Corporation. +Copyright (c) 1992 X Consortium, Inc. and Intergraph Corporation. +Copyright (c) 1993 X Consortium, Inc. and Silicon Graphics, Inc. +Copyright (c) 1994, 1995 X Consortium, Inc. and Hewlett-Packard Company. + +Permission to use, copy, modify, and distribute this documentation for +any purpose and without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. +Digital Equipment Corporation, Intergraph Corporation, Silicon +Graphics, Hewlett-Packard, and the X Consortium make no +representations about the suitability for any purpose of the +information in this document. This documentation is provided ``as is'' +without express or implied warranty. diff --git a/licenses/libx11_copyright.txt b/licenses/libx11_copyright.txt new file mode 100644 index 000000000..0d563ab13 --- /dev/null +++ b/licenses/libx11_copyright.txt @@ -0,0 +1,944 @@ +This package was downloaded from +http://xorg.freedesktop.org/releases/individual/lib/ + +The following is the 'standard copyright' agreed upon by most contributors, +and is currently the canonical license preferred by the X.Org Foundation. +This is a slight variant of the common MIT license form published by the +Open Source Initiative at http://www.opensource.org/licenses/mit-license.php + +Copyright holders of new code should use this license statement where +possible, and insert their name to this list. 
Please sort by surname +for people, and by the full name for other entities (e.g. Juliusz +Chroboczek sorts before Intel Corporation sorts before Daniel Stone). + +See each individual source file or directory for the license that applies +to that file. + +Copyright (C) 2003-2006,2008 Jamey Sharp, Josh Triplett +Copyright © 2009 Red Hat, Inc. +Copyright 1990-1992,1999,2000,2004,2009,2010 Oracle and/or its affiliates. +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next +paragraph) shall be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+ + ---------------------------------------------------------------------- + +The following licenses are 'legacy' - usually MIT/X11 licenses with the name +of the copyright holder(s) in the license statement: + +Copyright 1984-1994, 1998 The Open Group + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation. + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of The Open Group shall not be +used in advertising or otherwise to promote the sale, use or other dealings +in this Software without prior written authorization from The Open Group. + +X Window System is a trademark of The Open Group. + + ---------------------------------------- + +Copyright 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1994, 1996 X Consortium +Copyright 2000 The XFree86 Project, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of the X Consortium shall +not be used in advertising or otherwise to promote the sale, use or +other dealings in this Software without prior written authorization +from the X Consortium. + +Copyright 1985, 1986, 1987, 1988, 1989, 1990, 1991 by +Digital Equipment Corporation + +Portions Copyright 1990, 1991 by Tektronix, Inc. + +Permission to use, copy, modify and distribute this documentation for +any purpose and without fee is hereby granted, provided that the above +copyright notice appears in all copies and that both that copyright notice +and this permission notice appear in all copies, and that the names of +Digital and Tektronix not be used in in advertising or publicity pertaining +to this documentation without specific, written prior permission. +Digital and Tektronix makes no representations about the suitability +of this documentation for any purpose. 
+It is provided ``as is'' without express or implied warranty. + + ---------------------------------------- + +Copyright (c) 1999-2000 Free Software Foundation, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +FREE SOFTWARE FOUNDATION BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of the Free Software Foundation +shall not be used in advertising or otherwise to promote the sale, use or +other dealings in this Software without prior written authorization from the +Free Software Foundation. + + ---------------------------------------- + +Code and supporting documentation (c) Copyright 1990 1991 Tektronix, Inc. + All Rights Reserved + +This file is a component of an X Window System-specific implementation +of Xcms based on the TekColor Color Management System. TekColor is a +trademark of Tektronix, Inc. The term "TekHVC" designates a particular +color space that is the subject of U.S. Patent No. 4,985,853 (equivalent +foreign patents pending). 
Permission is hereby granted to use, copy, +modify, sell, and otherwise distribute this software and its +documentation for any purpose and without fee, provided that: + +1. This copyright, permission, and disclaimer notice is reproduced in + all copies of this software and any modification thereof and in + supporting documentation; +2. Any color-handling application which displays TekHVC color + cooordinates identifies these as TekHVC color coordinates in any + interface that displays these coordinates and in any associated + documentation; +3. The term "TekHVC" is always used, and is only used, in association + with the mathematical derivations of the TekHVC Color Space, + including those provided in this file and any equivalent pathways and + mathematical derivations, regardless of digital (e.g., floating point + or integer) representation. + +Tektronix makes no representation about the suitability of this software +for any purpose. It is provided "as is" and with all faults. + +TEKTRONIX DISCLAIMS ALL WARRANTIES APPLICABLE TO THIS SOFTWARE, +INCLUDING THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE. IN NO EVENT SHALL TEKTRONIX BE LIABLE FOR ANY +SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +RESULTING FROM LOSS OF USE, DATA, OR PROFITS, WHETHER IN AN ACTION OF +CONTRACT, NEGLIGENCE, OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +CONNECTION WITH THE USE OR THE PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +(c) Copyright 1995 FUJITSU LIMITED +This is source code modified by FUJITSU LIMITED under the Joint +Development Agreement for the CDE/Motif PST. + + ---------------------------------------- + +Copyright 1992 by Oki Technosystems Laboratory, Inc. +Copyright 1992 by Fuji Xerox Co., Ltd. 
+ +Permission to use, copy, modify, distribute, and sell this software +and its documentation for any purpose is hereby granted without fee, +provided that the above copyright notice appear in all copies and +that both that copyright notice and this permission notice appear +in supporting documentation, and that the name of Oki Technosystems +Laboratory and Fuji Xerox not be used in advertising or publicity +pertaining to distribution of the software without specific, written +prior permission. +Oki Technosystems Laboratory and Fuji Xerox make no representations +about the suitability of this software for any purpose. It is provided +"as is" without express or implied warranty. + +OKI TECHNOSYSTEMS LABORATORY AND FUJI XEROX DISCLAIM ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL OKI TECHNOSYSTEMS +LABORATORY AND FUJI XEROX BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE +OR PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1990, 1991, 1992, 1993, 1994 by FUJITSU LIMITED + +Permission to use, copy, modify, distribute, and sell this software +and its documentation for any purpose is hereby granted without fee, +provided that the above copyright notice appear in all copies and +that both that copyright notice and this permission notice appear +in supporting documentation, and that the name of FUJITSU LIMITED +not be used in advertising or publicity pertaining to distribution +of the software without specific, written prior permission. +FUJITSU LIMITED makes no representations about the suitability of +this software for any purpose. +It is provided "as is" without express or implied warranty. 
+ +FUJITSU LIMITED DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO +EVENT SHALL FUJITSU LIMITED BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF +USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + + +Copyright (c) 1995 David E. Wexelblat. All rights reserved + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL DAVID E. WEXELBLAT BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of David E. Wexelblat shall +not be used in advertising or otherwise to promote the sale, use or +other dealings in this Software without prior written authorization +from David E. Wexelblat. 
+ + ---------------------------------------- + +Copyright 1990, 1991 by OMRON Corporation + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the name OMRON not be used in +advertising or publicity pertaining to distribution of the software without +specific, written prior permission. OMRON makes no representations +about the suitability of this software for any purpose. It is provided +"as is" without express or implied warranty. + +OMRON DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO +EVENT SHALL OMRON BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTUOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1985, 1986, 1987, 1988, 1989, 1990, 1991 by +Digital Equipment Corporation + +Portions Copyright 1990, 1991 by Tektronix, Inc + +Rewritten for X.org by Chris Lee + +Permission to use, copy, modify, distribute, and sell this documentation +for any purpose and without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. +Chris Lee makes no representations about the suitability for any purpose +of the information in this document. It is provided \`\`as-is'' without +express or implied warranty. 
+ + ---------------------------------------- + +Copyright 1993 by Digital Equipment Corporation, Maynard, Massachusetts, +Copyright 1994 by FUJITSU LIMITED +Copyright 1994 by Sony Corporation + + All Rights Reserved + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the names of Digital, FUJITSU +LIMITED and Sony Corporation not be used in advertising or publicity +pertaining to distribution of the software without specific, written +prior permission. + +DIGITAL, FUJITSU LIMITED AND SONY CORPORATION DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL DIGITAL, FUJITSU LIMITED +AND SONY CORPORATION BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF +USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + + +Copyright 1991 by the Open Software Foundation + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the name of Open Software Foundation +not be used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. Open Software +Foundation makes no representations about the suitability of this +software for any purpose. It is provided "as is" without express or +implied warranty. 
+ +OPEN SOFTWARE FOUNDATION DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL OPEN SOFTWARE FOUNDATIONN BE +LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1990, 1991, 1992,1993, 1994 by FUJITSU LIMITED +Copyright 1993, 1994 by Sony Corporation + +Permission to use, copy, modify, distribute, and sell this software and +its documentation for any purpose is hereby granted without fee, provided +that the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the name of FUJITSU LIMITED and Sony Corporation +not be used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. FUJITSU LIMITED and +Sony Corporation makes no representations about the suitability of this +software for any purpose. It is provided "as is" without express or +implied warranty. + +FUJITSU LIMITED AND SONY CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD +TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL FUJITSU LIMITED OR SONY CORPORATION BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright (c) 1993, 1995 by Silicon Graphics Computer Systems, Inc. 
+ +Permission to use, copy, modify, and distribute this +software and its documentation for any purpose and without +fee is hereby granted, provided that the above copyright +notice appear in all copies and that both that copyright +notice and this permission notice appear in supporting +documentation, and that the name of Silicon Graphics not be +used in advertising or publicity pertaining to distribution +of the software without specific prior written permission. +Silicon Graphics makes no representation about the suitability +of this software for any purpose. It is provided "as is" +without any express or implied warranty. + +SILICON GRAPHICS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS FOR A PARTICULAR PURPOSE. IN NO EVENT SHALL SILICON +GRAPHICS BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL +DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH +THE USE OR PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1991, 1992, 1993, 1994 by FUJITSU LIMITED +Copyright 1993 by Digital Equipment Corporation + +Permission to use, copy, modify, distribute, and sell this software +and its documentation for any purpose is hereby granted without fee, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of FUJITSU LIMITED and +Digital Equipment Corporation not be used in advertising or publicity +pertaining to distribution of the software without specific, written +prior permission. FUJITSU LIMITED and Digital Equipment Corporation +makes no representations about the suitability of this software for +any purpose. It is provided "as is" without express or implied +warranty. 
+ +FUJITSU LIMITED AND DIGITAL EQUIPMENT CORPORATION DISCLAIM ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL +FUJITSU LIMITED AND DIGITAL EQUIPMENT CORPORATION BE LIABLE FOR +ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER +IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1992, 1993 by FUJITSU LIMITED +Copyright 1993 by Fujitsu Open Systems Solutions, Inc. +Copyright 1994 by Sony Corporation + +Permission to use, copy, modify, distribute and sell this software +and its documentation for any purpose is hereby granted without fee, +provided that the above copyright notice appear in all copies and +that both that copyright notice and this permission notice appear +in supporting documentation, and that the name of FUJITSU LIMITED, +Fujitsu Open Systems Solutions, Inc. and Sony Corporation not be +used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. +FUJITSU LIMITED, Fujitsu Open Systems Solutions, Inc. and +Sony Corporation make no representations about the suitability of +this software for any purpose. It is provided "as is" without +express or implied warranty. + +FUJITSU LIMITED, FUJITSU OPEN SYSTEMS SOLUTIONS, INC. 
AND SONY +CORPORATION DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, +IN NO EVENT SHALL FUJITSU OPEN SYSTEMS SOLUTIONS, INC., FUJITSU LIMITED +AND SONY CORPORATION BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE +OR PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1987, 1988, 1990, 1993 by Digital Equipment Corporation, +Maynard, Massachusetts, + + All Rights Reserved + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Digital not be +used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. + +DIGITAL DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING +ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL +DIGITAL BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR +ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. + + ---------------------------------------- + +Copyright 1993 by SunSoft, Inc. 
+Copyright 1999-2000 by Bruno Haible + +Permission to use, copy, modify, distribute, and sell this software +and its documentation for any purpose is hereby granted without fee, +provided that the above copyright notice appear in all copies and +that both that copyright notice and this permission notice appear +in supporting documentation, and that the names of SunSoft, Inc. and +Bruno Haible not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. SunSoft, Inc. and Bruno Haible make no representations +about the suitability of this software for any purpose. It is +provided "as is" without express or implied warranty. + +SunSoft Inc. AND Bruno Haible DISCLAIM ALL WARRANTIES WITH REGARD +TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS, IN NO EVENT SHALL SunSoft, Inc. OR Bruno Haible BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1991 by the Open Software Foundation +Copyright 1993 by the TOSHIBA Corp. + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the names of Open Software Foundation and TOSHIBA +not be used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. Open Software +Foundation and TOSHIBA make no representations about the suitability of this +software for any purpose. It is provided "as is" without express or +implied warranty. 
+ +OPEN SOFTWARE FOUNDATION AND TOSHIBA DISCLAIM ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL OPEN SOFTWARE FOUNDATIONN OR TOSHIBA BE +LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1988 by Wyse Technology, Inc., San Jose, Ca., + + All Rights Reserved + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name Wyse not be +used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. + +WYSE DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING +ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL +DIGITAL BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR +ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. 
+ + ---------------------------------------- + + +Copyright 1991 by the Open Software Foundation +Copyright 1993, 1994 by the Sony Corporation + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the names of Open Software Foundation and +Sony Corporation not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior permission. +Open Software Foundation and Sony Corporation make no +representations about the suitability of this software for any purpose. +It is provided "as is" without express or implied warranty. + +OPEN SOFTWARE FOUNDATION AND SONY CORPORATION DISCLAIM ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL OPEN +SOFTWARE FOUNDATIONN OR SONY CORPORATION BE LIABLE FOR ANY SPECIAL, +INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1992, 1993 by FUJITSU LIMITED +Copyright 1993 by Fujitsu Open Systems Solutions, Inc. + +Permission to use, copy, modify, distribute and sell this software +and its documentation for any purpose is hereby granted without fee, +provided that the above copyright notice appear in all copies and +that both that copyright notice and this permission notice appear +in supporting documentation, and that the name of FUJITSU LIMITED and +Fujitsu Open Systems Solutions, Inc. 
not be used in advertising or +publicity pertaining to distribution of the software without specific, +written prior permission. +FUJITSU LIMITED and Fujitsu Open Systems Solutions, Inc. makes no +representations about the suitability of this software for any purpose. +It is provided "as is" without express or implied warranty. + +FUJITSU LIMITED AND FUJITSU OPEN SYSTEMS SOLUTIONS, INC. DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL FUJITSU OPEN SYSTEMS +SOLUTIONS, INC. AND FUJITSU LIMITED BE LIABLE FOR ANY SPECIAL, INDIRECT +OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF +USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1993, 1994 by Sony Corporation + +Permission to use, copy, modify, distribute, and sell this software +and its documentation for any purpose is hereby granted without fee, +provided that the above copyright notice appear in all copies and +that both that copyright notice and this permission notice appear +in supporting documentation, and that the name of Sony Corporation +not be used in advertising or publicity pertaining to distribution +of the software without specific, written prior permission. +Sony Corporation makes no representations about the suitability of +this software for any purpose. It is provided "as is" without +express or implied warranty. 
+ +SONY CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO +EVENT SHALL SONY CORPORATION BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF +USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 1986, 1998 The Open Group +Copyright (c) 2000 The XFree86 Project, Inc. + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation. + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +X CONSORTIUM OR THE XFREE86 PROJECT BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +Except as contained in this notice, the name of the X Consortium or of the +XFree86 Project shall not be used in advertising or otherwise to promote the +sale, use or other dealings in this Software without prior written +authorization from the X Consortium and the XFree86 Project. 
+ + ---------------------------------------- + +Copyright 1990, 1991 by OMRON Corporation, NTT Software Corporation, + and Nippon Telegraph and Telephone Corporation +Copyright 1991 by the Open Software Foundation +Copyright 1993 by the FUJITSU LIMITED + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the names of OMRON, NTT Software, NTT, and +Open Software Foundation not be used in advertising or publicity +pertaining to distribution of the software without specific, +written prior permission. OMRON, NTT Software, NTT, and Open Software +Foundation make no representations about the suitability of this +software for any purpose. It is provided "as is" without express or +implied warranty. + +OMRON, NTT SOFTWARE, NTT, AND OPEN SOFTWARE FOUNDATION +DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING +ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT +SHALL OMRON, NTT SOFTWARE, NTT, OR OPEN SOFTWARE FOUNDATION BE +LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ + ---------------------------------------- + +Copyright 1988 by Wyse Technology, Inc., San Jose, Ca, +Copyright 1987 by Digital Equipment Corporation, Maynard, Massachusetts, + + All Rights Reserved + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name Digital not be +used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. + +DIGITAL AND WYSE DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO +EVENT SHALL DIGITAL OR WYSE BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF +USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + + +Copyright 1991, 1992 by Fuji Xerox Co., Ltd. +Copyright 1992, 1993, 1994 by FUJITSU LIMITED + +Permission to use, copy, modify, distribute, and sell this software +and its documentation for any purpose is hereby granted without fee, +provided that the above copyright notice appear in all copies and +that both that copyright notice and this permission notice appear +in supporting documentation, and that the name of Fuji Xerox, +FUJITSU LIMITED not be used in advertising or publicity pertaining +to distribution of the software without specific, written prior +permission. Fuji Xerox, FUJITSU LIMITED make no representations +about the suitability of this software for any purpose. +It is provided "as is" without express or implied warranty. 
+ +FUJI XEROX, FUJITSU LIMITED DISCLAIM ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL FUJI XEROX, +FUJITSU LIMITED BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL +DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 2006 Josh Triplett + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
+ + ---------------------------------------- + +(c) Copyright 1996 by Sebastien Marineau and Holger Veit + + + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +HOLGER VEIT BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF +OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +Except as contained in this notice, the name of Sebastien Marineau or Holger Veit +shall not be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization from Holger Veit or +Sebastien Marineau. + + ---------------------------------------- + +Copyright 1990, 1991 by OMRON Corporation, NTT Software Corporation, + and Nippon Telegraph and Telephone Corporation +Copyright 1991 by the Open Software Foundation +Copyright 1993 by the TOSHIBA Corp. 
+Copyright 1993, 1994 by Sony Corporation +Copyright 1993, 1994 by the FUJITSU LIMITED + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the names of OMRON, NTT Software, NTT, Open +Software Foundation, and Sony Corporation not be used in advertising +or publicity pertaining to distribution of the software without specific, +written prior permission. OMRON, NTT Software, NTT, Open Software +Foundation, and Sony Corporation make no representations about the +suitability of this software for any purpose. It is provided "as is" +without express or implied warranty. + +OMRON, NTT SOFTWARE, NTT, OPEN SOFTWARE FOUNDATION, AND SONY +CORPORATION DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING +ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT +SHALL OMRON, NTT SOFTWARE, NTT, OPEN SOFTWARE FOUNDATION, OR SONY +CORPORATION BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR +ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER +IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright 2000 by Bruno Haible + +Permission to use, copy, modify, distribute, and sell this software +and its documentation for any purpose is hereby granted without fee, +provided that the above copyright notice appear in all copies and +that both that copyright notice and this permission notice appear +in supporting documentation, and that the name of Bruno Haible not +be used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. 
Bruno Haible +makes no representations about the suitability of this software for +any purpose. It is provided "as is" without express or implied +warranty. + +Bruno Haible DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN +NO EVENT SHALL Bruno Haible BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE +OR PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright © 2003 Keith Packard + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the name of Keith Packard not be used in +advertising or publicity pertaining to distribution of the software without +specific, written prior permission. Keith Packard makes no +representations about the suitability of this software for any purpose. It +is provided "as is" without express or implied warranty. + +KEITH PACKARD DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO +EVENT SHALL KEITH PACKARD BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + ---------------------------------------- + +Copyright (c) 2007-2009, Troy D. Hanson +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + ---------------------------------------- + +Copyright 1992, 1993 by TOSHIBA Corp. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, provided +that the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the name of TOSHIBA not be used in advertising +or publicity pertaining to distribution of the software without specific, +written prior permission. TOSHIBA make no representations about the +suitability of this software for any purpose. It is provided "as is" +without express or implied warranty. 
+ +TOSHIBA DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING +ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL +TOSHIBA BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR +ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. + + + ---------------------------------------- + +Copyright IBM Corporation 1993 + +All Rights Reserved + +License to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of IBM not be +used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. + +IBM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING +ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS, AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS, IN NO EVENT SHALL +IBM BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR +ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. 
+ + ---------------------------------------- + +Copyright 1990, 1991 by OMRON Corporation, NTT Software Corporation, + and Nippon Telegraph and Telephone Corporation + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation, and that the names of OMRON, NTT Software, and NTT +not be used in advertising or publicity pertaining to distribution of the +software without specific, written prior permission. OMRON, NTT Software, +and NTT make no representations about the suitability of this +software for any purpose. It is provided "as is" without express or +implied warranty. + +OMRON, NTT SOFTWARE, AND NTT, DISCLAIM ALL WARRANTIES WITH REGARD +TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS, IN NO EVENT SHALL OMRON, NTT SOFTWARE, OR NTT, BE +LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/licenses/license.md b/licenses/license.md new file mode 100644 index 000000000..7cd375fd3 --- /dev/null +++ b/licenses/license.md @@ -0,0 +1,27 @@ +Licensing for portions of OpenDroneMap are as follows: +* ImageMagick - Apache 2.0 - http://www.imagemagick.org/script/license.php +* Jhead - None - http://www.sentex.net/~mwandel/jhead/ +* libjpeg - GPLv2 - http://sourceforge.net/projects/libjpeg/ +* Boost - Boost Software License, Version 1.0 - http://www.boost.org/LICENSE_1_0.txt +* libgsl0 - GPL - http://www.gnu.org/software/gsl/ +* liblapack - Modified BSD - http://www.netlib.org/lapack/LICENSE.txt +* Flann - BSD2 - http://opensource.org/licenses/bsd-license.php +* libzip - BSD - http://www.nih.at/libzip/LICENSE.html +* libcv - BSD - http://opencv.org/license.html +* libcvaux - BSD - http://opencv.org/license.html +* bundler - GPLv3 - http://www.gnu.org/copyleft/gpl.html +* cmvs - GPLv3 - http://www.gnu.org/copyleft/gpl.html +* pmvs2 - GPLv3 - http://www.gnu.org/copyleft/gpl.html +* parallel - GPLv3 - http://www.gnu.org/copyleft/gpl.html +* PoissonRecon - BSD - http://www.cs.jhu.edu/~misha/Code/PoissonRecon/license.txt +* vlfeat - BSD - http://www.vlfeat.org/license.html +* graclus - GPLv3 - http://www.gnu.org/copyleft/gpl.html +* PROJ.4 - MIT - http://trac.osgeo.org/proj/wiki/WikiStart#License +* PCL - BSD - http://pointclouds.org + * Flann - BSD2 - http://opensource.org/licenses/bsd-license.php + * Eigen - MPL2 - http://www.mozilla.org/MPL/2.0 + * Qhull - http://www.qhull.org/COPYING.txt + * vtk5 - BSD - http://www.vtk.org/VTK/project/license.html +* libext - https://github.com/OpenDroneMap/OpenDroneMap/blob/gh-pages/licenses/libext_copyright.txt +* libx11 - https://github.com/OpenDroneMap/OpenDroneMap/blob/gh-pages/licenses/libx11_copyright.txt +* MVS Texturing - BSD - https://github.com/nmoehrle/mvs-texturing/blob/master/LICENSE.txt diff --git a/modules/CMakeLists.txt b/modules/CMakeLists.txt new file mode 100644 index 000000000..e1afd164b --- 
/dev/null +++ b/modules/CMakeLists.txt @@ -0,0 +1,9 @@ +# Add ODM sub-modules +add_subdirectory(odm_extract_utm) +add_subdirectory(odm_georef) +add_subdirectory(odm_meshing) +add_subdirectory(odm_orthophoto) +add_subdirectory(odm_25dmeshing) +if (ODM_BUILD_SLAM) + add_subdirectory(odm_slam) +endif () diff --git a/modules/odm_25dmeshing/CMakeLists.txt b/modules/odm_25dmeshing/CMakeLists.txt new file mode 100644 index 000000000..4c19bbbed --- /dev/null +++ b/modules/odm_25dmeshing/CMakeLists.txt @@ -0,0 +1,47 @@ +project(odm_25dmeshing) +cmake_minimum_required(VERSION 2.8) + +set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR}) + +# Set pcl dir to the input spedified with option -DCGAL_DIR="path" +set(CGAL_DIR "CGAL_DIR-NOTFOUND" CACHE "CGAL_DIR" "Path to the CGAL installation directory") + +# Link +find_package(CGAL COMPONENTS Core HINTS "${CGAL_DIR}") + +if ( CGAL_FOUND ) + + find_package( Eigen3 ) + + if( EIGEN3_FOUND ) + include_directories(${EIGEN3_INCLUDE_DIR}) + add_definitions(-DCGAL_EIGEN3_ENABLED) + else() + message(FATAL_ERROR "This program requires the Eigen3 library, and will not be compiled.") + endif() + + include( ${CGAL_USE_FILE} ) + + find_package( TBB ) + if( TBB_FOUND ) + include(${TBB_USE_FILE}) + else() + message(WARNING "TBB not found, parallel processing will be disabled.") + endif() + + # Add compiler options. 
+ add_definitions(-Wall -Wextra -std=c++11) + + # Add source directory + aux_source_directory("./src" SRC_LIST) + + # Add exectuteable + add_executable(${PROJECT_NAME} ${SRC_LIST}) + + target_link_libraries(odm_25dmeshing ${CGAL_LIBRARIES} ${TBB_LIBRARIES}) +else() + + message(FATAL_ERROR "This program requires the CGAL library, and will not be compiled.") + +endif() + diff --git a/modules/odm_25dmeshing/FindEigen3.cmake b/modules/odm_25dmeshing/FindEigen3.cmake new file mode 100644 index 000000000..e56005eb3 --- /dev/null +++ b/modules/odm_25dmeshing/FindEigen3.cmake @@ -0,0 +1,84 @@ +# - Try to find Eigen3 lib +# +# This module supports requiring a minimum version, e.g. you can do +# find_package(Eigen3 3.1.2) +# to require version 3.1.2 or newer of Eigen3. +# +# Once done this will define +# +# EIGEN3_FOUND - system has eigen lib with correct version +# EIGEN3_INCLUDE_DIR - the eigen include directory +# EIGEN3_VERSION - eigen version + +# Copyright (c) 2006, 2007 Montel Laurent, +# Copyright (c) 2008, 2009 Gael Guennebaud, +# Copyright (c) 2009 Benoit Jacob +# Redistribution and use is allowed according to the terms of the 2-clause BSD license. 
+ +include(FindPackageHandleStandardArgs) + +if(NOT Eigen3_FIND_VERSION) + if(NOT Eigen3_FIND_VERSION_MAJOR) + set(Eigen3_FIND_VERSION_MAJOR 2) + endif(NOT Eigen3_FIND_VERSION_MAJOR) + if(NOT Eigen3_FIND_VERSION_MINOR) + set(Eigen3_FIND_VERSION_MINOR 91) + endif(NOT Eigen3_FIND_VERSION_MINOR) + if(NOT Eigen3_FIND_VERSION_PATCH) + set(Eigen3_FIND_VERSION_PATCH 0) + endif(NOT Eigen3_FIND_VERSION_PATCH) + + set(Eigen3_FIND_VERSION "${Eigen3_FIND_VERSION_MAJOR}.${Eigen3_FIND_VERSION_MINOR}.${Eigen3_FIND_VERSION_PATCH}") +endif(NOT Eigen3_FIND_VERSION) + +macro(_eigen3_get_version) + file(READ "${EIGEN3_INCLUDE_DIR}/Eigen/src/Core/util/Macros.h" _eigen3_version_header) + + string(REGEX MATCH "define[ \t]+EIGEN_WORLD_VERSION[ \t]+([0-9]+)" _eigen3_world_version_match "${_eigen3_version_header}") + set(EIGEN3_WORLD_VERSION "${CMAKE_MATCH_1}") + string(REGEX MATCH "define[ \t]+EIGEN_MAJOR_VERSION[ \t]+([0-9]+)" _eigen3_major_version_match "${_eigen3_version_header}") + set(EIGEN3_MAJOR_VERSION "${CMAKE_MATCH_1}") + string(REGEX MATCH "define[ \t]+EIGEN_MINOR_VERSION[ \t]+([0-9]+)" _eigen3_minor_version_match "${_eigen3_version_header}") + set(EIGEN3_MINOR_VERSION "${CMAKE_MATCH_1}") + + set(EIGEN3_VERSION ${EIGEN3_WORLD_VERSION}.${EIGEN3_MAJOR_VERSION}.${EIGEN3_MINOR_VERSION}) +endmacro(_eigen3_get_version) + +if (EIGEN3_INCLUDE_DIR) + + if (EXISTS ${EIGEN3_INCLUDE_DIR}/signature_of_eigen3_matrix_library) + # in cache already and valid + _eigen3_get_version() + set(EIGEN3_FOUND ${EIGEN3_VERSION_OK}) + + find_package_handle_standard_args(Eigen3 + REQUIRED_VARS EIGEN3_INCLUDE_DIR + VERSION_VAR EIGEN3_VERSION) + + else() + message(STATUS "Eigen3 path specified in cmake variable EIGEN3_INCLUDE_DIR is " + "set to ${EIGEN3_INCLUDE_DIR}, but that path does not contains the file " + "signature_of_eigen3_matrix_library and is considered as invalid.") + endif() + + + +else (EIGEN3_INCLUDE_DIR) + + find_path(EIGEN3_INCLUDE_DIR NAMES signature_of_eigen3_matrix_library + HINTS ENV 
EIGEN3_INC_DIR + ENV EIGEN3_DIR + PATHS ${KDE4_INCLUDE_DIR} + PATH_SUFFIXES include eigen3 eigen + DOC "Directory containing the Eigen3 header files" + ) + + if(EIGEN3_INCLUDE_DIR) + _eigen3_get_version() + endif(EIGEN3_INCLUDE_DIR) + + find_package_handle_standard_args(Eigen3 + REQUIRED_VARS EIGEN3_INCLUDE_DIR + VERSION_VAR EIGEN3_VERSION) + +endif(EIGEN3_INCLUDE_DIR) diff --git a/modules/odm_25dmeshing/src/CGAL.hpp b/modules/odm_25dmeshing/src/CGAL.hpp new file mode 100644 index 000000000..b203cae98 --- /dev/null +++ b/modules/odm_25dmeshing/src/CGAL.hpp @@ -0,0 +1,38 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +typedef CGAL::Exact_predicates_inexact_constructions_kernel Kernel; +typedef Kernel::FT FT; +typedef Kernel::Point_3 Point3; +typedef Kernel::Vector_3 Vector3; + +//We define a vertex_base with info. The "info" (size_t) allow us to keep track of the original point index. +typedef CGAL::Triangulation_vertex_base_with_info_2 Vb; +typedef CGAL::Constrained_triangulation_face_base_2 Fb; +typedef CGAL::Triangulation_data_structure_2 Tds; +typedef CGAL::Constrained_Delaunay_triangulation_2 CDT; + +typedef CGAL::Polyhedron_3 Polyhedron; +typedef Polyhedron::HalfedgeDS HalfedgeDS; + +namespace SMS = CGAL::Surface_mesh_simplification; + +// Concurrency +#ifdef CGAL_LINKED_WITH_TBB +typedef CGAL::Parallel_tag Concurrency_tag; +#else +typedef CGAL::Sequential_tag Concurrency_tag; +#endif + +//typedef CGAL::First_of_pair_property_map Point_map; +//typedef CGAL::Second_of_pair_property_map Normal_map; + diff --git a/modules/odm_25dmeshing/src/Logger.cpp b/modules/odm_25dmeshing/src/Logger.cpp new file mode 100644 index 000000000..a6c81a8b5 --- /dev/null +++ b/modules/odm_25dmeshing/src/Logger.cpp @@ -0,0 +1,31 @@ +#include "Logger.hpp" + + +Logger::Logger(bool isPrintingInCout) : isPrintingInCout_(isPrintingInCout) +{ + +} + +Logger::~Logger() +{ + +} + +void Logger::printToFile(std::string filePath) 
+{ + std::ofstream file(filePath.c_str(), std::ios::binary); + file << logStream_.str(); + file.close(); +} + +bool Logger::isPrintingInCout() const +{ + return isPrintingInCout_; +} + +void Logger::setIsPrintingInCout(bool isPrintingInCout) +{ + isPrintingInCout_ = isPrintingInCout; +} + + diff --git a/modules/odm_25dmeshing/src/Logger.hpp b/modules/odm_25dmeshing/src/Logger.hpp new file mode 100644 index 000000000..aa1dcad6a --- /dev/null +++ b/modules/odm_25dmeshing/src/Logger.hpp @@ -0,0 +1,67 @@ +#pragma once + +#include +#include +#include +#include + +/*! + * \brief The Logger class is used to store program messages in a log file. + * \details By using the << operator while printInCout is set, the class writes both to + * cout and to file, if the flag is not set, output is written to file only. + */ +class Logger +{ +public: + /*! + * \brief Logger Contains functionality for printing and displaying log information. + * \param printInCout Flag toggling if operator << also writes to cout. + */ + Logger(bool isPrintingInCout = true); + + /*! + * \brief Destructor. + */ + ~Logger(); + + /*! + * \brief print Prints the contents of the log to file. + * \param filePath Path specifying where to write the log. + */ + void printToFile(std::string filePath); + + /*! + * \brief isPrintingInCout Check if console printing flag is set. + * \return Console printing flag. + */ + bool isPrintingInCout() const; + + /*! + * \brief setIsPrintingInCout Set console printing flag. + * \param isPrintingInCout Value, if true, messages added to the log are also printed in cout. + */ + void setIsPrintingInCout(bool isPrintingInCout); + + /*! + * Operator for printing messages to log and in the standard output stream if desired. + */ + template + friend Logger& operator<< (Logger &log, T t) + { + // If console printing is enabled. + if (log.isPrintingInCout_) + { + std::cout << t; + std::cout.flush(); + } + // Write to log. 
+ log.logStream_ << t; + + return log; + } + +private: + bool isPrintingInCout_; /*!< If flag is set, log is printed in cout and written to the log. */ + + std::stringstream logStream_; /*!< Stream for storing the log. */ +}; diff --git a/modules/odm_25dmeshing/src/Odm25dMeshing.cpp b/modules/odm_25dmeshing/src/Odm25dMeshing.cpp new file mode 100644 index 000000000..9d1c1788e --- /dev/null +++ b/modules/odm_25dmeshing/src/Odm25dMeshing.cpp @@ -0,0 +1,430 @@ +#include "Odm25dMeshing.hpp" + +int Odm25dMeshing::run(int argc, char **argv) { + log << logFilePath << "\n"; + + // If no arguments were passed, print help and return early. + if (argc <= 1) { + printHelp(); + return EXIT_SUCCESS; + } + + try { + + parseArguments(argc, argv); + + loadPointCloud(); + + buildMesh(); + + } catch (const Odm25dMeshingException& e) { + log.setIsPrintingInCout(true); + log << e.what() << "\n"; + log.printToFile(logFilePath); + log << "For more detailed information, see log file." << "\n"; + return EXIT_FAILURE; + } catch (const std::exception& e) { + log.setIsPrintingInCout(true); + log << "Error in OdmMeshing:\n"; + log << e.what() << "\n"; + log.printToFile(logFilePath); + log << "For more detailed information, see log file." << "\n"; + return EXIT_FAILURE; + } + + log.printToFile(logFilePath); + + return EXIT_SUCCESS; +} + +void Odm25dMeshing::parseArguments(int argc, char **argv) { + for (int argIndex = 1; argIndex < argc; ++argIndex) { + // The argument to be parsed. 
+ std::string argument = std::string(argv[argIndex]); + + if (argument == "-help") { + printHelp(); + exit(0); + } else if (argument == "-verbose") { + log.setIsPrintingInCout(true); + } else if (argument == "-maxVertexCount" && argIndex < argc) { + ++argIndex; + if (argIndex >= argc) throw Odm25dMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + std::stringstream ss(argv[argIndex]); + ss >> maxVertexCount; + if (ss.bad()) throw Odm25dMeshingException("Argument '" + argument + "' has a bad value (wrong type)."); + maxVertexCount = std::max(maxVertexCount, 0); + log << "Vertex count was manually set to: " << maxVertexCount << "\n"; + } else if (argument == "-outliersRemovalPercentage" && argIndex < argc) { + ++argIndex; + if (argIndex >= argc) throw Odm25dMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + std::stringstream ss(argv[argIndex]); + ss >> outliersRemovalPercentage; + if (ss.bad()) throw Odm25dMeshingException("Argument '" + argument + "' has a bad value (wrong type)."); + + outliersRemovalPercentage = std::min(99.99, std::max(outliersRemovalPercentage, 0)); + log << "Outliers removal was manually set to: " << outliersRemovalPercentage << "\n"; + } else if (argument == "-wlopIterations" && argIndex < argc) { + ++argIndex; + if (argIndex >= argc) throw Odm25dMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + std::stringstream ss(argv[argIndex]); + ss >> wlopIterations; + if (ss.bad()) throw Odm25dMeshingException("Argument '" + argument + "' has a bad value (wrong type)."); + + wlopIterations = std::min(1000, std::max(wlopIterations, 1)); + log << "WLOP iterations was manually set to: " << wlopIterations << "\n"; + } else if (argument == "-inputFile" && argIndex < argc) { + ++argIndex; + if (argIndex >= argc) { + throw Odm25dMeshingException( + 
"Argument '" + argument + + "' expects 1 more input following it, but no more inputs were provided."); + } + inputFile = std::string(argv[argIndex]); + std::ifstream testFile(inputFile.c_str(), std::ios::binary); + if (!testFile.is_open()) { + throw Odm25dMeshingException( + "Argument '" + argument + "' has a bad value. (file not accessible)"); + } + testFile.close(); + log << "Reading point cloud at: " << inputFile << "\n"; + } else if (argument == "-outputFile" && argIndex < argc) { + ++argIndex; + if (argIndex >= argc) { + throw Odm25dMeshingException( + "Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + outputFile = std::string(argv[argIndex]); + std::ofstream testFile(outputFile.c_str()); + if (!testFile.is_open()) { + throw Odm25dMeshingException( + "Argument '" + argument + "' has a bad value."); + } + testFile.close(); + log << "Writing output to: " << outputFile << "\n"; + } else if (argument == "-logFile" && argIndex < argc) { + ++argIndex; + if (argIndex >= argc) { + throw Odm25dMeshingException( + "Argument '" + argument + + "' expects 1 more input following it, but no more inputs were provided."); + } + logFilePath = std::string(argv[argIndex]); + std::ofstream testFile(outputFile.c_str()); + if (!testFile.is_open()) { + throw Odm25dMeshingException( + "Argument '" + argument + "' has a bad value."); + } + testFile.close(); + log << "Writing log information to: " << logFilePath << "\n"; + } else { + printHelp(); + throw Odm25dMeshingException("Unrecognised argument '" + argument + "'"); + } + } +} + +void Odm25dMeshing::loadPointCloud(){ + PlyInterpreter interpreter(points); + + std::ifstream in(inputFile); + if (!in || !CGAL::read_ply_custom_points (in, interpreter, Kernel())){ + throw Odm25dMeshingException( + "Error when reading points and normals from:\n" + inputFile + "\n"); + } + + flipFaces = interpreter.flip_faces(); + + log << "Successfully loaded " << points.size() << " points from 
file\n"; +} + +void Odm25dMeshing::buildMesh(){ + const unsigned int NEIGHBORS = 24; + + size_t pointCount = points.size(); + size_t pointCountBeforeOutlierRemoval = pointCount; + + log << "Removing outliers... "; + + points.erase(CGAL::remove_outliers(points.begin(), points.end(), + NEIGHBORS, + outliersRemovalPercentage), + points.end()); + std::vector(points).swap(points); + pointCount = points.size(); + + log << "removed " << pointCountBeforeOutlierRemoval - pointCount << " points\n"; + + log << "Computing average spacing... "; + + FT avgSpacing = CGAL::compute_average_spacing( + points.begin(), + points.end(), + NEIGHBORS); + + log << avgSpacing << "\n"; + + log << "Grid Z sampling... "; + + size_t pointCountBeforeGridSampling = pointCount; + + double gridStep = avgSpacing / 2; + Kernel::Iso_cuboid_3 bbox = CGAL::bounding_box(points.begin(), points.end()); + Vector3 boxDiag = bbox.max() - bbox.min(); + + int gridWidth = 1 + static_cast(boxDiag.x() / gridStep + 0.5); + int gridHeight = 1 + static_cast(boxDiag.y() / gridStep + 0.5); + + #define KEY(i, j) (i * gridWidth + j) + + std::unordered_map grid; + + for (size_t c = 0; c < pointCount; c++){ + const Point3 &p = points[c]; + Vector3 relativePos = p - bbox.min(); + int i = static_cast((relativePos.x() / gridStep + 0.5)); + int j = static_cast((relativePos.y() / gridStep + 0.5)); + + if ((i >= 0 && i < gridWidth) && (j >= 0 && j < gridHeight)){ + int key = KEY(i, j); + + if (grid.find(key) == grid.end()){ + grid[key] = p; + }else if ((!flipFaces && p.z() > grid[key].z()) || (flipFaces && p.z() < grid[key].z())){ + grid[key] = p; + } + } + } + + std::vector gridPoints; + for ( auto it = grid.begin(); it != grid.end(); ++it ){ + gridPoints.push_back(it->second); + } + + pointCount = gridPoints.size(); + log << "sampled " << (pointCountBeforeGridSampling - pointCount) << " points\n"; + + const double RETAIN_PERCENTAGE = std::min(80., 100. 
* static_cast(maxVertexCount) / static_cast(pointCount)); // percentage of points to retain. + std::vector simplifiedPoints; + + log << "Performing weighted locally optimal projection simplification and regularization (retain: " << RETAIN_PERCENTAGE << "%, iterate: " << wlopIterations << ")" << "\n"; + + CGAL::wlop_simplify_and_regularize_point_set( + gridPoints.begin(), + gridPoints.end(), + std::back_inserter(simplifiedPoints), + RETAIN_PERCENTAGE, + 8 * avgSpacing, + wlopIterations, + true); + + pointCount = simplifiedPoints.size(); + + if (pointCount < 3){ + throw Odm25dMeshingException("Not enough points"); + } + + log << "Vertex count is " << pointCount << "\n"; + + typedef CDT::Point cgalPoint; + typedef CDT::Vertex_circulator Vertex_circulator; + + std::vector< std::pair > pts; + try{ + pts.reserve(pointCount); + } catch (const std::bad_alloc&){ + throw Odm25dMeshingException("Not enough memory"); + } + + for (size_t i = 0; i < pointCount; ++i){ + pts.push_back(std::make_pair(cgalPoint(simplifiedPoints[i].x(), simplifiedPoints[i].y()), i)); + } + + log << "Computing delaunay triangulation... 
"; + + CDT cdt; + cdt.insert(pts.begin(), pts.end()); + + unsigned int numberOfTriangles = static_cast(cdt.number_of_faces()); + unsigned int triIndexes = cdt.number_of_faces()*3; + + if (numberOfTriangles == 0) throw Odm25dMeshingException("No triangles in resulting mesh"); + + log << numberOfTriangles << " triangles\n"; + + std::vector vertices; + std::vector vertexIndices; + + try{ + vertices.reserve(pointCount); + vertexIndices.reserve(triIndexes); + } catch (const std::bad_alloc&){ + throw Odm25dMeshingException("Not enough memory"); + } + + + for (size_t i = 0; i < pointCount; ++i){ + vertices.push_back(simplifiedPoints[i].x()); + vertices.push_back(simplifiedPoints[i].y()); + vertices.push_back(simplifiedPoints[i].z()); + } + + for (CDT::Face_iterator face = cdt.faces_begin(); face != cdt.faces_end(); ++face) { + if (flipFaces){ + vertexIndices.push_back(face->vertex(2)->info()); + vertexIndices.push_back(face->vertex(1)->info()); + vertexIndices.push_back(face->vertex(0)->info()); + }else{ + vertexIndices.push_back(face->vertex(0)->info()); + vertexIndices.push_back(face->vertex(1)->info()); + vertexIndices.push_back(face->vertex(2)->info()); + } + } + + log << "Removing spikes... 
"; + + const float THRESHOLD = avgSpacing; + std::vector heights; + unsigned int spikesRemoved = 0; + + for (CDT::Vertex_iterator vertex = cdt.vertices_begin(); vertex != cdt.vertices_end(); ++vertex){ + // Check if the height between this vertex and its + // incident vertices is greater than THRESHOLD + Vertex_circulator vc = cdt.incident_vertices(vertex), done(vc); + + if (vc != 0){ + float height = vertices[vertex->info() * 3 + 2]; + int threshold_over_count = 0; + int vertexCount = 0; + + do{ + if (cdt.is_infinite(vc)) continue; + + float ivHeight = vertices[vc->info() * 3 + 2]; + + if (fabs(height - ivHeight) > THRESHOLD){ + threshold_over_count++; + heights.push_back(ivHeight); + } + + vertexCount++; + }while(++vc != done); + + if (vertexCount == threshold_over_count){ + // Replace the height of the vertex by the median height + // of its incident vertices + std::sort(heights.begin(), heights.end()); + + vertices[vertex->info() * 3 + 2] = heights[heights.size() / 2]; + + spikesRemoved++; + } + + heights.clear(); + } + } + + log << "removed " << spikesRemoved << " spikes\n"; + + log << "Building polyhedron... "; + + Polyhedron poly; + PolyhedronBuilder builder(vertices, vertexIndices); + poly.delegate( builder ); + + log << "done\n"; + + log << "Refining... "; + + typedef Polyhedron::Vertex_handle Vertex_handle; + std::vector new_facets; + std::vector new_vertices; + CGAL::Polygon_mesh_processing::refine(poly, + faces(poly), + std::back_inserter(new_facets), + std::back_inserter(new_vertices), + CGAL::Polygon_mesh_processing::parameters::density_control_factor(2.)); + + log << "added " << new_vertices.size() << " new vertices\n"; + +// log << "Edge collapsing... 
"; +// +// SMS::Count_stop_predicate stop(maxVertexCount * 3); +// int redgesRemoved = SMS::edge_collapse(poly, stop, +// CGAL::parameters::vertex_index_map(get(CGAL::vertex_external_index, poly)) +// .halfedge_index_map (get(CGAL::halfedge_external_index, poly)) +// .get_cost (SMS::Edge_length_cost ()) +// .get_placement(SMS::Midpoint_placement()) +// ); +// +// log << redgesRemoved << " edges removed.\n"; + + log << "Final vertex count is " << poly.size_of_vertices() << "\n"; + + log << "Saving mesh to file.\n"; + + typedef typename Polyhedron::Vertex_const_iterator VCI; + typedef typename Polyhedron::Facet_const_iterator FCI; + typedef typename Polyhedron::Halfedge_around_facet_const_circulator HFCC; + + std::filebuf fb; + fb.open(outputFile, std::ios::out); + std::ostream os(&fb); + + os << "ply\n" + << "format ascii 1.0\n" + << "element vertex " << poly.size_of_vertices() << "\n" + << "property float x\n" + << "property float y\n" + << "property float z\n" + << "element face " << poly.size_of_facets() << "\n" + << "property list uchar int vertex_index\n" + << "end_header\n"; + + for (auto it = poly.vertices_begin(); it != poly.vertices_end(); it++){ + os << it->point().x() << " " << it->point().y() << " " << it->point().z() << std::endl; + } + + typedef CGAL::Inverse_index Index; + Index index(poly.vertices_begin(), poly.vertices_end()); + + for( FCI fi = poly.facets_begin(); fi != poly.facets_end(); ++fi) { + HFCC hc = fi->facet_begin(); + HFCC hc_end = hc; + + os << circulator_size(hc) << " "; + do { + os << index[VCI(hc->vertex())] << " "; + ++hc; + } while( hc != hc_end); + + os << "\n"; + } + + fb.close(); + + log << "Successfully wrote mesh to: " << outputFile << "\n"; +} + +void Odm25dMeshing::printHelp() { + bool printInCoutPop = log.isPrintingInCout(); + log.setIsPrintingInCout(true); + + log << "Usage: odm_25dmeshing -inputFile [plyFile] [optional-parameters]\n"; + log << "Create a 2.5D mesh from an oriented point cloud (points with normals) using a 
constrained delaunay triangulation. " + << "The program requires a path to an input PLY point cloud file, all other input parameters are optional.\n\n"; + + log << " -inputFile to PLY point cloud\n" + << " -outputFile where the output PLY 2.5D mesh should be saved (default: " << outputFile << ")\n" + << " -logFile log file path (default: " << logFilePath << ")\n" + << " -verbose whether to print verbose output (default: " << (printInCoutPop ? "true" : "false") << ")\n" + << " -maxVertexCount <0 - N> Maximum number of vertices in the output mesh. The mesh might have fewer vertices, but will not exceed this limit. (default: " << maxVertexCount << ")\n" + << " -wlopIterations <1 - 1000> Iterations of the Weighted Locally Optimal Projection (WLOP) simplification algorithm. Higher values take longer but produce a smoother mesh. (default: " << wlopIterations << ")\n" + + << "\n"; + + log.setIsPrintingInCout(printInCoutPop); +} + + + diff --git a/modules/odm_25dmeshing/src/Odm25dMeshing.hpp b/modules/odm_25dmeshing/src/Odm25dMeshing.hpp new file mode 100644 index 000000000..4ebf9df39 --- /dev/null +++ b/modules/odm_25dmeshing/src/Odm25dMeshing.hpp @@ -0,0 +1,93 @@ +#pragma once + +// STL +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "CGAL.hpp" +#include "Logger.hpp" +#include "PlyInterpreter.hpp" +#include "PolyhedronBuilder.hpp" + +class Odm25dMeshing { +public: + Odm25dMeshing() : + log(false) {}; + ~Odm25dMeshing() {}; + + /*! + * \brief run Runs the meshing functionality using the provided input arguments. + * For a list of accepted arguments, please see the main page documentation or + * call the program with parameter "-help". + * \param argc Application argument count. + * \param argv Argument values. + * \return 0 If successful. + */ + int run(int argc, char **argv); + +private: + + /*! + * \brief parseArguments Parses command line arguments. 
+ * \param argc Application argument count. + * \param argv Argument values. + */ + void parseArguments(int argc, char** argv); + + /*! + * \brief loadPointCloud Loads a PLY file with points and normals from file. + */ + void loadPointCloud(); + + /*! + * \brief loadPointCloud Builds a 2.5D mesh from loaded points + */ + void buildMesh(); + + /*! + * \brief printHelp Prints help, explaining usage. Can be shown by calling the program with argument: "-help". + */ + void printHelp(); + + Logger log; + + std::string inputFile = ""; + std::string outputFile = "odm_25dmesh.ply"; + std::string logFilePath = "odm_25dmeshing_log.txt"; + unsigned int maxVertexCount = 100000; + double outliersRemovalPercentage = 2; + unsigned int wlopIterations = 35; + std::vector points; + bool flipFaces = false; +}; + +class Odm25dMeshingException: public std::exception { + +public: + Odm25dMeshingException() : + message("Error in Odm25dMeshing") { + } + Odm25dMeshingException(std::string msgInit) : + message("Error in Odm25dMeshing:\n" + msgInit) { + } + ~Odm25dMeshingException() throw () { + } + virtual const char* what() const throw () { + return message.c_str(); + } + +private: + std::string message; /**< The error message **/ +}; diff --git a/modules/odm_25dmeshing/src/PlyInterpreter.cpp b/modules/odm_25dmeshing/src/PlyInterpreter.cpp new file mode 100644 index 000000000..f8a074888 --- /dev/null +++ b/modules/odm_25dmeshing/src/PlyInterpreter.cpp @@ -0,0 +1,40 @@ +#include "PlyInterpreter.hpp" + +// Init and test if input file contains the right properties +bool PlyInterpreter::is_applicable(CGAL::Ply_reader& reader) { + return reader.does_tag_exist ("x") + && reader.does_tag_exist ("y") + && reader.does_tag_exist ("z") + && reader.does_tag_exist ("nx") + && reader.does_tag_exist ("ny") + && reader.does_tag_exist ("nz"); +} + +// Describes how to process one line (= one point object) +void PlyInterpreter::process_line(CGAL::Ply_reader& reader) { + FT x = (FT)0., y = (FT)0., z = (FT)0., 
+ nx = (FT)0., ny = (FT)0., nz = (FT)0.; + + reader.assign (x, "x"); + reader.assign (y, "y"); + reader.assign (z, "z"); + reader.assign (nx, "nx"); + reader.assign (ny, "ny"); + reader.assign (nz, "nz"); + + Point3 p(x, y, z); +// Vector3 n(nx, ny, nz); + + if (nz >= 0 && zNormalsDirectionCount < std::numeric_limits::max()){ + zNormalsDirectionCount++; + }else if (nz < 0 && zNormalsDirectionCount > std::numeric_limits::min()){ + zNormalsDirectionCount--; + } + +// points.push_back(std::make_pair(p, n)); + points.push_back(p); +} + +bool PlyInterpreter::flip_faces(){ + return zNormalsDirectionCount < 0; +} diff --git a/modules/odm_25dmeshing/src/PlyInterpreter.hpp b/modules/odm_25dmeshing/src/PlyInterpreter.hpp new file mode 100644 index 000000000..eca6bcb77 --- /dev/null +++ b/modules/odm_25dmeshing/src/PlyInterpreter.hpp @@ -0,0 +1,27 @@ +#pragma once + +#include +#include +#include +#include + +#include +#include + +#include "CGAL.hpp" + +// points, normals +//typedef std::pair Pwn; + +class PlyInterpreter { + std::vector& points; + long zNormalsDirectionCount; + + public: + PlyInterpreter (std::vector& points) + : points (points), zNormalsDirectionCount(0) + { } + bool is_applicable (CGAL::Ply_reader& reader); + void process_line (CGAL::Ply_reader& reader); + bool flip_faces(); +}; diff --git a/modules/odm_25dmeshing/src/PolyhedronBuilder.cpp b/modules/odm_25dmeshing/src/PolyhedronBuilder.cpp new file mode 100644 index 000000000..69b06d441 --- /dev/null +++ b/modules/odm_25dmeshing/src/PolyhedronBuilder.cpp @@ -0,0 +1,2 @@ +#include "PolyhedronBuilder.hpp" + diff --git a/modules/odm_25dmeshing/src/PolyhedronBuilder.hpp b/modules/odm_25dmeshing/src/PolyhedronBuilder.hpp new file mode 100644 index 000000000..f68dbe0c7 --- /dev/null +++ b/modules/odm_25dmeshing/src/PolyhedronBuilder.hpp @@ -0,0 +1,43 @@ +#include +#include + +#include +#include + +#include "CGAL.hpp" + +// A modifier creating a triangle with the incremental builder. 
+template +class PolyhedronBuilder : public CGAL::Modifier_base { +public: + std::vector &vertices; + std::vector &vertexIndices; + + PolyhedronBuilder( std::vector &vertices, std::vector &vertexIndices ) + : vertices(vertices), vertexIndices(vertexIndices) {} + + void operator()( HDS& hds) { + typedef typename HDS::Vertex Vertex; + typedef typename Vertex::Point Point; + + CGAL::Polyhedron_incremental_builder_3 builder( hds, true); + builder.begin_surface( vertices.size() / 3, vertexIndices.size() / 3 ); + + for(size_t i = 0; i < vertices.size(); i+=3 ){ + builder.add_vertex(Point(vertices[i+0], vertices[i+1], vertices[i+2])); + } + + for(size_t i = 0; i < vertexIndices.size(); i+=3){ + builder.begin_facet(); + builder.add_vertex_to_facet(vertexIndices[i+0]); + builder.add_vertex_to_facet(vertexIndices[i+1]); + builder.add_vertex_to_facet(vertexIndices[i+2]); + builder.end_facet(); + } + + // finish up the surface + builder.end_surface(); + } +}; + + diff --git a/modules/odm_25dmeshing/src/main.cpp b/modules/odm_25dmeshing/src/main.cpp new file mode 100644 index 000000000..75e1934ec --- /dev/null +++ b/modules/odm_25dmeshing/src/main.cpp @@ -0,0 +1,31 @@ +/* +OpenDroneMap - https://www.opendronemap.org +Copyright (C) 2017 OpenDroneMap Contributors + +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program. If not, see . +*/ +#include "Odm25dMeshing.hpp" + +/*! 
+ * \mainpage main OpenDroneMap 2.5D Meshing Module + * + * The OpenDroneMap 2.5D Meshing Module generates a 2.5D mesh using a constrained + * delaunay triangulation from any point cloud (points with corresponding normals). + */ + +int main(int argc, char** argv) +{ + Odm25dMeshing om; + return om.run(argc, argv); +} diff --git a/modules/odm_extract_utm/CMakeLists.txt b/modules/odm_extract_utm/CMakeLists.txt new file mode 100644 index 000000000..460b83e96 --- /dev/null +++ b/modules/odm_extract_utm/CMakeLists.txt @@ -0,0 +1,21 @@ +project(odm_extract_utm) +cmake_minimum_required(VERSION 2.8) + +set(PROJ4_INCLUDE_DIR "/usr/include/" CACHE "PROJ4_INCLUDE_DIR" "Path to the proj4 inlcude directory") + +find_library(PROJ4_LIBRARY "libproj.so" PATHS "/usr/lib" "/usr/lib/x86_64-linux-gnu") +find_library(EXIV2_LIBRARY "libexiv2.so" PATHS "/usr/lib" "/usr/lib/x86_64-linux-gnu") + +# Add compiler options. +add_definitions(-Wall -Wextra) + +# Add source directory +aux_source_directory("./src" SRC_LIST) + +# Add exectuteable +add_executable(${PROJECT_NAME} ${SRC_LIST}) + +# Link +target_link_libraries(${PROJECT_NAME} ${PROJ4_LIBRARY}) +target_link_libraries(${PROJECT_NAME} ${EXIV2_LIBRARY}) + diff --git a/modules/odm_extract_utm/src/Logger.cpp b/modules/odm_extract_utm/src/Logger.cpp new file mode 100644 index 000000000..a6c81a8b5 --- /dev/null +++ b/modules/odm_extract_utm/src/Logger.cpp @@ -0,0 +1,31 @@ +#include "Logger.hpp" + + +Logger::Logger(bool isPrintingInCout) : isPrintingInCout_(isPrintingInCout) +{ + +} + +Logger::~Logger() +{ + +} + +void Logger::printToFile(std::string filePath) +{ + std::ofstream file(filePath.c_str(), std::ios::binary); + file << logStream_.str(); + file.close(); +} + +bool Logger::isPrintingInCout() const +{ + return isPrintingInCout_; +} + +void Logger::setIsPrintingInCout(bool isPrintingInCout) +{ + isPrintingInCout_ = isPrintingInCout; +} + + diff --git a/modules/odm_extract_utm/src/Logger.hpp b/modules/odm_extract_utm/src/Logger.hpp new 
file mode 100644 index 000000000..31c5538cb --- /dev/null +++ b/modules/odm_extract_utm/src/Logger.hpp @@ -0,0 +1,68 @@ +#pragma once + +// STL +#include +#include +#include +#include + +/*! + * \brief The Logger class is used to store program messages in a log file. + * \details By using the << operator while printInCout is set, the class writes both to + * cout and to file, if the flag is not set, output is written to file only. + */ +class Logger +{ +public: + /*! + * \brief Logger Contains functionality for printing and displaying log information. + * \param printInCout Flag toggling if operator << also writes to cout. + */ + Logger(bool isPrintingInCout = true); + + /*! + * \brief Destructor. + */ + ~Logger(); + + /*! + * \brief print Prints the contents of the log to file. + * \param filePath Path specifying where to write the log. + */ + void printToFile(std::string filePath); + + /*! + * \brief isPrintingInCout Check if console printing flag is set. + * \return Console printing flag. + */ + bool isPrintingInCout() const; + + /*! + * \brief setIsPrintingInCout Set console printing flag. + * \param isPrintingInCout Value, if true, messages added to the log are also printed in cout. + */ + void setIsPrintingInCout(bool isPrintingInCout); + + /*! + * Operator for printing messages to log and in the standard output stream if desired. + */ + template + friend Logger& operator<< (Logger &log, T t) + { + // If console printing is enabled. + if (log.isPrintingInCout_) + { + std::cout << t; + std::cout.flush(); + } + // Write to log. + log.logStream_ << t; + + return log; + } + +private: + bool isPrintingInCout_; /*!< If flag is set, log is printed in cout and written to the log. */ + + std::stringstream logStream_; /*!< Stream for storing the log. 
*/ +}; diff --git a/modules/odm_extract_utm/src/UtmExtractor.cpp b/modules/odm_extract_utm/src/UtmExtractor.cpp new file mode 100644 index 000000000..ff2687843 --- /dev/null +++ b/modules/odm_extract_utm/src/UtmExtractor.cpp @@ -0,0 +1,353 @@ +// STL +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Proj4 +#include + +// This +#include "UtmExtractor.hpp" + +UtmExtractor::UtmExtractor() : log_(false) +{ + logFile_ = "odm_extracting_utm_log.txt"; +} + +UtmExtractor::~UtmExtractor() +{ +} + + + +int UtmExtractor::run(int argc, char **argv) +{ + if (argc <= 1) + { + printHelp(); + return EXIT_SUCCESS; + } + + try + { + parseArguments(argc, argv); + extractUtm(); + } + catch (const UtmExtractorException& e) + { + log_.setIsPrintingInCout(true); + log_ << e.what() << "\n"; + log_.printToFile(logFile_); + log_ << "For more detailed information, see log file." << "\n"; + return EXIT_FAILURE; + } + catch (const std::exception& e) + { + log_.setIsPrintingInCout(true); + log_ << "Error in OdmExtractUtm:\n"; + log_ << e.what() << "\n"; + log_.printToFile(logFile_); + log_ << "For more detailed information, see log file." << "\n"; + return EXIT_FAILURE; + } + catch (...) + { + log_.setIsPrintingInCout(true); + log_ << "Unknown error in OdmExtractUtm:\n"; + log_.printToFile(logFile_); + log_ << "For more detailed information, see log file." 
<< "\n"; + return EXIT_FAILURE; + } + + log_.printToFile(logFile_); + return EXIT_SUCCESS; +} + +void UtmExtractor::parseArguments(int argc, char **argv) +{ + for(int argIndex = 1; argIndex < argc; ++argIndex) + { + // The argument to be parsed + std::string argument = std::string(argv[argIndex]); + if (argument == "-help") + { + printHelp(); + } + else if (argument == "-verbose") + { + log_.setIsPrintingInCout(true); + } + else if (argument == "-imageListFile") + { + ++argIndex; + if (argIndex >= argc) + { + throw UtmExtractorException("Missing argument for '" + argument + "'."); + } + imageListFileName_ = std::string(argv[argIndex]); + std::ifstream testFile(imageListFileName_.c_str(), std::ios_base::binary); + if (!testFile.is_open()) + { + throw UtmExtractorException("Argument '" + argument + "' has a bad value (file not accessible)."); + } + log_ << "imageListFile was set to: " << imageListFileName_ << "\n"; + } + else if (argument == "-imagesPath") + { + ++argIndex; + if (argIndex >= argc) + { + throw UtmExtractorException("Missing argument for '" + argument + "'."); + } + std::stringstream ss(argv[argIndex]); + ss >> imagesPath_; + if (ss.bad()) + { + throw UtmExtractorException("Argument '" + argument + "' has a bad value. (wrong type)"); + } + log_ << "imagesPath was set to: " << imagesPath_ << "\n"; + } + else if (argument == "-outputCoordFile") + { + ++argIndex; + if (argIndex >= argc) + { + throw UtmExtractorException("Missing argument for '" + argument + "'."); + } + std::stringstream ss(argv[argIndex]); + ss >> outputCoordFileName_; + if (ss.bad()) + { + throw UtmExtractorException("Argument '" + argument + "' has a bad value. 
(wrong type)"); + } + log_ << "outputCoordFile was set to: " << outputCoordFileName_ << "\n"; + } + else if (argument == "-logFile") + { + ++argIndex; + if (argIndex >= argc) + { + throw UtmExtractorException("Missing argument for '" + argument + "'."); + } + std::stringstream ss(argv[argIndex]); + ss >> logFile_; + if (ss.bad()) + { + throw UtmExtractorException("Argument '" + argument + "' has a bad value. (wrong type)"); + } + log_ << "logFile_ was set to: " << logFile_ << "\n"; + } + else + { + printHelp(); + throw UtmExtractorException("Unrecognised argument '" + argument + "'."); + } + } + +} + +void UtmExtractor::extractUtm() +{ + // Open file listing all used camera images + std::ifstream imageListStream(imageListFileName_.c_str()); + if (!imageListStream.good()) { + throw UtmExtractorException("Failed to open " + imageListFileName_ + " for reading."); + } + + // Traverse images + int utmZone = 99; // for auto-select + char hemisphere; + std::string imageFilename; + std::vector coords; + while (getline(imageListStream, imageFilename)) { + + // Read image and load metadata + Exiv2::Image::AutoPtr image = Exiv2::ImageFactory::open(imagesPath_ + "/" + imageFilename); + if (image.get() == 0) { + std::string error(imageFilename); + error += ": Image cannot be read"; + throw Exiv2::Error(1, error); + } + else { + image->readMetadata(); + + Exiv2::ExifData &exifData = image->exifData(); + if (exifData.empty()) { + std::string error(imageFilename); + error += ": No Exif data found in the file"; + throw Exiv2::Error(1, error); + } + + // Parse exif data for positional data + double lon, lat, alt = 0.0; + + parsePosition(exifData, lon, lat, alt); + + if (lon == 0.0 || lat == 0.0 || alt == 0.0) { + std::string error("Failed parsing GPS position for " + imageFilename); + throw UtmExtractorException(error); + } + // Convert to UTM + double x, y, z = 0.0; + convert(lon, lat, alt, x, y, z, utmZone, hemisphere); + if (x == 0.0 || y == 0.0 || z == 0.0) { + std::string 
error("Failed to convert GPS position to UTM for " + imageFilename); + throw UtmExtractorException(error); + } + coords.push_back(Coord(x, y, z)); + } + } + imageListStream.close(); + + // Calculate average + double dx = 0.0, dy = 0.0; + double num = static_cast(coords.size()); + for (std::vector::iterator iter = coords.begin(); iter != coords.end(); ++iter) { + dx += iter->x/num; + dy += iter->y/num; + } + + dx = floor(dx); + dy = floor(dy); + + // Open output file + std::ofstream outputCoordStream(outputCoordFileName_.c_str()); + if (!outputCoordStream.good()) { + throw UtmExtractorException("Failed to open " + outputCoordFileName_ + " for writing."); + } + outputCoordStream.precision(10); + + // Write coordinate file + outputCoordStream << "WGS84 UTM " << utmZone << hemisphere << std::endl; + outputCoordStream << dx << " " << dy << std::endl; + for (std::vector::iterator iter = coords.begin(); iter != coords.end(); ++iter) { + outputCoordStream << (iter->x - dx) << " " << (iter->y - dy) << " " << iter->z << std::endl; + } + + outputCoordStream.close(); + +} + +void UtmExtractor::convert(const double &lon, const double &lat, const double &alt, double &x, double &y, double &z, int &utmZone, char &hemisphere) +{ + // Create WGS84 longitude/latitude coordinate system + projPJ pjLatLon = pj_init_plus("+proj=latlong +datum=WGS84"); + if (!pjLatLon) { + throw UtmExtractorException("Couldn't create WGS84 coordinate system with PROJ.4."); + } + + // Calculate UTM zone if it's set to magic 99 + // NOTE: Special UTM cases in Norway/Svalbard not supported here + if (utmZone == 99) { + utmZone = ((static_cast(floor((lon + 180.0)/6.0)) % 60) + 1); + if (lat < 0) + hemisphere = 'S'; + else + hemisphere = 'N'; + } + + std::ostringstream ostr; + ostr << utmZone; + if (hemisphere == 'S') + ostr << " +south"; + + // Create UTM coordinate system + projPJ pjUtm = pj_init_plus(("+proj=utm +datum=WGS84 +zone=" + ostr.str()).c_str()); + if (!pjUtm) { + throw 
UtmExtractorException("Couldn't create UTM coordinate system with PROJ.4."); + } + + // Convert to radians + x = lon * DEG_TO_RAD; + y = lat * DEG_TO_RAD; + z = alt; + + // Transform + int res = pj_transform(pjLatLon, pjUtm, 1, 1, &x, &y, &z); + if (res != 0) { + throw UtmExtractorException("Failed to transform coordinates"); + } +} + +void UtmExtractor::parsePosition(Exiv2::ExifData &exifData, double &lon, double &lat, double &alt) +{ + Exiv2::Exifdatum& latitudeTag = exifData["Exif.GPSInfo.GPSLatitude"]; + Exiv2::Exifdatum& latitudeRef = exifData["Exif.GPSInfo.GPSLatitudeRef"]; + Exiv2::Exifdatum& longitudeTag = exifData["Exif.GPSInfo.GPSLongitude"]; + Exiv2::Exifdatum& longitudeRef = exifData["Exif.GPSInfo.GPSLongitudeRef"]; + Exiv2::Exifdatum& altitudeTag = exifData["Exif.GPSInfo.GPSAltitude"]; + Exiv2::Exifdatum& altitudeRef = exifData["Exif.GPSInfo.GPSAltitudeRef"]; + + // Latitude: parse into a double + if (latitudeTag.count() < 3) + throw UtmExtractorException("Image is missing GPS Latitude data"); + else { + Exiv2::URational rLat[] = {latitudeTag.toRational(0), latitudeTag.toRational(1), latitudeTag.toRational(2)}; + bool south = (strcmp(latitudeRef.toString().c_str(), "S") == 0); + double degrees, minutes, seconds; + + degrees = (double)rLat[0].first / (double)rLat[0].second; + minutes = (double)rLat[1].first / (double)rLat[1].second / 60.0; + seconds = (double)rLat[2].first / (double)rLat[2].second / 3600.0; + lat = (south ? 
-1 : 1) * (degrees + minutes + seconds); + } + + // Longitude + if (longitudeTag.count() < 3) + throw UtmExtractorException("Image is missing GPS Longitude data"); + else { + Exiv2::URational rLon[] = {longitudeTag.toRational(0), longitudeTag.toRational(1), longitudeTag.toRational(2)}; + bool west = (strcmp(longitudeRef.toString().c_str(), "W") == 0); + double degrees, minutes, seconds; + + degrees = (double)rLon[0].first / (double)rLon[0].second; + minutes = (double)rLon[1].first / (double)rLon[1].second / 60.0; + seconds = (double)rLon[2].first / (double)rLon[2].second / 3600.0; + lon = (west ? -1 : 1) * (degrees + minutes + seconds); + } + + // Altitude + if (altitudeTag.count() < 1) + throw UtmExtractorException("Image is missing GPS Altitude data"); + else { + Exiv2::URational rAlt = altitudeTag.toRational(0); + bool below = (altitudeRef.count() >= 1 && altitudeRef.toLong() == 1); + alt = (below ? -1 : 1) * (double) rAlt.first / (double) rAlt.second; + } +} + +void UtmExtractor::printHelp() +{ +log_.setIsPrintingInCout(true); + + log_ << "Purpose:\n"; + log_ << "Create a coordinate file containing the GPS positions of all cameras to be used later in the ODM toolchain for automatic georeferecing.\n"; + + log_ << "Usage:\n"; + log_ << "The program requires paths to a image list file, a image folder path and an output textfile to store the results.\n"; + + log_ << "The following flags are available:\n"; + log_ << "Call the program with flag \"-help\", or without parameters to print this message, or check any generated log file.\n"; + log_ << "Call the program with flag \"-verbose\", to print log messages in the standard output.\n\n"; + + log_ << "Parameters are specified as: \"- \", (without <>), and the following parameters are configurable:\n"; + log_ << "\"-imageListFile \" (mandatory)\n"; + log_ << "Path to the list containing the image names used in the bundle.out file.\n"; + + log_ << "\"-imagesPath \" (mandatory)\n"; + log_ << "Path folder containing all 
images in the imageListFile.\n"; + + log_ << "\"-outputCoordFile \" (mandatory)\n"; + log_ << "Path output textfile.\n"; + + log_.setIsPrintingInCout(false); +} diff --git a/modules/odm_extract_utm/src/UtmExtractor.hpp b/modules/odm_extract_utm/src/UtmExtractor.hpp new file mode 100644 index 000000000..64b8e4155 --- /dev/null +++ b/modules/odm_extract_utm/src/UtmExtractor.hpp @@ -0,0 +1,98 @@ +#pragma once + +// Logging +#include "Logger.hpp" +#include + + +/*! +* \breif The Coord struct Class used in UtmExtractor to extract GPS positions from images and ODM output +*/ +struct Coord +{ + double x, y, z; + Coord(double ix, double iy, double iz) : x(ix), y(iy), z(iz) {} +}; + +class UtmExtractor +{ +public: + UtmExtractor(); + ~UtmExtractor(); + + /*! + * \brief run Runs the texturing functionality using the provided input arguments. + * For a list of the accepted arguments, please see the main page documentation or + * call the program with parameter "-help". + * \param argc Application argument count. + * \param argv Argument values. + * \return 0 if successful. + */ + int run (int argc, char **argv); + +private: + + /*! + * \brief parseArguments Parses command line arguments. + * \param argc Application argument count. + * \param argv Argument values. + */ + void parseArguments(int argc, char **argv); + + /*! + * \breif extractUtm Performs the extraction of coordinates inside the run function. + */ + void extractUtm(); + + /*! + * /brief Static method that converts a WGS84 longitude/latitude coordinate in decimal degrees to UTM. + * + * \param lon The longitude in decimal degrees (negative if western hemisphere). + * \param lat The latitude in decimal degrees (negative if southern hemisphere). + * \param alt The altitude in meters. + * \param x Output parameter, the easting UTM value in meters. + * \param y Output parameter, the northing UTM value in meters. + * \param utmZone Input or output parameter, the UTM zone. Set to 99 for automatic selection. 
+ * \param hemisphere Input or output parameter, 'N' for norther hemisphere, 'S' for southern. Automatically selected if utmZone is 99. + * + * \returns True if successful (otherwise output parameters are 0) + */ + static void convert(const double &lon, const double &lat, const double &alt, double &x, double &y, double &z, int &utmZone, char &hemisphere); + + /*! + * \brief Static method that parses a GPS position from jhead data. + * + * \param jheadDataStream Jhead data stream with EXIF information. + * \param lon Output parameter, the longitude in decimal degrees. + * \param lat Output parameter, the latitude in decimal degrees. + * \param alt Output parameter, the altitude in meters. + * + * \returns True if successful (otherwise output parameters are 0) + */ + static void parsePosition(Exiv2::ExifData &exifData, double &lon, double &lat, double &alt); + + /*! + * \brief printHelp Prints help, explaining usage. Can be shown by calling the program with arguments: "-help". + */ + void printHelp(); + + std::string imageListFileName_; /**< Path to the image list. */ + std::string outputCoordFileName_; /**< Path to the file to store the output textfile. */ + std::string imagesPath_; /**< Path to the folder with all images in the image list. */ + + Logger log_; /**< Logging object. */ + std::string logFile_; /**< Path to store the log file. */ + +}; + +class UtmExtractorException : public std::exception +{ +public: + UtmExtractorException() : message("Error in OdmExtractUtm") {} + UtmExtractorException(std::string msgInit) : message("Error in OdmExtractUtm:\n" + msgInit) {} + ~UtmExtractorException() throw() {} + virtual const char* what() const throw() {return message.c_str(); } + +private: + std::string message; /**< The error message. 
*/ +}; diff --git a/modules/odm_extract_utm/src/main.cpp b/modules/odm_extract_utm/src/main.cpp new file mode 100644 index 000000000..1d1aa158b --- /dev/null +++ b/modules/odm_extract_utm/src/main.cpp @@ -0,0 +1,9 @@ + + +#include "UtmExtractor.hpp" + +int main (int argc, char **argv) +{ + UtmExtractor utmExtractor; + return utmExtractor.run(argc, argv); +} diff --git a/modules/odm_georef/CMakeLists.txt b/modules/odm_georef/CMakeLists.txt new file mode 100644 index 000000000..765bf7c87 --- /dev/null +++ b/modules/odm_georef/CMakeLists.txt @@ -0,0 +1,36 @@ +project(odm_georef) +cmake_minimum_required(VERSION 2.8) + +# Set pcl dir to the input spedified with option -DPCL_DIR="path" +set(PCL_DIR "PCL_DIR-NOTFOUND" CACHE "PCL_DIR" "Path to the pcl installation directory") +set(OPENCV_DIR "OPENCV_DIR-NOTFOUND" CACHE "OPENCV_DIR" "Path to the opencv installation directory") +set(PROJ4_INCLUDE_DIR "/usr/include/" CACHE "PROJ4_INCLUDE_DIR" "Path to the proj4 inlcude directory") +find_library(PROJ4_LIBRARY "libproj.so" PATHS "/usr/lib" "/usr/lib/x86_64-linux-gnu") +#set(PROJ4_LIBRARY "/usr/lib/x86_64-linux-gnu/libproj.so" CACHE "PROJ4_LIBRARY" "Path to the proj4 library directory") + +# Add compiler options. 
+add_definitions(-Wall -Wextra -Wconversion -pedantic) +#add_definitions(-pedantic -pedantic-errors -Wall -Wextra -Werror -Wfatal-errors -Wabi -Wctor-dtor-privacy -Wnon-virtual-dtor -Wreorder -Weffc++ -Wstrict-null-sentinel -Wnon-template-friend -Wold-style-cast -Woverloaded-virtual -Wpmf-conversions -Wsign-promo -Waddress -Warray-bounds -Wattributes -Wbuiltin-macro-redefined -Wc++0x-compat -Wcast-align -Wcast-qual -Wchar-subscripts -Wclobbered -Wcomment -Wconversion -Wcoverage-mismatch -Wdeprecated -Wdeprecated-declarations -Wdisabled-optimization -Wdiv-by-zero -Wempty-body -Wenum-compare -Wendif-labels -Wfatal-errors -Wfloat-equal -Wformat -Wformat=2 -Wformat-contains-nul -Wformat-extra-args -Wformat-nonliteral -Wformat-security -Wformat-y2k -Wignored-qualifiers -Winit-self -Wint-to-pointer-cast -Winvalid-offsetof -Winvalid-pch -Wlogical-op -Wmain -Wvariadic-macros -Wmissing-braces -Wmissing-field-initializers -Wmissing-include-dirs -Wmissing-noreturn -Wvla -Wmultichar -Wfatal-errors -Wnonnull -Woverflow -Woverlength-strings -Wpacked -Wpacked-bitfield-compat -Wparentheses -Wpointer-arith -Wredundant-decls -Wsequence-point -Wshadow -Wsign-compare -Wsign-conversion -Wstack-protector -Wstrict-overflow=5 -Wswitch -Wswitch-enum -Wsync-nand -Wvolatile-register-var -Wtrigraphs -Wtype-limits -Wuninitialized -Wunknown-pragmas -Wwrite-strings -Wpragmas -Wunreachable-code -Wunused -Wunused-function -Wunused-label -Wunused-parameter -Wunused-value -Wunused-variable -Wno-return-type) + +# Find pcl at the location specified by PCL_DIR +find_package(PCL 1.8 HINTS "${PCL_DIR}/share/pcl-1.8") + +# Find OpenCV at the default location +find_package(OpenCV HINTS "${OPENCV_DIR}" REQUIRED) + +# Only link with required opencv modules. +set(OpenCV_LIBS opencv_core opencv_imgproc opencv_highgui) + +# Add the PCL and Eigen include dirs. +# Necessary since the PCL_INCLUDE_DIR variable set bu find_package is broken.) 
+include_directories(${PCL_ROOT}/include/pcl-${PCL_VERSION_MAJOR}.${PCL_VERSION_MINOR}) +include_directories(${EIGEN_ROOT}) + +# Add source directory +aux_source_directory("./src" SRC_LIST) + +# Add exectuteable +add_executable(${PROJECT_NAME} ${SRC_LIST}) + +# Link +target_link_libraries(${PROJECT_NAME} ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES} ${PROJ4_LIBRARY} ${OpenCV_LIBS}) diff --git a/modules/odm_georef/src/FindTransform.cpp b/modules/odm_georef/src/FindTransform.cpp new file mode 100644 index 000000000..a0f0a581c --- /dev/null +++ b/modules/odm_georef/src/FindTransform.cpp @@ -0,0 +1,149 @@ +// This +#include "FindTransform.hpp" + +Vec3::Vec3(double x, double y, double z) :x_(x), y_(y), z_(z) +{ + +} +Vec3::Vec3(const Vec3 &o) : x_(o.x_), y_(o.y_), z_(o.z_) +{ + +} + +Vec3 Vec3::cross(Vec3 o) const +{ + Vec3 res; + res.x_ = y_*o.z_ - z_*o.y_; + res.y_ = z_*o.x_ - x_*o.z_; + res.z_ = x_*o.y_ - y_*o.x_; + return res; +} + +double Vec3::dot(Vec3 o) const +{ + return x_*o.x_ + y_*o.y_ + z_*o.z_; +} + +double Vec3::length() const +{ + return sqrt(x_*x_ + y_*y_ + z_*z_); +} + +Vec3 Vec3::norm() const +{ + Vec3 res; + double l = length(); + res.x_ = x_ / l; + res.y_ = y_ / l; + res.z_ = z_ / l; + return res; +} + +Vec3 Vec3::operator*(double d) const +{ + return Vec3(x_*d, y_*d, z_*d); +} + +Vec3 Vec3::operator+(Vec3 o) const +{ + return Vec3(x_ + o.x_, y_ + o.y_,z_ + o.z_); +} + +Vec3 Vec3::operator-(Vec3 o) const +{ + return Vec3(x_ - o.x_, y_ - o.y_,z_ - o.z_); +} + +OnMat3::OnMat3(Vec3 r1, Vec3 r2, Vec3 r3) : r1_(r1), r2_(r2), r3_(r3) +{ + c1_.x_ = r1_.x_; c2_.x_ = r1_.y_; c3_.x_ = r1_.z_; + c1_.y_ = r2_.x_; c2_.y_ = r2_.y_; c3_.y_ = r2_.z_; + c1_.z_ = r3_.x_; c2_.z_ = r3_.y_; c3_.z_ = r3_.z_; +} +OnMat3::OnMat3(const OnMat3 &o) : r1_(o.r1_), r2_(o.r2_), r3_(o.r3_) +{ + c1_.x_ = r1_.x_; c2_.x_ = r1_.y_; c3_.x_ = r1_.z_; + c1_.y_ = r2_.x_; c2_.y_ = r2_.y_; c3_.y_ = r2_.z_; + c1_.z_ = r3_.x_; c2_.z_ = r3_.y_; c3_.z_ = r3_.z_; +} + 
+double OnMat3::det() const +{ + return r1_.x_*r2_.y_*r3_.z_ + r1_.y_*r2_.z_*r3_.x_ + r1_.z_*r2_.x_*r3_.y_ - r1_.z_*r2_.y_*r3_.x_ - r1_.y_*r2_.x_*r3_.z_ - r1_.x_*r2_.z_*r3_.y_; +} + +OnMat3 OnMat3::transpose() const +{ + return OnMat3(Vec3(r1_.x_, r2_.x_, r3_.x_), Vec3(r1_.y_, r2_.y_, r3_.y_), Vec3(r1_.z_, r2_.z_, r3_.z_)); +} + +OnMat3 OnMat3::operator*(OnMat3 o) const +{ + return OnMat3( Vec3(r1_.dot(o.c1_), r1_.dot(o.c2_), r1_.dot(o.c3_)), + Vec3(r2_.dot(o.c1_), r2_.dot(o.c2_), r2_.dot(o.c3_)), + Vec3(r3_.dot(o.c1_), r3_.dot(o.c2_), r3_.dot(o.c3_))); +} + +Vec3 OnMat3::operator*(Vec3 o) +{ + return Vec3(r1_.dot(o), r2_.dot(o), r3_.dot(o)); +} + +Mat4::Mat4() +{ + r1c1_ = 1.0; r1c2_ = 0.0; r1c3_ = 0.0; r1c4_ = 0.0; + r2c1_ = 0.0; r2c2_ = 1.0; r2c3_ = 0.0; r2c4_ = 0.0; + r3c1_ = 0.0; r3c2_ = 0.0; r3c3_ = 1.0; r3c4_ = 0.0; + r4c1_ = 0.0; r4c2_ = 0.0; r4c3_ = 0.0; r4c4_ = 1.0; +} + +Mat4::Mat4(OnMat3 rotation, Vec3 translation, double scaling) +{ + r1c1_ = scaling * rotation.r1_.x_; r1c2_ = scaling * rotation.r1_.y_; r1c3_ = scaling * rotation.r1_.z_; r1c4_ = translation.x_; + r2c1_ = scaling * rotation.r2_.x_; r2c2_ = scaling * rotation.r2_.y_; r2c3_ = scaling * rotation.r2_.z_; r2c4_ = translation.y_; + r3c1_ = scaling * rotation.r3_.x_; r3c2_ = scaling * rotation.r3_.y_; r3c3_ = scaling * rotation.r3_.z_; r3c4_ = translation.z_; + r4c1_ = 0.0; r4c2_ = 0.0; r4c3_ = 0.0; r4c4_ = 1.0; +} + +Vec3 Mat4::operator*(Vec3 o) +{ + return Vec3( + r1c1_ * o.x_ + r1c2_* o.y_ + r1c3_* o.z_ + r1c4_, + r2c1_ * o.x_ + r2c2_* o.y_ + r2c3_* o.z_ + r2c4_, + r3c1_ * o.x_ + r3c2_* o.y_ + r3c3_* o.z_ + r3c4_ + ); +} + +void FindTransform::findTransform(Vec3 fromA, Vec3 fromB, Vec3 fromC, Vec3 toA, Vec3 toB, Vec3 toC) +{ + Vec3 a1 = toA; + Vec3 b1 = toB; + Vec3 c1 = toC; + Vec3 a2 = fromA; + Vec3 b2 = fromB; + Vec3 c2 = fromC; + + Vec3 y1 = (a1 - c1).cross(b1 - c1).norm(); + Vec3 z1 = (a1 - c1).norm(); + Vec3 x1 = y1.cross(z1); + + Vec3 y2 = (a2 - c2).cross(b2 - c2).norm(); + Vec3 z2 = 
(a2 - c2).norm(); + Vec3 x2 = y2.cross(z2); + OnMat3 mat1 = OnMat3(x1, y1, z1).transpose(); + OnMat3 mat2 = OnMat3(x2, y2, z2).transpose(); + + OnMat3 rotation = mat1 * mat2.transpose(); + Vec3 translation = c1 - c2; + + double scale = (a1 - c1).length() / (a2 - c2).length(); + + translation = rotation * c2 * (-scale) + c1; + Mat4 transformation(rotation, translation, scale); + transform_ = transformation; +} + +double FindTransform::error(Vec3 fromA, Vec3 toA) +{ + return (transform_*fromA - toA).length(); +} diff --git a/modules/odm_georef/src/FindTransform.hpp b/modules/odm_georef/src/FindTransform.hpp new file mode 100644 index 000000000..19842e423 --- /dev/null +++ b/modules/odm_georef/src/FindTransform.hpp @@ -0,0 +1,165 @@ +// C++ +#include +#include +#include +#include +#include + +/*! + * \brief Handles basic 3d vector math. + **/ +struct Vec3 +{ + Vec3(double x = 0.0, double y = 0.0, double z = 0.0); + Vec3(const Vec3 &o); + + double x_,y_,z_; /**< The x, y and z values of the vector. **/ + + /*! + * \brief cross The cross product between two vectors. + **/ + Vec3 cross(Vec3 o) const; + + /*! + * \brief dot The scalar product between two vectors. + **/ + double dot(Vec3 o) const; + + /*! + * \brief length The length of the vector. + **/ + double length() const; + + /*! + * \brief norm Returns a normalized version of this vector. + **/ + Vec3 norm() const; + + /*! + * \brief Scales this vector. + **/ + Vec3 operator*(double d) const; + + /*! + * \brief Addition between two vectors. + **/ + Vec3 operator+(Vec3 o) const; + + /*! + * \brief Subtraction between two vectors. + **/ + Vec3 operator-(Vec3 o) const; + + friend std::ostream & operator<<(std::ostream &os, Vec3 v) + { + return os << "[" << std::setprecision(8) << v.x_ << ", " << std::setprecision(4) << v.y_ << ", " << v.z_ << "]"; + } +}; + +/*! + * \brief Describes a 3d orthonormal matrix. 
+ **/ +class OnMat3 +{ +public: + OnMat3(Vec3 r1, Vec3 r2, Vec3 r3); + OnMat3(const OnMat3 &o); + + Vec3 r1_; /**< The first row of the matrix. **/ + Vec3 r2_; /**< The second row of the matrix. **/ + Vec3 r3_; /**< The third row of the matrix. **/ + Vec3 c1_; /**< The first column of the matrix. **/ + Vec3 c2_; /**< The second column of the matrix. **/ + Vec3 c3_; /**< The third column of the matrix. **/ + + /*! + * \brief The determinant of the matrix. + **/ + double det() const; + + /*! + * \brief The transpose of the OnMat3 (equal to inverse). + **/ + OnMat3 transpose() const; + + /*! + * \brief Matrix multiplication between two ON matrices. + **/ + OnMat3 operator*(OnMat3 o) const; + + /*! + * \brief Right side multiplication with a 3d vector. + **/ + Vec3 operator*(Vec3 o); + + friend std::ostream & operator<<(std::ostream &os, OnMat3 m) + { + return os << "[" << std::endl << m.r1_ << std::endl << m.r2_ << std::endl << m.r3_ << std::endl << "]" << std::endl; + } +}; + +/*! + * \brief Describes an affine transformation. + **/ +class Mat4 +{ +public: + Mat4(); + Mat4(OnMat3 rotation, Vec3 translation, double scaling); + + /*! + * \brief Right side multiplication with a 3d vector. 
+ **/ + Vec3 operator*(Vec3 o); + + double r1c1_; /**< Matrix element 0 0 **/ + double r1c2_; /**< Matrix element 0 1 **/ + double r1c3_; /**< Matrix element 0 2 **/ + double r1c4_; /**< Matrix element 0 3 **/ + double r2c1_; /**< Matrix element 1 0 **/ + double r2c2_; /**< Matrix element 1 1 **/ + double r2c3_; /**< Matrix element 1 2 **/ + double r2c4_; /**< Matrix element 1 3 **/ + double r3c1_; /**< Matrix element 2 0 **/ + double r3c2_; /**< Matrix element 2 1 **/ + double r3c3_; /**< Matrix element 2 2 **/ + double r3c4_; /**< Matrix element 2 3 **/ + double r4c1_; /**< Matrix element 3 0 **/ + double r4c2_; /**< Matrix element 3 1 **/ + double r4c3_; /**< Matrix element 3 2 **/ + double r4c4_; /**< Matrix element 3 3 **/ + + friend std::ostream & operator<<(std::ostream &os, Mat4 m) + { + std::stringstream ss; + ss.precision(8); + ss.setf(std::ios::fixed, std::ios::floatfield); + + ss << "[ " << m.r1c1_ << ",\t" << m.r1c2_ << ",\t" << m.r1c3_ << ",\t" << m.r1c4_ << " ]" << std::endl << + "[ " << m.r2c1_ << ",\t" << m.r2c2_ << ",\t" << m.r2c3_ << ",\t" << m.r2c4_ << " ]" << std::endl << + "[ " << m.r3c1_ << ",\t" << m.r3c2_ << ",\t" << m.r3c3_ << ",\t" << m.r3c4_ << " ]" << std::endl << + "[ " << m.r4c1_ << ",\t" << m.r4c2_ << ",\t" << m.r4c3_ << ",\t" << m.r4c4_ << " ]"; + + return os << ss.str(); + } + +}; + +class FindTransform +{ +public: + /*! + * \brief findTransform Generates an affine transform from the three 'from' vector to the three 'to' vectors. + * The transform is such that transform * fromA = toA, + * transform * fromB = toB, + * transform * fromC = toC, + **/ + void findTransform(Vec3 fromA, Vec3 fromB, Vec3 fromC, Vec3 toA, Vec3 toB, Vec3 toC); + + /*! + * \brief error Returns the distance beteween the 'from' and 'to' vectors, after the transform has been applied. + **/ + double error(Vec3 fromA, Vec3 toA); + + Mat4 transform_; /**< The affine transform. 
**/ +}; diff --git a/modules/odm_georef/src/Georef.cpp b/modules/odm_georef/src/Georef.cpp new file mode 100644 index 000000000..3676a65a7 --- /dev/null +++ b/modules/odm_georef/src/Georef.cpp @@ -0,0 +1,1732 @@ +// PCL +#include +#include + +// OpenCV +#include +#include + +// This +#include "Georef.hpp" + +std::ostream& operator<<(std::ostream &os, const GeorefSystem &geo) +{ + return os << geo.system_ << "\n" << static_cast(geo.eastingOffset_) << " " << static_cast(geo.northingOffset_); +} + +GeorefGCP::GeorefGCP() + :x_(0.0), y_(0.0), z_(0.0), use_(false), localX_(0.0), localY_(0.0), localZ_(0.0),cameraIndex_(0), pixelX_(0), pixelY_(0.0), image_("") +{ +} + +GeorefGCP::~GeorefGCP() +{ +} + +void GeorefGCP::extractGCP(std::istringstream &gcpStream) +{ + gcpStream >> x_ >> y_ >> z_ >> pixelX_ >> pixelY_ >> image_; +} + +Vec3 GeorefGCP::getPos() +{ + return Vec3(localX_,localY_,localZ_); +} + +Vec3 GeorefGCP::getReferencedPos() +{ + return Vec3(x_,y_,z_); +} + +GeorefCamera::GeorefCamera() + :focalLength_(0.0), k1_(0.0), k2_(0.0), transform_(NULL), position_(NULL), pose_(NULL) +{ +} + +GeorefCamera::GeorefCamera(const GeorefCamera &other) + : focalLength_(other.focalLength_), k1_(other.k1_), k2_(other.k2_), + easting_(other.easting_), northing_(other.northing_), altitude_(other.altitude_), + transform_(NULL), position_(NULL), pose_(NULL) +{ + if(NULL != other.transform_) + { + transform_ = new Eigen::Affine3f(*other.transform_); + } + if(NULL != other.position_) + { + position_ = new Eigen::Vector3f(*other.position_); + } + if(pose_ != other.pose_) + { + pose_ = new Eigen::Affine3f(*other.pose_); + } +} + +GeorefCamera::~GeorefCamera() +{ + if(NULL != transform_) + { + delete transform_; + transform_ = NULL; + } + if(NULL != position_) + { + delete position_; + position_ = NULL; + } + if(pose_ != NULL) + { + delete pose_; + pose_ = NULL; + } +} + +void GeorefCamera::extractCamera(std::ifstream &bundleStream) +{ + // Extract intrinsic parameters. 
+ bundleStream >> focalLength_ >> k1_ >> k2_; + + Eigen::Vector3f t; + Eigen::Matrix3f rot; + Eigen::Affine3f transform; + Eigen::Affine3f pose; + + bundleStream >> transform(0,0); // Read rotation (0,0) from bundle file + bundleStream >> transform(0,1); // Read rotation (0,1) from bundle file + bundleStream >> transform(0,2); // Read rotation (0,2) from bundle file + + bundleStream >> transform(1,0); // Read rotation (1,0) from bundle file + bundleStream >> transform(1,1); // Read rotation (1,1) from bundle file + bundleStream >> transform(1,2); // Read rotation (1,2) from bundle file + + bundleStream >> transform(2,0); // Read rotation (2,0) from bundle file + bundleStream >> transform(2,1); // Read rotation (2,1) from bundle file + bundleStream >> transform(2,2); // Read rotation (2,2) from bundle file + + bundleStream >> t(0); // Read translation (0,3) from bundle file + bundleStream >> t(1); // Read translation (1,3) from bundle file + bundleStream >> t(2); // Read translation (2,3) from bundle file + + // + pose(0,0) = transform(0,0); + pose(0,1) = transform(0,1); + pose(0,2) = transform(0,2); + + pose(1,0) = transform(1,0); + pose(1,1) = transform(1,1); + pose(1,2) = transform(1,2); + + pose(2,0) = transform(2,0); + pose(2,1) = transform(2,1); + pose(2,2) = transform(2,2); + + pose(0,3) = t(0); + pose(1,3) = t(1); + pose(2,3) = t(2); + + pose(3,0) = 0.0; + pose(3,1) = 0.0; + pose(3,2) = 0.0; + pose(3,3) = 1.0; + + pose = pose.inverse(); + + // Column negation + pose(0,2) = -1.0*pose(0,2); + pose(1,2) = -1.0*pose(1,2); + pose(2,2) = -1.0*pose(2,2); + + pose(0,1) = -1.0*pose(0,1); + pose(1,1) = -1.0*pose(1,1); + pose(2,1) = -1.0*pose(2,1); + + if (pose_ != NULL) + { + delete pose_; + pose_ = NULL; + } + + pose_ = new Eigen::Affine3f(pose); + + rot = transform.matrix().topLeftCorner<3,3>(); + + // Calculate translation according to -R't and store in vector. 
+ t = -rot.transpose()*t; + + transform(0,3) = t(0); + transform(1,3) = t(1); + transform(2,3) = t(2); + + + // Set transform and position. + if(NULL != transform_) + { + delete transform_; + transform_ = NULL; + } + + transform_ = new Eigen::Affine3f(transform); + + if(NULL != position_) + { + delete position_; + position_ = NULL; + } + position_ = new Eigen::Vector3f(t); +} + +void GeorefCamera::extractCameraGeoref(std::istringstream &coordStream) +{ + coordStream >> easting_ >> northing_ >> altitude_; +} + +Vec3 GeorefCamera::getPos() +{ + return Vec3((*position_)(0),(*position_)(1),(*position_)(2)); +} + +Vec3 GeorefCamera::getReferencedPos() +{ + return Vec3(easting_,northing_,altitude_); +} + +bool GeorefCamera::isValid() +{ + return focalLength_ != 0 && k1_ != 0 && k2_ != 0; +} + +std::ostream& operator<<(std::ostream &os, const GeorefCamera &cam) +{ + os << "Focal, k1, k2 : " << cam.focalLength_ << ", " << cam.k1_ << ", " << cam.k2_ << "\n"; + if(NULL != cam.transform_) + { + os << "Transform :\n" << cam.transform_->matrix() << "\n"; + } + else + { + os << "Transform :\nNULL\n"; + } + if(NULL != cam.position_) + { + os << "Position :\n" << cam.position_->matrix() << "\n"; + } + else + { + os << "Position :\nNULL\n"; + } + os << "east, north, alt : " << cam.easting_ << ", " << cam.northing_ << ", " << cam.altitude_ << '\n'; + return os; +} + +Georef::Georef() : log_(false) +{ + georeferencePointCloud_ = false; + useGCP_ = false; + bundleFilename_ = ""; + inputCoordFilename_ = ""; + outputCoordFilename_ = ""; + inputObjFilename_ = ""; + outputObjFilename_ = ""; + exportCoordinateFile_ = false; + exportGeorefSystem_ = false; +} + +Georef::~Georef() +{ +} + +int Georef::run(int argc, char *argv[]) +{ + try + { + parseArguments(argc, argv); + createGeoreferencedModel(); + } + catch (const GeorefException& e) + { + log_.setIsPrintingInCout(true); + log_ << e.what() << "\n"; + log_.print(logFile_); + return EXIT_FAILURE; + } + catch (const std::exception& e) + { + 
log_.setIsPrintingInCout(true); + log_ << "Error in Georef:\n"; + log_ << e.what() << "\n"; + log_.print(logFile_); + return EXIT_FAILURE; + } + catch (...) + { + log_.setIsPrintingInCout(true); + log_ << "Unknown error, terminating:\n"; + log_.print(logFile_); + return EXIT_FAILURE; + } + + log_.print(logFile_); + + return EXIT_SUCCESS; +} + +void Georef::parseArguments(int argc, char *argv[]) +{ + bool outputSpecified = false; + bool outputPointCloudSpecified = false; + bool imageListSpecified = false; + bool gcpFileSpecified = false; + bool imageLocation = false; + // bool bundleResized = false; + bool outputCoordSpecified = false; + bool inputCoordSpecified = false; + + logFile_ = std::string(argv[0]) + "_log.txt"; + log_ << logFile_ << "\n"; + + finalTransformFile_ = std::string(argv[0]) + "_transform.txt"; + + // If no arguments were passed, print help. + if (argc == 1) + { + printHelp(); + } + + log_ << "Arguments given\n"; + for(int argIndex = 1; argIndex < argc; ++argIndex) + { + log_ << argv[argIndex] << '\n'; + } + + log_ << '\n'; + for(int argIndex = 1; argIndex < argc; ++argIndex) + { + // The argument to be parsed. 
+ std::string argument = std::string(argv[argIndex]); + + if(argument == "-help") + { + printHelp(); + } + else if(argument == "-verbose") + { + log_.setIsPrintingInCout(true); + } + else if (argument == "-logFile") + { + ++argIndex; + if (argIndex >= argc) + { + throw GeorefException("Missing argument for '" + argument + "'."); + } + logFile_ = std::string(argv[argIndex]); + std::ofstream testFile(logFile_.c_str()); + if (!testFile.is_open()) + { + throw GeorefException("Argument '" + argument + "' has a bad value."); + } + log_ << "Log file path was set to: " << logFile_ << "\n"; + } + else if (argument == "-outputTransformFile") + { + ++argIndex; + if (argIndex >= argc) + { + throw GeorefException("Missing argument for '" + argument + "'."); + } + finalTransformFile_ = std::string(argv[argIndex]); + std::ofstream testFile(logFile_.c_str()); + if (!testFile.is_open()) + { + throw GeorefException("Argument '" + argument + "' has a bad value."); + } + log_ << "Transform file path was set to: " << finalTransformFile_ << "\n"; + } + else if(argument == "-bundleFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + bundleFilename_ = std::string(argv[argIndex]); + log_ << "Reading cameras from: " << bundleFilename_ << "\n"; + } + else if(argument == "-inputCoordFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + inputCoordFilename_ = std::string(argv[argIndex]); + log_ << "Reading cameras gps exif positions from: " << inputCoordFilename_ << "\n"; + inputCoordSpecified = true; + } + else if(argument == "-outputCoordFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but 
no more inputs were provided."); + } + outputCoordFilename_ = std::string(argv[argIndex]); + log_ << "Exporting cameras georeferenced gps positions to: " << outputCoordFilename_ << "\n"; + exportCoordinateFile_ = true; + outputCoordSpecified = true; + } + else if(argument == "-inputFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + inputObjFilename_ = std::string(argv[argIndex]); + log_ << "Reading textured mesh from: " << inputObjFilename_ << "\n"; + } + else if(argument == "-inputPointCloudFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + inputPointCloudFilename_ = std::string(argv[argIndex]); + log_ << "Reading point cloud from: " << inputPointCloudFilename_ << "\n"; + georeferencePointCloud_ = true; + } + else if(argument == "-gcpFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + gcpFilename_ = std::string(argv[argIndex]); + log_ << "Reading GCPs from: " << gcpFilename_ << "\n"; + gcpFileSpecified = true; + } + else if(argument == "-imagesListPath" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + imagesListPath_ = std::string(argv[argIndex]); + log_ << "Reading image list from: " << imagesListPath_ << "\n"; + imageListSpecified = true; + } + else if(argument == "-imagesPath" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + 
imagesLocation_ = std::string(argv[argIndex]); + log_ << "Images location is set to: " << imagesLocation_ << "\n"; + imageLocation = true; + } + else if(argument == "-georefFileOutputPath" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + georefFilename_ = std::string(argv[argIndex]); + log_ << "Georef file output path is set to: " << georefFilename_ << "\n"; + exportGeorefSystem_ = true; + } + /*else if(argument == "-bundleResizedTo" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + std::stringstream ss(argv[argIndex]); + ss >> bundleResizedTo_; + if (ss.bad()) + { + throw GeorefException("Argument '" + argument + "' has a bad value. (wrong type)"); + } + log_ << "Bundle resize value is set to: " << bundleResizedTo_ << "\n"; + bundleResized = true; + }*/ + else if(argument == "-outputFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + outputObjFilename_ = std::string(argv[argIndex]); + log_ << "Writing output to: " << outputObjFilename_ << "\n"; + outputSpecified = true; + } + else if(argument == "-outputPointCloudFile" && argIndex < argc) + { + argIndex++; + if (argIndex >= argc) + { + throw GeorefException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + outputPointCloudFilename_ = std::string(argv[argIndex]); + log_ << "Writing output to: " << outputPointCloudFilename_ << "\n"; + outputPointCloudSpecified = true; + } + else + { + printHelp(); + throw GeorefException("Unrecognised argument '" + argument + "'"); + } + } + + if (inputCoordSpecified && 
outputCoordSpecified) + { + throw GeorefException("Both output and input coordfile specified, only one of those are accepted."); + } + + if (imageListSpecified && gcpFileSpecified && imageLocation ) // && bundleResized) + { + useGCP_ = true; + } + else + { + log_ << '\n'; + log_ << "Missing input in order to use GCP for georeferencing. Using EXIF data instead.\n"; + } + + if(georeferencePointCloud_ && !outputPointCloudSpecified) + { + setDefaultPointCloudOutput(); + } + + if(!outputSpecified) + { + setDefaultOutput(); + } +} + +void Georef::printHelp() +{ + bool printInCoutPop = log_.isPrintingInCout(); + log_.setIsPrintingInCout(true); + + log_ << "Georef.exe\n\n"; + + log_ << "Purpose:" << "\n"; + log_ << "Georeference a textured mesh with the use of ground control points or exif data from the images." << "\n"; + + log_ << "Usage:" << "\n"; + log_ << "The program requires a path to a camera bundle file, a camera georeference coords file, and an input OBJ mesh file. All other input parameters are optional." 
<< "\n\n"; + + log_ << "The following flags are available\n"; + log_ << "Call the program with flag \"-help\", or without parameters to print this message, or check any generated log file.\n"; + log_ << "Call the program with flag \"-verbose\", to print log messages in the standard output stream as well as in the log file.\n\n"; + + log_ << "Parameters are specified as: \"- \", (without <>), and the following parameters are configureable: " << "\n"; + log_ << "\"-bundleFile \" (mandatory)" << "\n"; + log_ << "\"Input cameras bundle file.\n\n"; + + log_ << "\"-gcpFile \" (mandatory if using ground control points)\n"; + log_ << "Path to the file containing the ground control points used for georeferencing.\n"; + log_ << "The file needs to be on the following line format:\n"; + log_ << "easting northing height pixelrow pixelcol imagename\n\n"; + + log_ << "\"-inputCoordFile \" (mandatory if using exif data)" << "\n"; + log_ << "\"Input cameras geroreferenced coords file.\n\n"; + + log_ << "\"-outputCoordFile \" (optional)" << "\n"; + log_ << "\"Output cameras geroreferenced coords file.\n\n"; + + log_ << "\"-inputFile \" (mandatory)" << "\n"; + log_ << "\"Input obj file that must contain a textured mesh.\n\n"; + + log_ << "\"-inputPointCloudFile \" (optional)" << "\n"; + log_ << "\"Input ply file that must contain a point cloud.\n\n"; + + log_ << "\"-imagesListPath \" (mandatory if using ground control points)\n"; + log_ << "Path to the list containing the image names used in the bundle.out file.\n\n"; + + log_ << "\"-imagesPath \" (mandatory if using ground control points)\n"; + log_ << "Path to the folder containing full resolution images.\n\n"; + + // log_ << "\"-bundleResizedTo \" (mandatory if using ground control points)\n"; + // log_ << "The resized resolution used in bundler.\n\n"; + + log_ << "\"-outputFile \" (optional, default _geo)" << "\n"; + log_ << "\"Output obj file that will contain the georeferenced texture mesh.\n\n"; + + log_ << 
"\"-outputPointCloudFile \" (mandatory if georeferencing a point cloud)" << "\n"; + log_ << "\"Output ply file that will contain the georeferenced point cloud.\n\n"; + + log_.setIsPrintingInCout(printInCoutPop); +} + +void Georef::setDefaultOutput() +{ + if(inputObjFilename_.empty()) + { + throw GeorefException("Tried to generate default output file without having an input file."); + } + + std::string tmp = inputObjFilename_; + size_t findPos = tmp.find_last_of("."); + + if(std::string::npos == findPos) + { + throw GeorefException("Tried to generate default ouptut file, could not find .obj in the input file:\n\'"+inputObjFilename_+"\'"); + } + + tmp = tmp.substr(0, findPos); + + outputObjFilename_ = tmp + "_geo.obj"; + log_ << "Writing output to: " << outputObjFilename_ << "\n"; +} + +void Georef::setDefaultPointCloudOutput() +{ + if(inputPointCloudFilename_.empty()) + { + throw GeorefException("Tried to generate default point cloud ouptut file without having an input file."); + } + + std::string tmp = inputPointCloudFilename_; + size_t findPos = tmp.find_last_of("."); + + if(std::string::npos == findPos) + { + throw GeorefException("Tried to generate default ouptut file, could not find .ply in the input file:\n\'"+inputPointCloudFilename_+"\'"); + } + + tmp = tmp.substr(0, findPos); + + outputPointCloudFilename_ = tmp + "_geo.ply"; + log_ << "Writing output to: " << outputPointCloudFilename_ << "\n"; +} + +void Georef::createGeoreferencedModel() +{ + if (useGCP_) + { + createGeoreferencedModelFromGCPData(); + } + else + { + createGeoreferencedModelFromExifData(); + } +} + +void Georef::readCameras() +{ + // Read translations from bundle file + std::ifstream bundleStream(bundleFilename_.c_str()); + if (!bundleStream.good()) + { + throw GeorefException("Failed opening bundle file " + bundleFilename_ + " for reading." + '\n'); + } + + // Read Cameras. 
+ std::string bundleLine; + std::getline(bundleStream, bundleLine); // Read past bundle version comment + int numCameras, numPoints; + bundleStream >> numCameras >> numPoints; + for (int i=0; i> imageName; + imageList_.push_back(imageName); + } + + // Number of GCPs read + size_t nrGCPs = 0; + + std::ifstream gcpStream(gcpFilename_.c_str()); + if (!gcpStream.good()) + { + throw GeorefException("Failed opening gcp file " + gcpFilename_ + " for reading.\n"); + } + std::string gcpString; + + // Read the first line in the file as the format of the projected coordinates + std::getline(gcpStream, georefSystem_.system_); + + log_ << '\n'; + log_<< "Reading following GCPs from file:\n"; + + // Read all GCPs + while(std::getline(gcpStream, gcpString)) + { + std::istringstream istr(gcpString); + GeorefGCP gcp; + gcp.extractGCP(istr); + gcps_.push_back(gcp); + ++nrGCPs; + + log_<<"x_: "<(gcps_.size()); + northingOffset += (gcps_[gcpIndex].y_)/static_cast(gcps_.size()); + } + + georefSystem_.eastingOffset_ = static_cast(std::floor(eastingOffset)); + georefSystem_.northingOffset_ = static_cast(std::floor(northingOffset)); + + log_ << '\n'; + log_<<"The calculated easting offset for the georeferenced system: "<(georefSystem_.eastingOffset_); + gcps_[gcpIndex].y_ -= static_cast(georefSystem_.northingOffset_); + log_<<"x_: "<::Ptr meshCloud (new pcl::PointCloud); + pcl::fromPCLPointCloud2 (mesh.cloud, *meshCloud); + + // The number of GCP that is usable + int nrGCPUsable = 0; + + for (size_t gcpIndex = 0; gcpIndex < gcps_.size(); ++gcpIndex) + { + // Bool to check if the GCP is intersecting any triangle + bool exists = false; + + // Translate the GeoreferenceCamera to pcl-format in order to use pcl-functions + pcl::TextureMapping::Camera cam; + cam.focal_length = cameras_[gcps_[gcpIndex].cameraIndex_].focalLength_; + cam.pose = *(cameras_[gcps_[gcpIndex].cameraIndex_].pose_); + cam.texture_file = imagesLocation_ + '/' + gcps_[gcpIndex].image_; + + cv::Mat image = 
cv::imread(cam.texture_file); + cam.height = static_cast(image.rows); + cam.width = static_cast(image.cols); + + // The pixel position for the GCP in pcl-format in order to use pcl-functions + pcl::PointXY gcpPos; + gcpPos.x = static_cast(gcps_[gcpIndex].pixelX_); + gcpPos.y = static_cast(gcps_[gcpIndex].pixelY_); + + // Move vertices in mesh into the camera coordinate system + pcl::PointCloud::Ptr cameraCloud (new pcl::PointCloud); + pcl::transformPointCloud (*meshCloud, *cameraCloud, cam.pose.inverse()); + + // The vertex indicies to be used in order to calculate the GCP in the models coordinates + size_t vert0Index = 0; size_t vert1Index = 0; size_t vert2Index = 0; + + pcl::PointXY bestPixelPos0; pcl::PointXY bestPixelPos1; pcl::PointXY bestPixelPos2; + + // The closest distance of a triangle to the camera + double bestDistance = std::numeric_limits::infinity(); + + // Loop through all submeshes in model + for (size_t meshIndex = 0; meshIndex < mesh.tex_polygons.size(); ++meshIndex) + { + // Loop through all faces in submesh and check if inside polygon + for (size_t faceIndex = 0; faceIndex < mesh.tex_polygons[meshIndex].size(); ++faceIndex) + { + // Variables for the vertices in face as projections in the camera plane + pcl::PointXY pixelPos0; pcl::PointXY pixelPos1; pcl::PointXY pixelPos2; + if (isFaceProjected(cam, + cameraCloud->points[mesh.tex_polygons[meshIndex][faceIndex].vertices[0]], + cameraCloud->points[mesh.tex_polygons[meshIndex][faceIndex].vertices[1]], + cameraCloud->points[mesh.tex_polygons[meshIndex][faceIndex].vertices[2]], + pixelPos0, pixelPos1, pixelPos2)) + { + // If the pixel position of the GCP is inside the current triangle + if (checkPointInsideTriangle(pixelPos0, pixelPos1, pixelPos2, gcpPos)) + { + // Extract distances for all vertices for face to camera + double d0 = cameraCloud->points[mesh.tex_polygons[meshIndex][faceIndex].vertices[0]].z; + double d1 = cameraCloud->points[mesh.tex_polygons[meshIndex][faceIndex].vertices[1]].z; + 
double d2 = cameraCloud->points[mesh.tex_polygons[meshIndex][faceIndex].vertices[2]].z; + + // Calculate largest distance and store in distance variable + double distance = std::max(d0, std::max(d1,d2)); + + // If the triangle is closer to the camera use this triangle + if (distance < bestDistance) + { + // Update variables for the closest polygon + bestDistance = distance; + vert0Index = mesh.tex_polygons[meshIndex][faceIndex].vertices[0]; + vert1Index = mesh.tex_polygons[meshIndex][faceIndex].vertices[1]; + vert2Index = mesh.tex_polygons[meshIndex][faceIndex].vertices[2]; + bestPixelPos0 = pixelPos0; + bestPixelPos1 = pixelPos1; + bestPixelPos2 = pixelPos2; + exists = true; + ++nrGCPUsable; + } + } + } + } + } + + if(exists) + { + // Shorthands for the vertices + pcl::PointXYZ v0 = meshCloud->points[vert0Index]; + pcl::PointXYZ v1 = meshCloud->points[vert1Index]; + pcl::PointXYZ v2 = meshCloud->points[vert2Index]; + // Use barycentric coordinates to calculate position for the polygon intersection + pcl::PointXYZ gcpLocal = barycentricCoordinates(gcpPos, v0, v1, v2, bestPixelPos0, bestPixelPos1, bestPixelPos2); + + log_ << "Position in model for gcp " << gcpIndex + 1<< ": x=" < transform; + + transform(0, 0) = static_cast(transFinal.transform_.r1c1_); + transform(1, 0) = static_cast(transFinal.transform_.r2c1_); + transform(2, 0) = static_cast(transFinal.transform_.r3c1_); + transform(3, 0) = static_cast(transFinal.transform_.r4c1_); + + transform(0, 1) = static_cast(transFinal.transform_.r1c2_); + transform(1, 1) = static_cast(transFinal.transform_.r2c2_); + transform(2, 1) = static_cast(transFinal.transform_.r3c2_); + transform(3, 1) = static_cast(transFinal.transform_.r4c2_); + + transform(0, 2) = static_cast(transFinal.transform_.r1c3_); + transform(1, 2) = static_cast(transFinal.transform_.r2c3_); + transform(2, 2) = static_cast(transFinal.transform_.r3c3_); + transform(3, 2) = static_cast(transFinal.transform_.r4c3_); + + transform(0, 3) = 
static_cast(transFinal.transform_.r1c4_); + transform(1, 3) = static_cast(transFinal.transform_.r2c4_); + transform(2, 3) = static_cast(transFinal.transform_.r3c4_); + transform(3, 3) = static_cast(transFinal.transform_.r4c4_); + + log_ << '\n'; + log_ << "Applying transform to mesh...\n"; + // Move the mesh into position. + pcl::transformPointCloud(*meshCloud, *meshCloud, transform); + log_ << ".. mesh transformed.\n"; + + // Update the mesh. + pcl::toPCLPointCloud2 (*meshCloud, mesh.cloud); + + // Iterate over each part of the mesh (one per material), to make texture file paths relative the .mtl file. + for(size_t t = 0; t < mesh.tex_materials.size(); ++t) + { + // The material of the current submesh. + pcl::TexMaterial& material = mesh.tex_materials[t]; + + size_t find = material.tex_file.find_last_of("/\\"); + if(std::string::npos != find) + { + material.tex_file = material.tex_file.substr(find + 1); + } + } + + log_ << '\n'; + if (saveOBJFile(outputObjFilename_, mesh, 8) == -1) + { + throw GeorefException("Error when saving model:\n" + outputObjFilename_ + "\n"); + } + else + { + log_ << "Successfully saved model.\n"; + } + + if(georeferencePointCloud_) + { + //pcl::PointCloud2::Ptr pointCloud; + pcl::PointCloud::Ptr pointCloud(new pcl::PointCloud()); + if(pcl::io::loadPLYFile (inputPointCloudFilename_.c_str(), *pointCloud.get()) == -1) { + throw GeorefException("Error when reading point cloud:\n" + inputPointCloudFilename_ + "\n"); + } + else + { + log_ << "Successfully loaded " << pointCloud->size() << " points with corresponding normals from file.\n"; + } + log_ << '\n'; + log_ << "Applying transform to point cloud...\n"; + pcl::transformPointCloud(*pointCloud, *pointCloud, transform); + log_ << ".. 
point cloud transformed.\n"; + + pcl::PLYWriter plyWriter; + + log_ << '\n'; + log_ << "Saving point cloud file to \'" << outputPointCloudFilename_ << "\'...\n"; + //pcl::io::savePLYFileASCII(outputPointCloudFilename_.c_str(), *pointCloud.get()); + plyWriter.write(outputPointCloudFilename_.c_str(), *pointCloud.get(), false, false); + log_ << ".. point cloud file saved.\n"; + } + + if(exportCoordinateFile_) + { + log_ << '\n'; + log_ << "Saving georeferenced camera positions to "; + log_ << outputCoordFilename_; + log_<< "\n"; + std::ofstream coordStream(outputCoordFilename_.c_str()); + coordStream << georefSystem_.system_ <(georefSystem_.eastingOffset_) << " " << static_cast(georefSystem_.northingOffset_) << std::endl; + for(size_t cameraIndex = 0; cameraIndex < cameras_.size(); ++cameraIndex) + { + Vec3 globalCameraPosition = (transFinal.transform_)*(cameras_[cameraIndex].getPos()); + coordStream << globalCameraPosition.x_ << " " << globalCameraPosition.y_ << " " << globalCameraPosition.z_ << std::endl; + } + coordStream.close(); + log_ << "...coordinate file saved.\n"; + } + + if(exportGeorefSystem_) + { + printGeorefSystem(); + } +} + +void Georef::createGeoreferencedModelFromGCPData() +{ + readCameras(); + + readGCPs(); + + calculateGCPOffset(); + + performGeoreferencingWithGCP(); + +} + +void Georef::createGeoreferencedModelFromExifData() +{ + readCameras(); + + // Read coords from coord file generated by extract_utm tool + std::ifstream coordStream(inputCoordFilename_.c_str()); + if (!coordStream.good()) + { + throw GeorefException("Failed opening coordinate file " + inputCoordFilename_ + " for reading." 
+ '\n'); + } + + std::string coordString; + std::getline(coordStream, georefSystem_.system_); // System + { + std::getline(coordStream, coordString); + std::stringstream ss(coordString); + + ss >> georefSystem_.eastingOffset_ >> georefSystem_.northingOffset_; + } + + log_ << '\n'; + log_ << "Geographical reference system\n"; + log_ << georefSystem_ << '\n'; + + // The number of cameras in the coords file. + size_t nGeorefCameras = 0; + + // Read the georeferenced position for all cameras. + while (std::getline(coordStream, coordString)) + { + if(nGeorefCameras >= cameras_.size()) + { + throw GeorefException("Error, too many cameras in \'" + inputCoordFilename_ + "\' coord file.\n"); + } + + std::istringstream istr(coordString); + cameras_[nGeorefCameras].extractCameraGeoref(istr); + + ++nGeorefCameras; + } + coordStream.close(); + + if(nGeorefCameras < cameras_.size()) + { + throw GeorefException("Not enough cameras in \'" + inputCoordFilename_ + "\' coord file.\n"); + } + + // Remove invalid cameras + std::vector<GeorefCamera> goodCameras; + for (size_t i = 0; i < cameras_.size(); i++){ + if (cameras_[i].isValid()) goodCameras.push_back(GeorefCamera(cameras_[i])); + } + cameras_.clear(); + cameras_ = goodCameras; + + // The optimal camera triplet. + size_t cam0, cam1, cam2; + + log_ << '\n'; + log_ << "Choosing optimal camera triplet...\n"; + chooseBestCameraTriplet(cam0, cam1, cam2); + log_ << "... optimal camera triplet chosen:\n"; + log_ << cam0 << ", " << cam1 << ", " << cam2 << '\n'; + log_ << '\n'; + FindTransform transFinal; + transFinal.findTransform(cameras_[cam0].getPos(), cameras_[cam1].getPos(), cameras_[cam2].getPos(), + cameras_[cam0].getReferencedPos(), cameras_[cam1].getReferencedPos(), cameras_[cam2].getReferencedPos()); + log_ << "Final transform:\n"; + log_ << transFinal.transform_ << '\n'; + + printFinalTransform(transFinal.transform_); + + // The transform used to move the chosen area into the ortho photo. 
+ Eigen::Transform transform; + + transform(0, 0) = static_cast(transFinal.transform_.r1c1_); transform(1, 0) = static_cast(transFinal.transform_.r2c1_); + transform(2, 0) = static_cast(transFinal.transform_.r3c1_); transform(3, 0) = static_cast(transFinal.transform_.r4c1_); + + transform(0, 1) = static_cast(transFinal.transform_.r1c2_); transform(1, 1) = static_cast(transFinal.transform_.r2c2_); + transform(2, 1) = static_cast(transFinal.transform_.r3c2_); transform(3, 1) = static_cast(transFinal.transform_.r4c2_); + + transform(0, 2) = static_cast(transFinal.transform_.r1c3_); transform(1, 2) = static_cast(transFinal.transform_.r2c3_); + transform(2, 2) = static_cast(transFinal.transform_.r3c3_); transform(3, 2) = static_cast(transFinal.transform_.r4c3_); + + transform(0, 3) = static_cast(transFinal.transform_.r1c4_); transform(1, 3) = static_cast(transFinal.transform_.r2c4_); + transform(2, 3) = static_cast(transFinal.transform_.r3c4_); transform(3, 3) = static_cast(transFinal.transform_.r4c4_); + + log_ << '\n'; + log_ << "Reading mesh file...\n"; + pcl::TextureMesh mesh; + loadObjFile(inputObjFilename_, mesh); + log_ << ".. mesh file read.\n"; + + // Contains the vertices of the mesh. + pcl::PointCloud::Ptr meshCloud (new pcl::PointCloud); + pcl::fromPCLPointCloud2 (mesh.cloud, *meshCloud); + + log_ << '\n'; + log_ << "Applying transform to mesh...\n"; + // Move the mesh into position. + pcl::transformPointCloud(*meshCloud, *meshCloud, transform); + log_ << ".. mesh transformed.\n"; + + // Update the mesh. + pcl::toPCLPointCloud2 (*meshCloud, mesh.cloud); + + // Iterate over each part of the mesh (one per material), to make texture file paths relative the .mtl file. + for(size_t t = 0; t < mesh.tex_materials.size(); ++t) + { + // The material of the current submesh. 
+ pcl::TexMaterial& material = mesh.tex_materials[t]; + + size_t find = material.tex_file.find_last_of("/\\"); + if(std::string::npos != find) + { + material.tex_file = material.tex_file.substr(find + 1); + } + } + + log_ << '\n'; + log_ << "Saving mesh file to \'" << outputObjFilename_ << "\'...\n"; + saveOBJFile(outputObjFilename_, mesh, 8); + log_ << ".. mesh file saved.\n"; + + if(georeferencePointCloud_) + { + //pcl::PointCloud2::Ptr pointCloud; + pcl::PointCloud::Ptr pointCloud(new pcl::PointCloud()); + if(pcl::io::loadPLYFile (inputPointCloudFilename_.c_str(), *pointCloud.get()) == -1) { + throw GeorefException("Error when reading point cloud:\n" + inputPointCloudFilename_ + "\n"); + } + else + { + log_ << "Successfully loaded " << pointCloud->size() << " points with corresponding normals from file.\n"; + } + log_ << '\n'; + log_ << "Applying transform to point cloud...\n"; + pcl::transformPointCloud(*pointCloud, *pointCloud, transform); + log_ << ".. point cloud transformed.\n"; + + pcl::PLYWriter plyWriter; + + log_ << '\n'; + log_ << "Saving point cloud file to \'" << outputPointCloudFilename_ << "\'...\n"; + //pcl::io::savePLYFileASCII(outputPointCloudFilename_.c_str(), *pointCloud.get()); + plyWriter.write(outputPointCloudFilename_.c_str(), *pointCloud.get(), false, false); + log_ << ".. 
point cloud file saved.\n"; + } + + if(exportGeorefSystem_) + { + printGeorefSystem(); + } +} + +void Georef::chooseBestGCPTriplet(size_t &gcp0, size_t &gcp1, size_t &gcp2) +{ + size_t numThreads = boost::thread::hardware_concurrency(); + boost::thread_group threads; + std::vector triplets; + for(size_t t = 0; t < numThreads; ++t) + { + GeorefBestTriplet* triplet = new GeorefBestTriplet(); + triplets.push_back(triplet); + threads.create_thread(boost::bind(&Georef::findBestGCPTriplet, this, boost::ref(triplet->t_), boost::ref(triplet->s_), boost::ref(triplet->p_), t, numThreads, boost::ref(triplet->err_))); + } + + threads.join_all(); + + double minTotError = std::numeric_limits::infinity(); + for(size_t t = 0; t triplet->err_) + { + minTotError = triplet->err_; + gcp0 = triplet->t_; + gcp1 = triplet->s_; + gcp2 = triplet->p_; + } + delete triplet; + } + + log_ << "Mean georeference error " << minTotError / static_cast(gcps_.size()) << '\n'; +} + +void Georef::findBestGCPTriplet(size_t &gcp0, size_t &gcp1, size_t &gcp2, size_t offset, size_t stride, double &minTotError) +{ + minTotError = std::numeric_limits::infinity(); + + for(size_t t = offset; t < gcps_.size(); t+=stride) + { + if (gcps_[t].use_) + { + for(size_t s = t; s < gcps_.size(); ++s) + { + if (gcps_[s].use_) + { + for(size_t p = s; p < gcps_.size(); ++p) + { + if (gcps_[p].use_) + { + FindTransform trans; + trans.findTransform(gcps_[t].getPos(), gcps_[s].getPos(), gcps_[p].getPos(), + gcps_[t].getReferencedPos(), gcps_[s].getReferencedPos(), gcps_[p].getReferencedPos()); + + // The total error for the curren camera triplet. 
+ double totError = 0.0; + + for(size_t r = 0; r < gcps_.size(); ++r) + { + totError += trans.error(gcps_[r].getPos(), gcps_[r].getReferencedPos()); + } + + if(minTotError > totError) + { + minTotError = totError; + gcp0 = t; + gcp1 = s; + gcp2 = p; + } + } + } + } + } + } + } + + log_ << '[' << offset+1 << " of " << stride << "] Mean georeference error " << minTotError / static_cast(gcps_.size()); + log_ << " (" << gcp0 << ", " << gcp1 << ", " << gcp2 << ")\n"; +} + +void Georef::chooseBestCameraTriplet(size_t &cam0, size_t &cam1, size_t &cam2) +{ + size_t numThreads = boost::thread::hardware_concurrency(); + boost::thread_group threads; + std::vector triplets; + for(size_t t = 0; t < numThreads; ++t) + { + GeorefBestTriplet* triplet = new GeorefBestTriplet(); + triplets.push_back(triplet); + threads.create_thread(boost::bind(&Georef::findBestCameraTriplet, this, boost::ref(triplet->t_), boost::ref(triplet->s_), boost::ref(triplet->p_), t, numThreads, boost::ref(triplet->err_))); + } + + threads.join_all(); + + double minTotError = std::numeric_limits::infinity(); + for(size_t t = 0; t triplet->err_) + { + minTotError = triplet->err_; + cam0 = triplet->t_; + cam1 = triplet->s_; + cam2 = triplet->p_; + } + delete triplet; + } + + log_ << "Mean georeference error " << minTotError / static_cast(cameras_.size()) << '\n'; +} + +void Georef::findBestCameraTriplet(size_t &cam0, size_t &cam1, size_t &cam2, size_t offset, size_t stride, double &minTotError) +{ + minTotError = std::numeric_limits::infinity(); + + for(size_t t = offset; t < cameras_.size(); t+=stride) + { + for(size_t s = t; s < cameras_.size(); ++s) + { + for(size_t p = s; p < cameras_.size(); ++p) + { + FindTransform trans; + trans.findTransform(cameras_[t].getPos(), cameras_[s].getPos(), cameras_[p].getPos(), + cameras_[t].getReferencedPos(), cameras_[s].getReferencedPos(), cameras_[p].getReferencedPos()); + + // The total error for the current camera triplet. 
+ double totError = 0.0; + + for(size_t r = 0; r < cameras_.size(); ++r) + { + totError += trans.error(cameras_[r].getPos(), cameras_[r].getReferencedPos()); + } + + if(minTotError > totError) + { + minTotError = totError; + cam0 = t; + cam1 = s; + cam2 = p; + } + } + } + } + + log_ << '[' << offset+1 << " of " << stride << "] Mean georeference error " << minTotError / static_cast<double>(cameras_.size()); + log_ << " (" << cam0 << ", " << cam1 << ", " << cam2 << ")\n"; +} + +void Georef::printGeorefSystem() +{ + if(outputObjFilename_.empty()) + { + throw GeorefException("Output file path empty!"); + } + + std::string tmp = outputObjFilename_; + size_t findPos = tmp.find_last_of("."); + + if(std::string::npos == findPos) + { + throw GeorefException("Tried to generate default output file, could not find .obj in the output file:\n\'"+outputObjFilename_+"\'"); + } + + //tmp = tmp.substr(0, findPos); + + //tmp = tmp + "_georef_system.txt"; + log_ << '\n'; + log_ << "Saving georeference system file to \'" << georefFilename_ << "\'...\n"; + std::ofstream geoStream(georefFilename_.c_str()); + geoStream << georefSystem_ << std::endl; + geoStream.close(); + log_ << "... georeference system saved.\n"; +} + + +void Georef::printFinalTransform(Mat4 transform) +{ + if(outputObjFilename_.empty()) + { + throw GeorefException("Output file path empty!"); + } + + std::string tmp = outputObjFilename_; + size_t findPos = tmp.find_last_of("."); + + if(std::string::npos == findPos) + { + throw GeorefException("Tried to generate default output file, could not find .obj in the output file:\n\'"+outputObjFilename_+"\'"); + } + + log_ << '\n'; + log_ << "Saving final transform file to \'" << finalTransformFile_ << "\'...\n"; + std::ofstream transformStream(finalTransformFile_.c_str()); + transformStream << transform << std::endl; + transformStream.close(); + log_ << "... 
final transform saved.\n"; +} + + +bool Georef::loadObjFile(std::string inputFile, pcl::TextureMesh &mesh) +{ + int data_type; + unsigned int data_idx; + int file_version; + int offset = 0; + Eigen::Vector4f origin; + Eigen::Quaternionf orientation; + + if (!readHeader(inputFile, mesh.cloud, origin, orientation, file_version, data_type, data_idx, offset)) + { + throw GeorefException("Problem reading header in modelfile!\n"); + } + + std::ifstream fs; + + fs.open (inputFile.c_str (), std::ios::binary); + if (!fs.is_open () || fs.fail ()) + { + //PCL_ERROR ("[pcl::OBJReader::readHeader] Could not open file '%s'! Error : %s\n", file_name.c_str (), strerror(errno)); + fs.close (); + log_<<"Could not read mesh from file "; + log_ << inputFile.c_str(); + log_ <<"\n"; + + throw GeorefException("Problem reading mesh from file!\n"); + } + + // Seek at the given offset + fs.seekg (data_idx, std::ios::beg); + + // Get normal_x field indices + int normal_x_field = -1; + for (std::size_t i = 0; i < mesh.cloud.fields.size (); ++i) + { + if (mesh.cloud.fields[i].name == "normal_x") + { + normal_x_field = i; + break; + } + } + + std::size_t v_idx = 0; + std::size_t vn_idx = 0; + std::size_t vt_idx = 0; + std::size_t f_idx = 0; + std::string line; + std::vector st; + std::vector > coordinates; + std::vector > allTexCoords; + + std::map f2vt; + + try + { + while (!fs.eof ()) + { + getline (fs, line); + // Ignore empty lines + if (line == "") + continue; + + // Tokenize the line + std::stringstream sstream (line); + sstream.imbue (std::locale::classic ()); + line = sstream.str (); + boost::trim (line); + boost::split (st, line, boost::is_any_of ("\t\r "), boost::token_compress_on); + + // Ignore comments + if (st[0] == "#") + continue; + // Vertex + if (st[0] == "v") + { + try + { + for (int i = 1, f = 0; i < 4; ++i, ++f) + { + float value = boost::lexical_cast (st[i]); + memcpy (&mesh.cloud.data[v_idx * mesh.cloud.point_step + mesh.cloud.fields[f].offset], &value, sizeof (float)); + 
} + + ++v_idx; + } + catch (const boost::bad_lexical_cast &e) + { + log_<<"Unable to convert %s to vertex coordinates!\n"; + throw GeorefException("Unable to convert %s to vertex coordinates!"); + } + continue; + } + // Vertex normal + if (st[0] == "vn") + { + try + { + for (int i = 1, f = normal_x_field; i < 4; ++i, ++f) + { + float value = boost::lexical_cast (st[i]); + memcpy (&mesh.cloud.data[vn_idx * mesh.cloud.point_step + mesh.cloud.fields[f].offset], + &value, + sizeof (float)); + } + ++vn_idx; + } + catch (const boost::bad_lexical_cast &e) + { + log_<<"Unable to convert %s to vertex normal!\n"; + throw GeorefException("Unable to convert %s to vertex normal!"); + } + continue; + } + // Texture coordinates + if (st[0] == "vt") + { + try + { + Eigen::Vector3f c (0, 0, 0); + for (std::size_t i = 1; i < st.size (); ++i) + c[i-1] = boost::lexical_cast (st[i]); + + if (c[2] == 0) + coordinates.push_back (Eigen::Vector2f (c[0], c[1])); + else + coordinates.push_back (Eigen::Vector2f (c[0]/c[2], c[1]/c[2])); + ++vt_idx; + + } + catch (const boost::bad_lexical_cast &e) + { + log_<<"Unable to convert %s to vertex texture coordinates!\n"; + throw GeorefException("Unable to convert %s to vertex texture coordinates!"); + } + continue; + } + // Material + if (st[0] == "usemtl") + { + mesh.tex_polygons.push_back (std::vector ()); + mesh.tex_materials.push_back (pcl::TexMaterial ()); + for (std::size_t i = 0; i < companions_.size (); ++i) + { + std::vector::const_iterator mat_it = companions_[i].getMaterial (st[1]); + if (mat_it != companions_[i].materials_.end ()) + { + mesh.tex_materials.back () = *mat_it; + break; + } + } + // We didn't find the appropriate material so we create it here with name only. 
+ if (mesh.tex_materials.back ().tex_name == "") + mesh.tex_materials.back ().tex_name = st[1]; + mesh.tex_coordinates.push_back (coordinates); + coordinates.clear (); + continue; + } + // Face + if (st[0] == "f") + { + //We only care for vertices indices + pcl::Vertices face_v; face_v.vertices.resize (st.size () - 1); + for (std::size_t i = 1; i < st.size (); ++i) + { + int v; + sscanf (st[i].c_str (), "%d", &v); + v = (v < 0) ? v_idx + v : v - 1; + face_v.vertices[i-1] = v; + + int v2, vt, vn; + sscanf (st[i].c_str (), "%d/%d/%d", &v2, &vt, &vn); + f2vt[3*(f_idx) + i-1] = vt-1; + } + mesh.tex_polygons.back ().push_back (face_v); + ++f_idx; + continue; + } + } + } + catch (const char *exception) + { + fs.close (); + log_<<"Unable to read file!\n"; + throw GeorefException("Unable to read file!"); + } + + if (vt_idx != v_idx) + { + std::vector > texcoordinates = std::vector >(0); + texcoordinates.reserve(3*f_idx); + + for (size_t faceIndex = 0; faceIndex < f_idx; ++faceIndex) + { + for(size_t i = 0; i < 3; ++i) + { + Eigen::Vector2f vt = mesh.tex_coordinates[0][f2vt[3*faceIndex+i]]; + texcoordinates.push_back(vt); + } + } + + mesh.tex_coordinates.clear(); + mesh.tex_coordinates.push_back(texcoordinates); + } + + fs.close(); + return (0); +} + +bool Georef::readHeader (const std::string &file_name, pcl::PCLPointCloud2 &cloud, + Eigen::Vector4f &origin, Eigen::Quaternionf &orientation, + int &file_version, int &data_type, unsigned int &data_idx, + const int offset) +{ + origin = Eigen::Vector4f::Zero (); + orientation = Eigen::Quaternionf::Identity (); + file_version = 0; + cloud.width = cloud.height = cloud.point_step = cloud.row_step = 0; + cloud.data.clear (); + data_type = 0; + data_idx = offset; + + std::ifstream fs; + std::string line; + + if (file_name == "" || !boost::filesystem::exists (file_name)) + { + return false; + } + + // Open file in binary mode to avoid problem of + // std::getline() corrupting the result of ifstream::tellg() + fs.open 
(file_name.c_str (), std::ios::binary); + if (!fs.is_open () || fs.fail ()) + { + fs.close (); + return false; + } + + // Seek at the given offset + fs.seekg (offset, std::ios::beg); + + // Read the header and fill it in with wonderful values + bool vertex_normal_found = false; + bool vertex_texture_found = false; + // Material library, skip for now! + // bool material_found = false; + std::vector material_files; + std::size_t nr_point = 0; + std::vector st; + + try + { + while (!fs.eof ()) + { + getline (fs, line); + // Ignore empty lines + if (line == "") + continue; + + // Tokenize the line + std::stringstream sstream (line); + sstream.imbue (std::locale::classic ()); + line = sstream.str (); + boost::trim (line); + boost::split (st, line, boost::is_any_of ("\t\r "), boost::token_compress_on); + // Ignore comments + if (st.at (0) == "#") + continue; + + // Vertex + if (st.at (0) == "v") + { + ++nr_point; + continue; + } + + // Vertex texture + if ((st.at (0) == "vt") && !vertex_texture_found) + { + vertex_texture_found = true; + continue; + } + + // Vertex normal + if ((st.at (0) == "vn") && !vertex_normal_found) + { + vertex_normal_found = true; + continue; + } + + // Material library, skip for now! 
+ if (st.at (0) == "mtllib") + { + material_files.push_back (st.at (1)); + continue; + } + } + } + catch (const char *exception) + { + fs.close (); + return false; + } + + if (!nr_point) + { + fs.close (); + return false; + } + + int field_offset = 0; + for (int i = 0; i < 3; ++i, field_offset += 4) + { + cloud.fields.push_back (pcl::PCLPointField ()); + cloud.fields[i].offset = field_offset; + cloud.fields[i].datatype = pcl::PCLPointField::FLOAT32; + cloud.fields[i].count = 1; + } + + cloud.fields[0].name = "x"; + cloud.fields[1].name = "y"; + cloud.fields[2].name = "z"; + + if (vertex_normal_found) + { + std::string normals_names[3] = { "normal_x", "normal_y", "normal_z" }; + for (int i = 0; i < 3; ++i, field_offset += 4) + { + cloud.fields.push_back (pcl::PCLPointField ()); + pcl::PCLPointField& last = cloud.fields.back (); + last.name = normals_names[i]; + last.offset = field_offset; + last.datatype = pcl::PCLPointField::FLOAT32; + last.count = 1; + } + } + + if (material_files.size () > 0) + { + for (std::size_t i = 0; i < material_files.size (); ++i) + { + pcl::MTLReader companion; + + if (companion.read (file_name, material_files[i])) + { + log_<<"Problem reading material file."; + } + + companions_.push_back (companion); + } + } + + cloud.point_step = field_offset; + cloud.width = nr_point; + cloud.height = 1; + cloud.row_step = cloud.point_step * cloud.width; + cloud.is_dense = true; + cloud.data.resize (cloud.point_step * nr_point); + fs.close (); + return true; +} + diff --git a/modules/odm_georef/src/Georef.hpp b/modules/odm_georef/src/Georef.hpp new file mode 100644 index 000000000..17b09a6eb --- /dev/null +++ b/modules/odm_georef/src/Georef.hpp @@ -0,0 +1,306 @@ +#pragma once + +// C++ +#include +#include +#include + +// PCL +#include +#include +// Modified PCL +#include "modifiedPclFunctions.hpp" + +// Logger +#include "Logger.hpp" + +// Transformation +#include "FindTransform.hpp" + +/*! 
+ * \brief The GeorefSystem struct is used to store information about a georeference system. + */ +struct GeorefSystem +{ + std::string system_; /**< The name of the system. **/ + double eastingOffset_; /**< The easting offset for the georeference system. **/ + double northingOffset_; /**< The northing offset for the georeference system. **/ + + friend std::ostream& operator<<(std::ostream &os, const GeorefSystem &geo); +}; + +/*! + * \brief The GeorefGCP struct used to store information about a GCP. + */ +struct GeorefGCP +{ + double x_; /**< The X coordinate of the GCP **/ + double y_; /**< The Y coordinate of the GCP **/ + double z_; /**< The Z coordinate of the GCP **/ + + bool use_; /**< Bool to check if the GCP is corresponding in the local model **/ + + double localX_; /**< The corresponding X coordinate in the model **/ + double localY_; /**< The corresponding Y coordinate in the model **/ + double localZ_; /**< The corresponding Z coordinate in the model **/ + + size_t cameraIndex_; /**< The index to the corresponding camera for the image. **/ + + int pixelX_; /**< The pixels x-position for the GCP in the corresponding image **/ + int pixelY_; /**< The pixels y-position for the GCP in the corresponding image **/ + + std::string image_; /**< The corresponding image for the GCP **/ + + GeorefGCP(); + ~GeorefGCP(); + + void extractGCP(std::istringstream &gcpStream); + + /*! + * \brief getPos Get the local position of the GCP. + */ + Vec3 getPos(); + + /*! + * \brief getReferencedPos Get the georeferenced position of the GCP. + */ + Vec3 getReferencedPos(); +}; + +/*! + * \brief The GeorefCamera struct is used to store information about a camera. + */ +struct GeorefCamera +{ + GeorefCamera(); + GeorefCamera(const GeorefCamera &other); + ~GeorefCamera(); + + /*! + * \brief extractCamera Extracts a camera's intrinsic and extrinsic parameters from a stream. + */ + void extractCamera(std::ifstream &bundleStream); + + /*! 
+ * \brief extractCameraGeoref Extracts a camera's world position from a stream. + */ + void extractCameraGeoref(std::istringstream &coordStream); + + /*! + * \brief getPos Get the local position of the camera. + */ + Vec3 getPos(); + + /*! + * \brief getReferencedPos Get the georeferenced position of the camera. + */ + Vec3 getReferencedPos(); + + /*! + * \brief isValid Whether this camera is valid based on its parameters. + */ + bool isValid(); + + double focalLength_; /**< The focal length of the camera. */ + double k1_; /**< The k1 lens distortion parameter. **/ + double k2_; /**< The k2 lens distortion parameter. **/ + + double easting_; /**< The easting of the camera. **/ + double northing_; /**< The northing of the camera. **/ + double altitude_; /**< The altitude of the camera. **/ + + Eigen::Affine3f* transform_; /**< The rotation of the camera. **/ + Eigen::Vector3f* position_; /**< The position of the camera. **/ + Eigen::Affine3f* pose_; /**< The pose of the camera. **/ + + friend std::ostream& operator<<(std::ostream &os, const GeorefCamera &cam); +}; + +/*! + * \brief The GeorefBestTriplet struct is used to store the best triplet found. + */ +struct GeorefBestTriplet +{ + size_t t_; /**< First ordinate of the best triplet found. **/ + size_t s_; /**< Second ordinate of the best triplet found. **/ + size_t p_; /**< Third ordinate of the best triplet found. **/ + double err_; /**< Error of this triplet. **/ +}; + +/*! + * \brief The Georef class is used to transform a mesh into a georeferenced system. + * The class reads camera positions from a bundle file. + * The class reads the georefenced camera positions from a coords file. + * The class reads a textured mesh from an OBJ-file. + * The class writes the georeferenced textured mesh to an OBJ-file. + * The class uses file read and write from pcl. + */ +class Georef +{ +public: + Georef(); + ~Georef(); + + int run(int argc, char* argv[]); + +private: + + /*! 
+ * \brief parseArguments Parses command line arguments. + * \param argc Application argument count. + * \param argv Argument values. + */ + void parseArguments(int argc, char* argv[]); + + /*! + * \brief printHelp Prints help, explaining usage. Can be shown by calling the program with argument: "-help". + */ + void printHelp(); + + /*! + * \brief setDefaultOutput Setup the output file name given the input file name. + */ + void setDefaultOutput(); + + /*! + * \brief setDefaultPointCloudOutput Setup the output file name given the input file name. + */ + void setDefaultPointCloudOutput(); + + /*! + * \brief createGeoreferencedModel Makes the input file georeferenced and saves it to the output file. + */ + void createGeoreferencedModel(); + + /*! + * \brief readCameras Reads the camera information from the bundle file. + */ + void readCameras(); + + /*! + * \brief readGCP Reads the ground control points from the gcp file. + */ + void readGCPs(); + + /*! + * \brief calculateGCPOffset Calculates an offset weighted from the ground control points read in the readGCP function. + */ + void calculateGCPOffset(); + + /*! + * \brief barycentricCoordinates Returns the world position of a point inside a 2d triangle by using the triangle vertex positions. + */ + pcl::PointXYZ barycentricCoordinates(pcl::PointXY point, pcl::PointXYZ vert0, pcl::PointXYZ vert1, pcl::PointXYZ vert2, pcl::PointXY p0, pcl::PointXY p1, pcl::PointXY p2); + + /*! + * \brief performGeoreferencingWithGCP Performs the georeferencing of the model with the ground control points. + */ + void performGeoreferencingWithGCP(); + + /*! + * \brief createGeoreferencedModelFromGCPData Makes the input file georeferenced and saves it to the output file. + */ + void createGeoreferencedModelFromGCPData(); + + /*! + * \brief createGeoreferencedModelFromExifData Makes the input file georeferenced and saves it to the output file. + */ + void createGeoreferencedModelFromExifData(); + + /*! 
+ * \brief chooseBestGCPTriplet Chooses the best triplet of GCPs to use when making the model georeferenced. + */ + void chooseBestGCPTriplet(size_t &gcp0, size_t &gcp1, size_t &gcp2); + + /*! + * \brief findBestGCPTriplet Partitioned version of chooseBestGCPTriplet. + */ + void findBestGCPTriplet(size_t &gcp0, size_t &gcp1, size_t &gcp2, size_t offset, size_t stride, double &minTotError); + + /*! + * \brief chooseBestCameraTriplet Chooses the best triplet of cameras to use when making the model georeferenced. + */ + void chooseBestCameraTriplet(size_t &cam0, size_t &cam1, size_t &cam2); + + /*! + * \brief findBestCameraTriplet Partitioned version of chooseBestCameraTriplet. + */ + void findBestCameraTriplet(size_t &cam0, size_t &cam1, size_t &cam2, size_t offset, size_t stride, double &minTotError); + + /*! + * \brief printGeorefSystem Prints a file containing information about the georeference system, next to the ouptut file. + **/ + void printGeorefSystem(); + + /*! + * \brief printFinalTransform Prints a file containing the final transform, next to the output file. + **/ + void printFinalTransform(Mat4 transform); + + /*! + * \brief Loads a model from an .obj file (replacement for the pcl obj loader). + * + * \param inputFile Path to the .obj file. + * \param mesh The model. + * \return True if model was loaded successfully. + */ + bool loadObjFile(std::string inputFile, pcl::TextureMesh &mesh); + + /*! + * \brief Function is compied straight from the function in the pcl::io module. + */ + bool readHeader (const std::string &file_name, pcl::PCLPointCloud2 &cloud, + Eigen::Vector4f &origin, Eigen::Quaternionf &orientation, + int &file_version, int &data_type, unsigned int &data_idx, + const int offset); + + + Logger log_; /**< Logging object. */ + std::string logFile_; /**< The path to the output log file. */ + + std::string finalTransformFile_; /**< The path to the file for the final transform. 
*/ + + std::string bundleFilename_; /**< The path to the cameras bundle file. **/ + std::string inputCoordFilename_; /**< The path to the cameras exif gps positions file. **/ + std::string outputCoordFilename_; /**< The path to the cameras georeferenced gps positions file. **/ + std::string gcpFilename_; /**< The path to the GCP file **/ + std::string imagesListPath_; /**< Path to the image list. **/ + std::string imagesLocation_; /**< The folder containing the images in the image list. **/ + std::string inputObjFilename_; /**< The path to the input mesh obj file. **/ + std::string outputObjFilename_; /**< The path to the output mesh obj file. **/ + std::string inputPointCloudFilename_; /**< The path to the input point cloud file. **/ + std::string outputPointCloudFilename_; /**< The path to the output point cloud file. **/ + std::string georefFilename_; /**< The path to the output offset file. **/ + + bool georeferencePointCloud_; + bool exportCoordinateFile_; + bool exportGeorefSystem_; + bool useGCP_; /**< Check if GCP-file is present and use this to georeference the model. **/ + // double bundleResizedTo_; /**< The size used in the previous steps to calculate the camera focal_length. */ + + std::vector cameras_; /**< A vector of all cameras. **/ + std::vector gcps_; /**< A vector of all GCPs. **/ + std::vector imageList_; /**< A vector containing the names of the corresponding cameras. **/ + + GeorefSystem georefSystem_; /**< Contains the georeference system. **/ + + bool multiMaterial_; /**< True if the mesh has multiple materials. **/ + + std::vector companions_; /**< Materials (used by loadOBJFile). **/ +}; + +/*! 
+ * \brief The Georef class + */ +class GeorefException : public std::exception +{ + +public: + GeorefException() : message("Error in Georef") {} + GeorefException(std::string msgInit) : message("Error in Georef:\n" + msgInit) {} + ~GeorefException() throw() {} + virtual const char* what() const throw() {return message.c_str(); } + +private: + std::string message; /**< The error message **/ +}; diff --git a/modules/odm_georef/src/Logger.cpp b/modules/odm_georef/src/Logger.cpp new file mode 100644 index 000000000..46b96fd72 --- /dev/null +++ b/modules/odm_georef/src/Logger.cpp @@ -0,0 +1,31 @@ +#include "Logger.hpp" + + +Logger::Logger(bool isPrintingInCout) : isPrintingInCout_(isPrintingInCout) +{ + +} + +Logger::~Logger() +{ + +} + +void Logger::print(std::string filePath) +{ + std::ofstream file(filePath.c_str(), std::ios::binary); + file << logStream_.str(); + file.close(); +} + +bool Logger::isPrintingInCout() const +{ + return isPrintingInCout_; +} + +void Logger::setIsPrintingInCout(bool isPrintingInCout) +{ + isPrintingInCout_ = isPrintingInCout; +} + + diff --git a/modules/odm_georef/src/Logger.hpp b/modules/odm_georef/src/Logger.hpp new file mode 100644 index 000000000..61520146e --- /dev/null +++ b/modules/odm_georef/src/Logger.hpp @@ -0,0 +1,68 @@ +#pragma once + +// STL +#include +#include +#include +#include + +/*! + * \brief The Logger class is used to store program messages in a log file. + * \details By using the << operator while printInCout is set, the class writes both to + * cout and to file, if the flag is not set, output is written to file only. + */ +class Logger +{ +public: + /*! + * \brief Logger Contains functionality for printing and displaying log information. + * \param printInCout Flag toggling if operator << also writes to cout. + */ + Logger(bool isPrintingInCout = true); + + /*! + * \brief Destructor. + */ + ~Logger(); + + /*! + * \brief print Prints the contents of the log to file. 
+ * \param filePath Path specifying where to write the log. + */ + void print(std::string filePath); + + /*! + * \brief isPrintingInCout Check if console printing flag is set. + * \return Console printing flag. + */ + bool isPrintingInCout() const; + + /*! + * \brief setIsPrintingInCout Set console printing flag. + * \param isPrintingInCout Value, if true, messages added to the log are also printed in cout. + */ + void setIsPrintingInCout(bool isPrintingInCout); + + /*! + * Operator for printing messages to log and in the standard output stream if desired. + */ + template + friend Logger& operator<< (Logger &log, T t) + { + // If console printing is enabled. + if (log.isPrintingInCout_) + { + std::cout << t; + std::cout.flush(); + } + // Write to log. + log.logStream_ << t; + + return log; + } + +private: + bool isPrintingInCout_; /*!< If flag is set, log is printed in cout and written to the log. */ + + std::stringstream logStream_; /*!< Stream for storing the log. */ +}; diff --git a/modules/odm_georef/src/main.cpp b/modules/odm_georef/src/main.cpp new file mode 100644 index 000000000..eef4096d4 --- /dev/null +++ b/modules/odm_georef/src/main.cpp @@ -0,0 +1,8 @@ +#include "Georef.hpp" + +int main(int argc, char* argv[]) +{ + Georef ref; + return ref.run(argc, argv); +} + diff --git a/modules/odm_georef/src/modifiedPclFunctions.cpp b/modules/odm_georef/src/modifiedPclFunctions.cpp new file mode 100644 index 000000000..cfa8927d2 --- /dev/null +++ b/modules/odm_georef/src/modifiedPclFunctions.cpp @@ -0,0 +1,336 @@ +/* +* Software License Agreement (BSD License) +* +* Point Cloud Library (PCL) - www.pointclouds.org +* Copyright (c) 2012-, Open Perception, Inc. +* +* All rights reserved. 
+* +* Redistribution and use in source and binary forms, with or without +* modification, are permitted provided that the following conditions +* are met: +* +* * Redistributions of source code must retain the above copyright +* notice, this list of conditions and the following disclaimer. +* * Redistributions in binary form must reproduce the above +* copyright notice, this list of conditions and the following +* disclaimer in the documentation and/or other materials provided +* with the distribution. +* * Neither the name of the copyright holder(s) nor the names of its +* contributors may be used to endorse or promote products derived +* from this software without specific prior written permission. +* +* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +* POSSIBILITY OF SUCH DAMAGE. 
+* +*/ + +#include "modifiedPclFunctions.hpp" + +int saveOBJFile(const std::string &file_name, const pcl::TextureMesh &tex_mesh, unsigned precision) +{ + if (tex_mesh.cloud.data.empty ()) + { + PCL_ERROR ("[pcl::io::saveOBJFile] Input point cloud has no data!\n"); + return (-1); + } + + // Open file + std::ofstream fs; + fs.precision (precision); + fs.open (file_name.c_str ()); + + // Define material file + std::string mtl_file_name = file_name.substr (0, file_name.find_last_of (".")) + ".mtl"; + // Strip path for "mtllib" command + std::string mtl_file_name_nopath = mtl_file_name; + //std::cout << mtl_file_name_nopath << std::endl; + mtl_file_name_nopath.erase (0, mtl_file_name.find_last_of ('/') + 1); + + /* Write 3D information */ + // number of points + int nr_points = tex_mesh.cloud.width * tex_mesh.cloud.height; + int point_size = tex_mesh.cloud.data.size () / nr_points; + + // mesh size + int nr_meshes = tex_mesh.tex_polygons.size (); + // number of faces for header + int nr_faces = 0; + for (int m = 0; m < nr_meshes; ++m) + nr_faces += tex_mesh.tex_polygons[m].size (); + + // Write the header information + fs << "####" << std::endl; + fs << "# OBJ dataFile simple version. File name: " << file_name << std::endl; + fs << "# Vertices: " << nr_points << std::endl; + fs << "# Faces: " < 0) + f_idx += tex_mesh.tex_polygons[m-1].size (); + + if(tex_mesh.tex_materials.size() !=0) + { + fs << "# The material will be used for mesh " << m << std::endl; + //TODO pbl here with multi texture and unseen faces + fs << "usemtl " << tex_mesh.tex_materials[m].tex_name << std::endl; + fs << "# Faces" << std::endl; + } + for (size_t i = 0; i < tex_mesh.tex_polygons[m].size(); ++i) + { + // Write faces with "f" + fs << "f"; + size_t j = 0; + // There's one UV per vertex per face, i.e., the same vertex can have + // different UV depending on the face. 
+ for (j = 0; j < tex_mesh.tex_polygons[m][i].vertices.size (); ++j) + { + unsigned int idx = tex_mesh.tex_polygons[m][i].vertices[j] + 1; + fs << " " << idx + << "/" << 3*(i+f_idx) +j+1; + //<< "/" << idx; // vertex index in obj file format starting with 1 + } + fs << std::endl; + } + //PCL_INFO ("%d faces in mesh %d \n", tex_mesh.tex_polygons[m].size () , m); + fs << "# "<< tex_mesh.tex_polygons[m].size() << " faces in mesh " << m << std::endl; + } + fs << "# End of File"; + + // Close obj file + //PCL_INFO ("Closing obj file\n"); + fs.close (); + + /* Write material defination for OBJ file*/ + // Open file + //PCL_INFO ("Writing material files\n"); + //dont do it if no material to write + if(tex_mesh.tex_materials.size() ==0) + return (0); + + std::ofstream m_fs; + m_fs.precision (precision); + m_fs.open (mtl_file_name.c_str ()); + //std::cout << "MTL file is located at_ " << mtl_file_name << std::endl; + // default + m_fs << "#" << std::endl; + m_fs << "# Wavefront material file" << std::endl; + m_fs << "#" << std::endl; + for(int m = 0; m < nr_meshes; ++m) + { + m_fs << "newmtl " << tex_mesh.tex_materials[m].tex_name << std::endl; + m_fs << "Ka "<< tex_mesh.tex_materials[m].tex_Ka.r << " " << tex_mesh.tex_materials[m].tex_Ka.g << " " << tex_mesh.tex_materials[m].tex_Ka.b << std::endl; // defines the ambient color of the material to be (r,g,b). + m_fs << "Kd "<< tex_mesh.tex_materials[m].tex_Kd.r << " " << tex_mesh.tex_materials[m].tex_Kd.g << " " << tex_mesh.tex_materials[m].tex_Kd.b << std::endl; // defines the diffuse color of the material to be (r,g,b). + m_fs << "Ks "<< tex_mesh.tex_materials[m].tex_Ks.r << " " << tex_mesh.tex_materials[m].tex_Ks.g << " " << tex_mesh.tex_materials[m].tex_Ks.b << std::endl; // defines the specular color of the material to be (r,g,b). This color shows up in highlights. + m_fs << "d " << tex_mesh.tex_materials[m].tex_d << std::endl; // defines the transparency of the material to be alpha. 
+ m_fs << "Ns "<< tex_mesh.tex_materials[m].tex_Ns << std::endl; // defines the shininess of the material to be s. + m_fs << "illum "<< tex_mesh.tex_materials[m].tex_illum << std::endl; // denotes the illumination model used by the material. + // illum = 1 indicates a flat material with no specular highlights, so the value of Ks is not used. + // illum = 2 denotes the presence of specular highlights, and so a specification for Ks is required. + m_fs << "map_Kd " << tex_mesh.tex_materials[m].tex_file << std::endl; + m_fs << "###" << std::endl; + } + m_fs.close (); + return (0); +} + +bool getPixelCoordinates(const pcl::PointXYZ &pt, const pcl::TextureMapping::Camera &cam, pcl::PointXY &UV_coordinates) +{ + if (pt.z > 0) + { + // compute image center and dimension + double sizeX = cam.width; + double sizeY = cam.height; + double cx, cy; + if (cam.center_w > 0) + cx = cam.center_w; + else + cx = sizeX / 2.0; + if (cam.center_h > 0) + cy = cam.center_h; + else + cy = sizeY / 2.0; + + double focal_x, focal_y; + if (cam.focal_length_w > 0) + focal_x = cam.focal_length_w; + else + focal_x = cam.focal_length; + if (cam.focal_length_h > 0) + focal_y = cam.focal_length_h; + else + focal_y = cam.focal_length; + + // project point on camera's image plane + UV_coordinates.x = static_cast ((focal_x * (pt.x / pt.z) + cx)); //horizontal + UV_coordinates.y = static_cast ((focal_y * (pt.y / pt.z) + cy)); //vertical + + // point is visible! 
+ if (UV_coordinates.x >= 1.0 && UV_coordinates.x <= (sizeX - 1.0) && UV_coordinates.y >= 1.0 && UV_coordinates.y <= (sizeY - 1.0)) + { + return (true); // point was visible by the camera + } + } + + // point is NOT visible by the camera + UV_coordinates.x = -1.0f; + UV_coordinates.y = -1.0f; + return (false); // point was not visible by the camera +} + +bool isFaceProjected (const pcl::TextureMapping::Camera &camera, const pcl::PointXYZ &p1, const pcl::PointXYZ &p2, const pcl::PointXYZ &p3, pcl::PointXY &proj1, pcl::PointXY &proj2, pcl::PointXY &proj3) +{ + return (getPixelCoordinates(p1, camera, proj1) && getPixelCoordinates(p2, camera, proj2) && getPixelCoordinates(p3, camera, proj3)); +} + +void getTriangleCircumscribedCircleCentroid( const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, pcl::PointXY &circumcenter, double &radius) +{ + // compute centroid's coordinates (translate back to original coordinates) + circumcenter.x = static_cast (p1.x + p2.x + p3.x ) / 3; + circumcenter.y = static_cast (p1.y + p2.y + p3.y ) / 3; + double r1 = (circumcenter.x - p1.x) * (circumcenter.x - p1.x) + (circumcenter.y - p1.y) * (circumcenter.y - p1.y) ; + double r2 = (circumcenter.x - p2.x) * (circumcenter.x - p2.x) + (circumcenter.y - p2.y) * (circumcenter.y - p2.y) ; + double r3 = (circumcenter.x - p3.x) * (circumcenter.x - p3.x) + (circumcenter.y - p3.y) * (circumcenter.y - p3.y) ; + + // radius + radius = std::sqrt( std::max( r1, std::max( r2, r3) )) ; +} + +bool checkPointInsideTriangle(const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, const pcl::PointXY &pt) +{ + // Compute vectors + Eigen::Vector2d v0, v1, v2; + v0(0) = p3.x - p1.x; v0(1) = p3.y - p1.y; // v0= C - A + v1(0) = p2.x - p1.x; v1(1) = p2.y - p1.y; // v1= B - A + v2(0) = pt.x - p1.x; v2(1) = pt.y - p1.y; // v2= P - A + + // Compute dot products + double dot00 = v0.dot(v0); // dot00 = dot(v0, v0) + double dot01 = v0.dot(v1); // dot01 = dot(v0, v1) + double dot02 = 
v0.dot(v2); // dot02 = dot(v0, v2) + double dot11 = v1.dot(v1); // dot11 = dot(v1, v1) + double dot12 = v1.dot(v2); // dot12 = dot(v1, v2) + + // Compute barycentric coordinates + double invDenom = 1.0 / (dot00*dot11 - dot01*dot01); + double u = (dot11*dot02 - dot01*dot12) * invDenom; + double v = (dot00*dot12 - dot01*dot02) * invDenom; + + // Check if point is in triangle + return ((u >= 0) && (v >= 0) && (u + v < 1)); +} diff --git a/modules/odm_georef/src/modifiedPclFunctions.hpp b/modules/odm_georef/src/modifiedPclFunctions.hpp new file mode 100644 index 000000000..0e6a77dd3 --- /dev/null +++ b/modules/odm_georef/src/modifiedPclFunctions.hpp @@ -0,0 +1,21 @@ +#pragma once + +// STL +#include +#include + +// PCL +#include +#include +#include +#include + +int saveOBJFile(const std::string &file_name, const pcl::TextureMesh &tex_mesh, unsigned precision); + +bool getPixelCoordinates(const pcl::PointXYZ &pt, const pcl::TextureMapping::Camera &cam, pcl::PointXY &UV_coordinates); + +bool isFaceProjected (const pcl::TextureMapping::Camera &camera, const pcl::PointXYZ &p1, const pcl::PointXYZ &p2, const pcl::PointXYZ &p3, pcl::PointXY &proj1, pcl::PointXY &proj2, pcl::PointXY &proj3); + +void getTriangleCircumscribedCircleCentroid(const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, pcl::PointXY &circumcenter, double &radius); + +bool checkPointInsideTriangle(const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, const pcl::PointXY &pt); diff --git a/modules/odm_meshing/CMakeLists.txt b/modules/odm_meshing/CMakeLists.txt new file mode 100644 index 000000000..4be138c91 --- /dev/null +++ b/modules/odm_meshing/CMakeLists.txt @@ -0,0 +1,26 @@ +project(odm_meshing) +cmake_minimum_required(VERSION 2.8) + +# Set pcl dir to the input spedified with option -DPCL_DIR="path" +set(PCL_DIR "PCL_DIR-NOTFOUND" CACHE "PCL_DIR" "Path to the pcl installation directory") + +# Add compiler options. 
+add_definitions(-Wall -Wextra) + +# Find pcl at the location specified by PCL_DIR +find_package(PCL 1.8 HINTS "${PCL_DIR}/share/pcl-1.8") + +# Add the PCL and Eigen include dirs. +# Necessary since the PCL_INCLUDE_DIR variable set by find_package is broken.) +include_directories(${PCL_ROOT}/include/pcl-${PCL_VERSION_MAJOR}.${PCL_VERSION_MINOR}) +include_directories(${EIGEN_ROOT}) + +# Add source directory +aux_source_directory("./src" SRC_LIST) + +# Add exectuteable +add_executable(${PROJECT_NAME} ${SRC_LIST}) + +# Link +target_link_libraries(odm_meshing ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES}) + diff --git a/modules/odm_meshing/src/Logger.cpp b/modules/odm_meshing/src/Logger.cpp new file mode 100644 index 000000000..a6c81a8b5 --- /dev/null +++ b/modules/odm_meshing/src/Logger.cpp @@ -0,0 +1,31 @@ +#include "Logger.hpp" + + +Logger::Logger(bool isPrintingInCout) : isPrintingInCout_(isPrintingInCout) +{ + +} + +Logger::~Logger() +{ + +} + +void Logger::printToFile(std::string filePath) +{ + std::ofstream file(filePath.c_str(), std::ios::binary); + file << logStream_.str(); + file.close(); +} + +bool Logger::isPrintingInCout() const +{ + return isPrintingInCout_; +} + +void Logger::setIsPrintingInCout(bool isPrintingInCout) +{ + isPrintingInCout_ = isPrintingInCout; +} + + diff --git a/modules/odm_meshing/src/Logger.hpp b/modules/odm_meshing/src/Logger.hpp new file mode 100644 index 000000000..31c5538cb --- /dev/null +++ b/modules/odm_meshing/src/Logger.hpp @@ -0,0 +1,68 @@ +#pragma once + +// STL +#include +#include +#include +#include + +/*! + * \brief The Logger class is used to store program messages in a log file. + * \details By using the << operator while printInCout is set, the class writes both to + * cout and to file, if the flag is not set, output is written to file only. + */ +class Logger +{ +public: + /*! + * \brief Logger Contains functionality for printing and displaying log information. 
+ * \param printInCout Flag toggling if operator << also writes to cout. + */ + Logger(bool isPrintingInCout = true); + + /*! + * \brief Destructor. + */ + ~Logger(); + + /*! + * \brief print Prints the contents of the log to file. + * \param filePath Path specifying where to write the log. + */ + void printToFile(std::string filePath); + + /*! + * \brief isPrintingInCout Check if console printing flag is set. + * \return Console printing flag. + */ + bool isPrintingInCout() const; + + /*! + * \brief setIsPrintingInCout Set console printing flag. + * \param isPrintingInCout Value, if true, messages added to the log are also printed in cout. + */ + void setIsPrintingInCout(bool isPrintingInCout); + + /*! + * Operator for printing messages to log and in the standard output stream if desired. + */ + template + friend Logger& operator<< (Logger &log, T t) + { + // If console printing is enabled. + if (log.isPrintingInCout_) + { + std::cout << t; + std::cout.flush(); + } + // Write to log. + log.logStream_ << t; + + return log; + } + +private: + bool isPrintingInCout_; /*!< If flag is set, log is printed in cout and written to the log. */ + + std::stringstream logStream_; /*!< Stream for storing the log. 
*/ +}; diff --git a/modules/odm_meshing/src/OdmMeshing.cpp b/modules/odm_meshing/src/OdmMeshing.cpp new file mode 100644 index 000000000..68bdeaad1 --- /dev/null +++ b/modules/odm_meshing/src/OdmMeshing.cpp @@ -0,0 +1,361 @@ +#include "OdmMeshing.hpp" + + +OdmMeshing::OdmMeshing() : log_(false) +{ + meshCreator_ = pcl::Poisson::Ptr(new pcl::Poisson()); + points_ = pcl::PointCloud::Ptr(new pcl::PointCloud()); + mesh_ = pcl::PolygonMeshPtr(new pcl::PolygonMesh); + decimatedMesh_ = pcl::PolygonMeshPtr(new pcl::PolygonMesh); + + // Set default values + outputFile_ = ""; + logFilePath_ = ""; + + maxVertexCount_ = 0; + treeDepth_ = 0; + + solverDivide_ = 9.0; + samplesPerNode_ = 1.0; + decimationFactor_ = 0.0; + + logFilePath_ = "odm_meshing_log.txt"; + log_ << logFilePath_ << "\n"; +} + +OdmMeshing::~OdmMeshing() +{ + +} + +int OdmMeshing::run(int argc, char **argv) +{ + // If no arguments were passed, print help and return early. + if (argc <= 1) + { + printHelp(); + return EXIT_SUCCESS; + } + + try + { + parseArguments(argc, argv); + + loadPoints(); + + createMesh(); + + decimateMesh(); + + writePlyFile(); + + } + catch (const OdmMeshingException& e) + { + log_.setIsPrintingInCout(true); + log_ << e.what() << "\n"; + log_.printToFile(logFilePath_); + log_ << "For more detailed information, see log file." << "\n"; + return EXIT_FAILURE; + } + catch (const std::exception& e) + { + log_.setIsPrintingInCout(true); + log_ << "Error in OdmMeshing:\n"; + log_ << e.what() << "\n"; + log_.printToFile(logFilePath_); + log_ << "For more detailed information, see log file." << "\n"; + return EXIT_FAILURE; + } + catch (...) + { + log_.setIsPrintingInCout(true); + log_ << "Unknwon error in OdmMeshing:\n"; + log_.printToFile(logFilePath_); + log_ << "For more detailed information, see log file." 
<< "\n"; + return EXIT_FAILURE; + } + + log_.printToFile(logFilePath_); + return EXIT_SUCCESS; +} + + +void OdmMeshing::parseArguments(int argc, char **argv) +{ + + for(int argIndex = 1; argIndex < argc; ++argIndex) + { + // The argument to be parsed. + std::string argument = std::string(argv[argIndex]); + + if(argument == "-help") + { + printHelp(); + } + else if(argument == "-verbose") + { + log_.setIsPrintingInCout(true); + } + else if(argument == "-maxVertexCount" && argIndex < argc) + { + ++argIndex; + if (argIndex >= argc) + { + throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + std::stringstream ss(argv[argIndex]); + ss >> maxVertexCount_; + if (ss.bad()) + { + throw OdmMeshingException("Argument '" + argument + "' has a bad value (wrong type)."); + } + log_ << "Vertex count was manually set to: " << maxVertexCount_ << "\n"; + } + else if(argument == "-octreeDepth" && argIndex < argc) + { + ++argIndex; + if (argIndex >= argc) + { + throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + std::stringstream ss(argv[argIndex]); + ss >> treeDepth_; + if (ss.bad()) + { + throw OdmMeshingException("Argument '" + argument + "' has a bad value (wrong type)."); + } + log_ << "Octree depth was manually set to: " << treeDepth_ << "\n"; + } + else if(argument == "-solverDivide" && argIndex < argc) + { + ++argIndex; + if (argIndex >= argc) + { + throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + std::stringstream ss(argv[argIndex]); + ss >> solverDivide_; + if (ss.bad()) + { + throw OdmMeshingException("Argument '" + argument + "' has a bad value (wrong type)."); + } + log_ << "Numerical solver divisions was manually set to: " << treeDepth_ << "\n"; + } + else if(argument == "-samplesPerNode" && argIndex < argc) + { + ++argIndex; + if 
(argIndex >= argc) + { + throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + std::stringstream ss(argv[argIndex]); + ss >> samplesPerNode_; + if (ss.bad()) + { + throw OdmMeshingException("Argument '" + argument + "' has a bad value (wrong type)."); + } + log_ << "The number of samples per octree node was manually set to: " << samplesPerNode_ << "\n"; + } + else if(argument == "-inputFile" && argIndex < argc) + { + ++argIndex; + if (argIndex >= argc) + { + throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + inputFile_ = std::string(argv[argIndex]); + std::ifstream testFile(inputFile_.c_str(), std::ios::binary); + if (!testFile.is_open()) + { + throw OdmMeshingException("Argument '" + argument + "' has a bad value. (file not accessible)"); + } + testFile.close(); + log_ << "Reading point cloud at: " << inputFile_ << "\n"; + } + else if(argument == "-outputFile" && argIndex < argc) + { + ++argIndex; + if (argIndex >= argc) + { + throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + outputFile_ = std::string(argv[argIndex]); + std::ofstream testFile(outputFile_.c_str()); + if (!testFile.is_open()) + { + throw OdmMeshingException("Argument '" + argument + "' has a bad value."); + } + testFile.close(); + log_ << "Writing output to: " << outputFile_ << "\n"; + } + else if(argument == "-logFile" && argIndex < argc) + { + ++argIndex; + if (argIndex >= argc) + { + throw OdmMeshingException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + logFilePath_ = std::string(argv[argIndex]); + std::ofstream testFile(outputFile_.c_str()); + if (!testFile.is_open()) + { + throw OdmMeshingException("Argument '" + argument + "' has a bad value."); + } + testFile.close(); + log_ << "Writing log 
information to: " << logFilePath_ << "\n"; + } + else + { + printHelp(); + throw OdmMeshingException("Unrecognised argument '" + argument + "'"); + } + } +} + +void OdmMeshing::loadPoints() +{ + + if(pcl::io::loadPLYFile (inputFile_.c_str(), *points_.get()) == -1) { + throw OdmMeshingException("Error when reading points and normals from:\n" + inputFile_ + "\n"); + } + else + { + log_ << "Successfully loaded " << points_->size() << " points with corresponding normals from file.\n"; + } +} + +void OdmMeshing::printHelp() +{ + bool printInCoutPop = log_.isPrintingInCout(); + log_.setIsPrintingInCout(true); + + log_ << "OpenDroneMapMeshing.exe\n\n"; + + log_ << "Purpose:" << "\n"; + log_ << "Create a mesh from an oriented point cloud (points with normals) using the Poisson surface reconstruction method." << "\n"; + + log_ << "Usage:" << "\n"; + log_ << "The program requires a path to an input PLY point cloud file, all other input parameters are optional." << "\n\n"; + + log_ << "The following flags are available\n"; + log_ << "Call the program with flag \"-help\", or without parameters to print this message, or check any generated log file.\n"; + log_ << "Call the program with flag \"-verbose\", to print log messages in the standard output stream as well as in the log file.\n\n"; + + log_ << "Parameters are specified as: \"- \", (without <>), and the following parameters are configureable: " << "\n"; + log_ << "\"-inputFile \" (mandatory)" << "\n"; + log_ << "\"Input ascii ply file that must contain a point cloud with normals.\n\n"; + + log_ << "\"-outputFile \" (optional, default: odm_mesh.ply)" << "\n"; + log_ << "\"Target file in which the mesh is saved.\n\n"; + + log_ << "\"-logFile \" (optional, default: odm_meshing_log.txt)" << "\n"; + log_ << "\"Target file in which the mesh is saved.\n\n"; + + log_ << "\"-maxVertexCount \" (optional, default: 100,000)" << "\n"; + log_ << "Desired final vertex count (after decimation), set to 0 to disable decimation.\n\n"; + + 
log_ << "\"-treeDepth \" (optional, default: 0 (automatic))" << "\n"; + log_ << "Controls octree depth used for poisson reconstruction. Recommended values (9-11).\n" + << "Increasing the value on this parameter will raise initial vertex count." + << "If omitted or zero, the depth is calculated automatically from the input point count.\n\n"; + + log_ << "\"-samplesPerNode \" (optional, default: 1)" << "\n"; + log_ << "Average number of samples (points) per octree node. Increasing this value might help if data is very noisy.\n\n"; + + log_ << "\"-solverDivide \" (optional, default: 9)" << "\n"; + log_ << "Ocree depth at which the Laplacian equation is solved in the surface reconstruction step.\n"; + log_ << "Increasing this value increases computation times slightly but helps reduce memory usage.\n\n"; + + log_.setIsPrintingInCout(printInCoutPop); +} + +void OdmMeshing::createMesh() +{ + + // Attempt to calculate the depth of the tree if unspecified + if (treeDepth_ == 0) + { + treeDepth_ = calcTreeDepth(points_->size()); + } + + log_ << "Octree depth used for reconstruction is: " << treeDepth_ << "\n"; + log_ << "Estimated initial vertex count: " << pow(4, treeDepth_) << "\n\n"; + + meshCreator_->setDepth(treeDepth_); + meshCreator_->setSamplesPerNode(samplesPerNode_); + meshCreator_->setInputCloud(points_); + + // Guarantee manifold mesh. 
+ meshCreator_->setManifold(true); + + // Begin reconstruction + meshCreator_->reconstruct(*mesh_.get()); + + log_ << "Reconstruction complete:\n"; + log_ << "Vertex count: " << mesh_->cloud.width << "\n"; + log_ << "Triangle count: " << mesh_->polygons.size() << "\n\n"; + +} + +void OdmMeshing::decimateMesh() +{ + if (maxVertexCount_ <= 0) + { + log_ << "Vertex count not specified, decimation cancelled.\n"; + return; + } + + if (maxVertexCount_ > mesh_->cloud.height*mesh_->cloud.width) + { + log_ << "Vertex count in mesh lower than initially generated mesh, unable to decimate.\n"; + return; + } + else + { + decimatedMesh_ = pcl::PolygonMeshPtr(new pcl::PolygonMesh); + + double reductionFactor = 1.0 - double(maxVertexCount_)/double(mesh_->cloud.height*mesh_->cloud.width); + + log_ << "Decimating mesh, removing " << reductionFactor*100 << " percent of vertices.\n"; + + pcl::MeshQuadricDecimationVTK decimator; + decimator.setInputMesh(mesh_); + decimator.setTargetReductionFactor(reductionFactor); + decimator.process(*decimatedMesh_.get()); + + log_ << "Decimation complete.\n"; + log_ << "Decimated vertex count: " << decimatedMesh_->cloud.width << "\n"; + log_ << "Decimated triangle count: " << decimatedMesh_->polygons.size() << "\n\n"; + + mesh_ = decimatedMesh_; + } +} + +int OdmMeshing::calcTreeDepth(size_t nPoints) +{ + // Assume points are located (roughly) in a plane. + double squareSide = std::sqrt(double(nPoints)); + + // Calculate octree depth such that if points were equally distributed in + // a quadratic plane, there would be at least 1 point per octree node. 
+ int depth = 0; + while(std::pow(2,depth) < squareSide/2) + { + depth++; + } + return depth; +} + +void OdmMeshing::writePlyFile() +{ + log_ << "Saving mesh to file.\n"; + if (pcl::io::savePLYFile(outputFile_.c_str(), *mesh_.get()) == -1) { + throw OdmMeshingException("Error when saving mesh to file:\n" + outputFile_ + "\n"); + } + else + { + log_ << "Successfully wrote mesh to:\n" + << outputFile_ << "\n"; + } +} diff --git a/modules/odm_meshing/src/OdmMeshing.hpp b/modules/odm_meshing/src/OdmMeshing.hpp new file mode 100644 index 000000000..8e5599894 --- /dev/null +++ b/modules/odm_meshing/src/OdmMeshing.hpp @@ -0,0 +1,117 @@ +#pragma once + +// STL +#include +#include + +// PCL +#include +#include +#include + +// Logging +#include "Logger.hpp" + +/*! + * \brief The OdmMeshing class is used to create a triangulated mesh using the Poisson method. + * The class reads an oriented point cloud (coordinates and normals) from a PLY ascii + * file and outputs the resulting welded manifold mesh on the form of an ASCII PLY-file. + * The class uses file read and write functions from pcl. + */ +class OdmMeshing +{ +public: + OdmMeshing(); + ~OdmMeshing(); + + /*! + * \brief run Runs the meshing functionality using the provided input arguments. + * For a list of accepted arguments, please see the main page documentation or + * call the program with parameter "-help". + * \param argc Application argument count. + * \param argv Argument values. + * \return 0 If successful. + */ + int run(int argc, char **argv); + +private: + + /*! + * \brief parseArguments Parses command line arguments. + * \param argc Application argument count. + * \param argv Argument values. + */ + void parseArguments(int argc, char** argv); + + /*! + * \brief createMesh Sets up the pcl::Poisson meshing class using provided arguments and calls + * it to start the meshing. + */ + void createMesh(); + + /*! + * \brief loadPoints Loads a PLY ascii file with points and normals from file. 
+ */ + void loadPoints(); + + /*! + * \brief decimateMesh Performs post-processing on the form of quadric decimation to generate a mesh + * that has a higher density in areas with a lot of structure. + */ + void decimateMesh(); + + /*! + * \brief writePlyFile Writes the mesh to file on the Ply format. + */ + void writePlyFile(); + + /*! + * \brief printHelp Prints help, explaining usage. Can be shown by calling the program with argument: "-help". + */ + void printHelp(); + + /*! + * \brief calcTreeDepth Attepts to calculate the depth of the tree using the point cloud. + * The function makes the assumption points are located roughly in a plane + * (fairly reasonable for ortho-terrain photos) and tries to generate a mesh using + * an octree with an appropriate resolution. + * \param nPoints The total number of points in the input point cloud. + * \return The calcualated octree depth. + */ + int calcTreeDepth(size_t nPoints); + + Logger log_; /**< Logging object. */ + + pcl::Poisson::Ptr meshCreator_; /**< PCL poisson meshing class. */ + + pcl::PointCloud::Ptr points_; /**< Input point and normals. */ + pcl::PolygonMeshPtr mesh_; /**< PCL polygon mesh. */ + pcl::PolygonMeshPtr decimatedMesh_; /**< Decimated polygon mesh. */ + + std::string inputFile_; /**< Path to a file containing points and normals. */ + std::string outputFile_; /**< Path to the destination file. */ + std::string logFilePath_; /**< Path to the log file. */ + + unsigned int maxVertexCount_; /**< Desired output vertex count. */ + unsigned int treeDepth_; /**< Depth of octree used for reconstruction. */ + + double samplesPerNode_; /**< Samples per octree node.*/ + double solverDivide_; /**< Depth at which the Laplacian equation solver is run during surface estimation.*/ + double decimationFactor_; /**< Percentage of points to remove when decimating the mesh. */ +}; + +/*! 
+ * \brief The OdmMeshingException class + */ +class OdmMeshingException : public std::exception +{ + +public: + OdmMeshingException() : message("Error in OdmMeshing") {} + OdmMeshingException(std::string msgInit) : message("Error in OdmMeshing:\n" + msgInit) {} + ~OdmMeshingException() throw() {} + virtual const char* what() const throw() {return message.c_str(); } + +private: + std::string message; /**< The error message **/ +}; diff --git a/modules/odm_meshing/src/main.cpp b/modules/odm_meshing/src/main.cpp new file mode 100644 index 000000000..f99f20ce7 --- /dev/null +++ b/modules/odm_meshing/src/main.cpp @@ -0,0 +1,20 @@ +// Insert license here. + +// Include meshing source code. +#include "OdmMeshing.hpp" + +/*! + * \mainpage main OpenDroneMap Meshing Module + * + * The OpenDroneMap Meshing Module generates a welded, manifold mesh using the Poisson + * surface reconstruction algorithm from any oriented point cloud (points with corresponding normals). + * + */ + +int main(int argc, char** argv) +{ + + OdmMeshing meshCreator; + return meshCreator.run(argc, argv); + +} diff --git a/modules/odm_orthophoto/CMakeLists.txt b/modules/odm_orthophoto/CMakeLists.txt new file mode 100644 index 000000000..4a81e6658 --- /dev/null +++ b/modules/odm_orthophoto/CMakeLists.txt @@ -0,0 +1,33 @@ +project(odm_orthophoto) +cmake_minimum_required(VERSION 2.8) + +# Set pcl dir to the input spedified with option -DPCL_DIR="path" +set(PCL_DIR "PCL_DIR-NOTFOUND" CACHE "PCL_DIR" "Path to the pcl installation directory") +set(OPENCV_DIR "OPENCV_DIR-NOTFOUND" CACHE "OPENCV_DIR" "Path to the OPENCV installation directory") + +# Add compiler options. +add_definitions(-Wall -Wextra) + +# Find pcl at the location specified by PCL_DIR +find_package(PCL 1.8 HINTS "${PCL_DIR}/share/pcl-1.8" REQUIRED) + +# Find OpenCV at the default location +find_package(OpenCV HINTS "${OPENCV_DIR}" REQUIRED) + +# Only link with required opencv modules. 
+set(OpenCV_LIBS opencv_core opencv_imgproc opencv_highgui) + +# Add the PCL, Eigen and OpenCV include dirs. +# Necessary since the PCL_INCLUDE_DIR variable set by find_package is broken.) +include_directories(${PCL_ROOT}/include/pcl-${PCL_VERSION_MAJOR}.${PCL_VERSION_MINOR}) +include_directories(${EIGEN_ROOT}) +include_directories(${OpenCV_INCLUDE_DIRS}) + +#library_directories(${OpenCV_LIBRARY_DIRS}) + +# Add source directory +aux_source_directory("./src" SRC_LIST) + +# Add exectuteable +add_executable(${PROJECT_NAME} ${SRC_LIST}) +target_link_libraries(odm_orthophoto ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES} ${OpenCV_LIBS}) \ No newline at end of file diff --git a/modules/odm_orthophoto/src/Logger.cpp b/modules/odm_orthophoto/src/Logger.cpp new file mode 100644 index 000000000..29cb80487 --- /dev/null +++ b/modules/odm_orthophoto/src/Logger.cpp @@ -0,0 +1,29 @@ +#include "Logger.hpp" + + +Logger::Logger(bool isPrintingInCout) : isPrintingInCout_(isPrintingInCout) +{ + +} + +Logger::~Logger() +{ + +} + +void Logger::print(std::string filePath) +{ + std::ofstream file(filePath.c_str(), std::ios::binary); + file << logStream_.str(); + file.close(); +} + +bool Logger::isPrintingInCout() const +{ + return isPrintingInCout_; +} + +void Logger::setIsPrintingInCout(bool isPrintingInCout) +{ + isPrintingInCout_ = isPrintingInCout; +} diff --git a/modules/odm_orthophoto/src/Logger.hpp b/modules/odm_orthophoto/src/Logger.hpp new file mode 100644 index 000000000..61520146e --- /dev/null +++ b/modules/odm_orthophoto/src/Logger.hpp @@ -0,0 +1,68 @@ +#pragma once + +// STL +#include +#include +#include +#include + +/*! + * \brief The Logger class is used to store program messages in a log file. + * \details By using the << operator while printInCout is set, the class writes both to + * cout and to file, if the flag is not set, output is written to file only. + */ +class Logger +{ +public: + /*! 
+ * \brief Logger Contains functionality for printing and displaying log information. + * \param printInCout Flag toggling if operator << also writes to cout. + */ + Logger(bool isPrintingInCout = true); + + /*! + * \brief Destructor. + */ + ~Logger(); + + /*! + * \brief print Prints the contents of the log to file. + * \param filePath Path specifying where to write the log. + */ + void print(std::string filePath); + + /*! + * \brief isPrintingInCout Check if console printing flag is set. + * \return Console printing flag. + */ + bool isPrintingInCout() const; + + /*! + * \brief setIsPrintingInCout Set console printing flag. + * \param isPrintingInCout Value, if true, messages added to the log are also printed in cout. + */ + void setIsPrintingInCout(bool isPrintingInCout); + + /*! + * Operator for printing messages to log and in the standard output stream if desired. + */ + template + friend Logger& operator<< (Logger &log, T t) + { + // If console printing is enabled. + if (log.isPrintingInCout_) + { + std::cout << t; + std::cout.flush(); + } + // Write to log. + log.logStream_ << t; + + return log; + } + +private: + bool isPrintingInCout_; /*!< If flag is set, log is printed in cout and written to the log. */ + + std::stringstream logStream_; /*!< Stream for storing the log. 
*/ +}; diff --git a/modules/odm_orthophoto/src/OdmOrthoPhoto.cpp b/modules/odm_orthophoto/src/OdmOrthoPhoto.cpp new file mode 100644 index 000000000..906f54644 --- /dev/null +++ b/modules/odm_orthophoto/src/OdmOrthoPhoto.cpp @@ -0,0 +1,1424 @@ +// C++ +#include +#include +#include +#include + +// This +#include "OdmOrthoPhoto.hpp" + +std::ostream & operator<< (std::ostream &os, const WorldPoint &worldPoint) +{ + return os << worldPoint.eastInteger_ + worldPoint.eastFractional_ << " " << worldPoint.northInteger_ + worldPoint.northFractional_; +} + +std::istream & operator>> (std::istream &is, WorldPoint &worldPoint) +{ + is >> worldPoint.eastInteger_; + // Check if east coordinate is given as rational. + if('.' == is.peek()) + { + is >> worldPoint.eastFractional_; + } + else + { + worldPoint.eastFractional_ = 0.0f; + } + + is >> worldPoint.northInteger_; + // Check if north coordinate is given as rational. + if('.' == is.peek()) + { + is >> worldPoint.northFractional_; + } + else + { + worldPoint.northFractional_ = 0.0f; + } + + return is; +} + +OdmOrthoPhoto::OdmOrthoPhoto() + :log_(false) +{ + inputFile_ = ""; + inputGeoRefFile_ = ""; + outputFile_ = "ortho.jpg"; + logFile_ = "log.txt"; + outputCornerFile_ = ""; + + resolution_ = 0.0f; + + boundaryDefined_ = false; + boundaryPoint1_[0] = 0.0f; boundaryPoint1_[1] = 0.0f; + boundaryPoint2_[0] = 0.0f; boundaryPoint2_[1] = 0.0f; + boundaryPoint3_[0] = 0.0f; boundaryPoint3_[1] = 0.0f; + boundaryPoint4_[0] = 0.0f; boundaryPoint4_[1] = 0.0f; +} + +OdmOrthoPhoto::~OdmOrthoPhoto() +{ +} + +int OdmOrthoPhoto::run(int argc, char *argv[]) +{ + try + { + parseArguments(argc, argv); + createOrthoPhoto(); + } + catch (const OdmOrthoPhotoException& e) + { + log_.setIsPrintingInCout(true); + log_ << e.what() << "\n"; + log_.print(logFile_); + return EXIT_FAILURE; + } + catch (const std::exception& e) + { + log_.setIsPrintingInCout(true); + log_ << "Error in OdmOrthoPhoto:\n"; + log_ << e.what() << "\n"; + log_.print(logFile_); + 
return EXIT_FAILURE; + } + catch (...) + { + log_.setIsPrintingInCout(true); + log_ << "Unknown error, terminating:\n"; + log_.print(logFile_); + return EXIT_FAILURE; + } + + log_.print(logFile_); + + return EXIT_SUCCESS; +} + +void OdmOrthoPhoto::parseArguments(int argc, char *argv[]) +{ + logFile_ = std::string(argv[0]) + "_log.txt"; + log_ << logFile_ << "\n\n"; + + // If no arguments were passed, print help. + if (argc == 1) + { + printHelp(); + } + + log_ << "Arguments given\n"; + for(int argIndex = 1; argIndex < argc; ++argIndex) + { + log_ << argv[argIndex] << '\n'; + } + + log_ << '\n'; + for(int argIndex = 1; argIndex < argc; ++argIndex) + { + // The argument to be parsed. + std::string argument = std::string(argv[argIndex]); + + if(argument == "-help") + { + printHelp(); + } + else if(argument == "-resolution") + { + ++argIndex; + if (argIndex >= argc) + { + throw OdmOrthoPhotoException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + std::stringstream ss(argv[argIndex]); + ss >> resolution_; + log_ << "Resolution count was set to: " << resolution_ << "pixels/meter\n"; + } + else if(argument == "-boundary") + { + if(argIndex+8 >= argc) + { + throw OdmOrthoPhotoException("Argument '" + argument + "' expects 8 more input following it, but no more inputs were provided."); + } + + std::stringstream ss; + ss << argv[argIndex+1] << " " << argv[argIndex+2] << " " << argv[argIndex+3] << " " << argv[argIndex+4] << " " << argv[argIndex+5] << " " << argv[argIndex+6] << " " << argv[argIndex+7] << " " << argv[argIndex+8]; + ss >> worldPoint1_ >> worldPoint2_ >> worldPoint3_ >> worldPoint4_; + boundaryDefined_ = true; + + argIndex += 8; + + log_ << "Boundary point 1 was set to: " << worldPoint1_ << '\n'; + log_ << "Boundary point 2 was set to: " << worldPoint2_ << '\n'; + log_ << "Boundary point 3 was set to: " << worldPoint3_ << '\n'; + log_ << "Boundary point 4 was set to: " << worldPoint4_ << '\n'; + } + else 
if(argument == "-boundaryMinMax") + { + if(argIndex+4 >= argc) + { + throw OdmOrthoPhotoException("Argument '" + argument + "' expects 4 more input following it, but no more inputs were provided."); + } + + std::stringstream ss; + ss << argv[argIndex+1] << " " << argv[argIndex+2] << " " << argv[argIndex+3] << " " << argv[argIndex+4]; + ss >> worldPoint1_ >> worldPoint3_; + boundaryDefined_ = true; + + // Set the other world points as the other two corners. + worldPoint2_.eastFractional_ = worldPoint1_.eastFractional_; + worldPoint2_.eastInteger_ = worldPoint1_.eastInteger_; + worldPoint2_.northFractional_ = worldPoint3_.northFractional_; + worldPoint2_.northInteger_ = worldPoint3_.northInteger_; + + worldPoint4_.eastFractional_ = worldPoint3_.eastFractional_; + worldPoint4_.eastInteger_ = worldPoint3_.eastInteger_; + worldPoint4_.northFractional_ = worldPoint1_.northFractional_; + worldPoint4_.northInteger_ = worldPoint1_.northInteger_; + + argIndex += 4; + + log_ << "Boundary point 1 was set to: " << worldPoint1_ << '\n'; + log_ << "Boundary point 2 was set to: " << worldPoint2_ << '\n'; + log_ << "Boundary point 3 was set to: " << worldPoint3_ << '\n'; + log_ << "Boundary point 4 was set to: " << worldPoint4_ << '\n'; + } + else if(argument == "-verbose") + { + log_.setIsPrintingInCout(true); + } + else if (argument == "-logFile") + { + ++argIndex; + if (argIndex >= argc) + { + throw OdmOrthoPhotoException("Missing argument for '" + argument + "'."); + } + logFile_ = std::string(argv[argIndex]); + std::ofstream testFile(logFile_.c_str()); + if (!testFile.is_open()) + { + throw OdmOrthoPhotoException("Argument '" + argument + "' has a bad value."); + } + log_ << "Log file path was set to: " << logFile_ << "\n"; + } + else if(argument == "-inputFile") + { + argIndex++; + if (argIndex >= argc) + { + throw OdmOrthoPhotoException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + inputFile_ = 
std::string(argv[argIndex]); + log_ << "Reading textured mesh from: " << inputFile_ << "\n"; + } + else if(argument == "-inputGeoRefFile") + { + argIndex++; + if (argIndex >= argc) + { + throw OdmOrthoPhotoException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + inputGeoRefFile_ = std::string(argv[argIndex]); + log_ << "Reading georef from: " << inputGeoRefFile_ << "\n"; + } + else if(argument == "-outputFile") + { + argIndex++; + if (argIndex >= argc) + { + throw OdmOrthoPhotoException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + outputFile_ = std::string(argv[argIndex]); + log_ << "Writing output to: " << outputFile_ << "\n"; + } + else if(argument == "-outputCornerFile") + { + argIndex++; + if (argIndex >= argc) + { + throw OdmOrthoPhotoException("Argument '" + argument + "' expects 1 more input following it, but no more inputs were provided."); + } + outputCornerFile_ = std::string(argv[argIndex]); + log_ << "Writing corners to: " << outputCornerFile_ << "\n"; + } + else + { + printHelp(); + throw OdmOrthoPhotoException("Unrecognised argument '" + argument + "'"); + } + } + log_ << "\n"; +} + +void OdmOrthoPhoto::printHelp() +{ + log_.setIsPrintingInCout(true); + + log_ << "OpenDroneMapOrthoPhoto.exe\n\n"; + + log_ << "Purpose\n"; + log_ << "Create an orthograpical photo from an oriented textured mesh.\n\n"; + + log_ << "Usage:\n"; + log_ << "The program requires a path to an input OBJ mesh file and a resolution, as pixels/m. 
All other input parameters are optional.\n\n"; + + log_ << "The following flags are available\n"; + log_ << "Call the program with flag \"-help\", or without parameters to print this message, or check any generated log file.\n"; + log_ << "Call the program with flag \"-verbose\", to print log messages in the standard output stream as well as in the log file.\n\n"; + + log_ << "Parameters are specified as: \"- \", (without <>), and the following parameters are configureable:\n"; + log_ << "\"-inputFile \" (mandatory)\n"; + log_ << "\"Input obj file that must contain a textured mesh.\n\n"; + + log_ << "\"-inputGeoRefFile \" (optional, if specified boundary points are assumed to be given as world coordinates. If not specified, the boundary points are assumed to be local coordinates)\n"; + log_ << "\"Input geograpical reference system file that describes the world position of the model's origin.\n\n"; + + log_ << "\"-outputFile \" (optional, default: ortho.jpg)\n"; + log_ << "\"Target file in which the orthophoto is saved.\n\n"; + + log_ << "\"-outputCornerFile \" (optional)\n"; + log_ << "\"Target text file for boundary corner points, written as \"xmin ymin xmax ymax\".\n\n"; + + log_ << "\"-resolution \" (mandatory)\n"; + log_ << "\"The number of pixels used per meter.\n\n"; + + log_ << "\"-boundary \" (optional, if not specified the entire model will be rendered)\n"; + log_ << "\"Describes the area which should be covered in the ortho photo. The area will be a bounding box containing all four points. The points should be given in the same georeference system as the model.\n\n"; + + log_ << "\"-boundaryMinMax \" (optional, if not specified the entire model will be rendered.)\n"; + log_ << "\"Describes the area which should be covered in the ortho photo. The area will be a bounding box with corners at MinX, MinY and MaxX, MaxY. 
The points should be given in the same georeference system as the model.\n\n"; + + log_.setIsPrintingInCout(false); +} + +void OdmOrthoPhoto::createOrthoPhoto() +{ + if(inputFile_.empty()) + { + throw OdmOrthoPhotoException("Failed to create ortho photo, no texture mesh given."); + } + + if(boundaryDefined_) + { + if(inputGeoRefFile_.empty()) + { + // Points are assumed to be given in as local points. + adjustBoundsForLocal(); + } + else + { + // Points are assumed to be given in as world points. + adjustBoundsForGeoRef(); + } + } + else if(!inputGeoRefFile_.empty()) + { + // No boundary points specified, but georeference system file was given. + log_ << "Warning:\n"; + log_ << "\tSpecified -inputGeoRefFile, but no boundary points. The georeference system will be ignored.\n"; + } + + log_ << "Reading mesh file...\n"; + // The textured mesh. + pcl::TextureMesh mesh; + loadObjFile(inputFile_, mesh); + log_ << ".. mesh file read.\n\n"; + + // Does the model have more than one material? + multiMaterial_ = 1 < mesh.tex_materials.size(); + + bool splitModel = false; + + if(multiMaterial_) + { + // Need to check relationship between texture coordinates and faces. + if(!isModelOk(mesh)) + { + splitModel = true; + } + } + + if(!boundaryDefined_) + { + // Determine boundary from model. + adjustBoundsForEntireModel(mesh); + } + + // The minimum and maximum boundary values. 
+ float xMax, xMin, yMax, yMin; + xMin = std::min(std::min(boundaryPoint1_[0], boundaryPoint2_[0]), std::min(boundaryPoint3_[0], boundaryPoint4_[0])); + xMax = std::max(std::max(boundaryPoint1_[0], boundaryPoint2_[0]), std::max(boundaryPoint3_[0], boundaryPoint4_[0])); + yMin = std::min(std::min(boundaryPoint1_[1], boundaryPoint2_[1]), std::min(boundaryPoint3_[1], boundaryPoint4_[1])); + yMax = std::max(std::max(boundaryPoint1_[1], boundaryPoint2_[1]), std::max(boundaryPoint3_[1], boundaryPoint4_[1])); + + log_ << "Ortho photo bounds x : " << xMin << " -> " << xMax << '\n'; + log_ << "Ortho photo bounds y : " << yMin << " -> " << yMax << '\n'; + + // The size of the area. + float xDiff = xMax - xMin; + float yDiff = yMax - yMin; + log_ << "Ortho photo area : " << xDiff*yDiff << "m2\n"; + + // The resolution necessary to fit the area with the given resolution. + int rowRes = static_cast(std::ceil(resolution_*yDiff)); + int colRes = static_cast(std::ceil(resolution_*xDiff)); + log_ << "Ortho photo resolution, width x height : " << colRes << "x" << rowRes << '\n'; + + // Check size of photo. + if(0 >= rowRes*colRes) + { + if(0 >= rowRes) + { + log_ << "Warning: ortho photo has zero area, height = " << rowRes << ". Forcing height = 1.\n"; + rowRes = 1; + } + if(0 >= colRes) + { + log_ << "Warning: ortho photo has zero area, width = " << colRes << ". Forcing width = 1.\n"; + colRes = 1; + } + log_ << "New ortho photo resolution, width x height : " << colRes << "x" << rowRes << '\n'; + } + + // Init ortho photo + try{ + photo_ = cv::Mat::zeros(rowRes, colRes, CV_8UC4) + cv::Scalar(255, 255, 255, 0); + depth_ = cv::Mat::zeros(rowRes, colRes, CV_32F) - std::numeric_limits::infinity(); + }catch(const cv::Exception &e){ + std::cerr << "Couldn't allocate enough memory to render the orthophoto (" << colRes << "x" << rowRes << " cells = " << ((long long)colRes * (long long)rowRes * 4) << " bytes). 
Try to reduce the -resolution parameter or add more RAM.\n"; + exit(1); + } + + // Contains the vertices of the mesh. + pcl::PointCloud::Ptr meshCloud (new pcl::PointCloud); + pcl::fromPCLPointCloud2 (mesh.cloud, *meshCloud); + + // Split model and make copies of vertices and texture coordinates for all faces + if (splitModel) + { + pcl::PointCloud::Ptr meshCloudSplit (new pcl::PointCloud); + std::vector > textureCoordinates = std::vector >(0); + + size_t vertexIndexCount = 0; + for(size_t t = 0; t < mesh.tex_polygons.size(); ++t) + { + vertexIndexCount += 3 * mesh.tex_polygons[t].size(); + } + textureCoordinates.reserve(vertexIndexCount); + + for(size_t t = 0; t < mesh.tex_polygons.size(); ++t) + { + + for(size_t faceIndex = 0; faceIndex < mesh.tex_polygons[t].size(); ++faceIndex) + { + pcl::Vertices polygon = mesh.tex_polygons[t][faceIndex]; + + // The index to the vertices of the polygon. + size_t v1i = polygon.vertices[0]; + size_t v2i = polygon.vertices[1]; + size_t v3i = polygon.vertices[2]; + + // The polygon's points. 
+ pcl::PointXYZ v1 = meshCloud->points[v1i]; + pcl::PointXYZ v2 = meshCloud->points[v2i]; + pcl::PointXYZ v3 = meshCloud->points[v3i]; + + Eigen::Vector2f vt1 = mesh.tex_coordinates[0][3*faceIndex]; + Eigen::Vector2f vt2 = mesh.tex_coordinates[0][3*faceIndex + 1]; + Eigen::Vector2f vt3 = mesh.tex_coordinates[0][3*faceIndex + 2]; + + meshCloudSplit->points.push_back(v1); + textureCoordinates.push_back(vt1); + mesh.tex_polygons[t][faceIndex].vertices[0] = vertexIndexCount; + + meshCloudSplit->points.push_back(v2); + textureCoordinates.push_back(vt2); + mesh.tex_polygons[t][faceIndex].vertices[1] = vertexIndexCount; + + meshCloudSplit->points.push_back(v3); + textureCoordinates.push_back(vt3); + mesh.tex_polygons[t][faceIndex].vertices[2] = vertexIndexCount; + } + } + + mesh.tex_coordinates.clear(); + mesh.tex_coordinates.push_back(textureCoordinates); + + meshCloud = meshCloudSplit; + } + + // Creates a transformation which aligns the area for the ortho photo. + Eigen::Transform transform = getROITransform(xMin, -yMax); + + log_ << "Translating and scaling mesh...\n"; + + // Move the mesh into position. + pcl::transformPointCloud(*meshCloud, *meshCloud, transform); + log_ << ".. mesh translated and scaled.\n\n"; + + // Flatten texture coordinates. + std::vector uvs; + uvs.reserve(mesh.tex_coordinates.size()); + for(size_t t = 0; t < mesh.tex_coordinates.size(); ++t) + { + uvs.insert(uvs.end(), mesh.tex_coordinates[t].begin(), mesh.tex_coordinates[t].end()); + } + //cv::namedWindow("dsfs"); + + // The current material texture + cv::Mat texture; + + // Used to keep track of the global face index. + size_t faceOff = 0; + + log_ << "Rendering the ortho photo...\n"; + + // Iterate over each part of the mesh (one per material). + for(size_t t = 0; t < mesh.tex_materials.size(); ++t) + { + // The material of the current submesh. + pcl::TexMaterial material = mesh.tex_materials[t]; + texture = cv::imread(material.tex_file); + + // Check for missing files. 
+ if(texture.empty()) + { + log_ << "Material texture could not be read:\n"; + log_ << material.tex_file << '\n'; + log_ << "Could not be read as image, does the file exist?\n"; + continue; // Skip to next material. + } + + // The faces of the current submesh. + std::vector faces = mesh.tex_polygons[t]; + + // Iterate over each face... + for(size_t faceIndex = 0; faceIndex < faces.size(); ++faceIndex) + { + // The current polygon. + pcl::Vertices polygon = faces[faceIndex]; + + // ... and draw it into the ortho photo. + drawTexturedTriangle(texture, polygon, meshCloud, uvs, faceIndex+faceOff); + } + faceOff += faces.size(); + log_ << "Material " << t << " rendered.\n"; + } + log_ << "...ortho photo rendered\n"; + + log_ << '\n'; + log_ << "Writing ortho photo to " << outputFile_ << "\n"; + cv::imwrite(outputFile_, photo_); + + if (!outputCornerFile_.empty()) + { + log_ << "Writing corner coordinates to " << outputCornerFile_ << "\n"; + std::ofstream cornerStream(outputCornerFile_.c_str()); + if (!cornerStream.is_open()) + { + throw OdmOrthoPhotoException("Failed opening output corner file " + outputCornerFile_ + "."); + } + cornerStream.setf(std::ios::scientific, std::ios::floatfield); + cornerStream.precision(17); + cornerStream << xMin << " " << yMin << " " << xMax << " " << yMax; + cornerStream.close(); + } + + log_ << "Orthophoto generation done.\n"; +} + +void OdmOrthoPhoto::adjustBoundsForGeoRef() +{ + log_ << "Adjusting bounds for world coordinates\n"; + + // A stream of the georef system. 
+ std::ifstream geoRefStream(inputGeoRefFile_.c_str()); + + // The system name + std::string system; + // The east and north offsets + int eastOffset, northOffset; + + // Parse file + std::getline(geoRefStream, system); + if(!(geoRefStream >> eastOffset)) + { + throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract east offset."); + } + if(!(geoRefStream >> northOffset)) + { + throw OdmOrthoPhotoException("Could not extract geographical reference system from \n" + inputGeoRefFile_ + "\nCould not extract north offset."); + } + + log_ << "Georeference system:\n"; + log_ << system << "\n"; + log_ << "East offset: " << eastOffset << "\n"; + log_ << "North offset: " << northOffset << "\n"; + + // Adjust boundary points. + boundaryPoint1_[0] = static_cast(worldPoint1_.eastInteger_ - eastOffset) + worldPoint1_.eastFractional_; + boundaryPoint1_[1] = static_cast(worldPoint1_.northInteger_ - northOffset) + worldPoint1_.northFractional_; + boundaryPoint2_[0] = static_cast(worldPoint2_.eastInteger_ - eastOffset) + worldPoint2_.eastFractional_; + boundaryPoint2_[1] = static_cast(worldPoint2_.northInteger_ - northOffset) + worldPoint2_.northFractional_; + boundaryPoint3_[0] = static_cast(worldPoint3_.eastInteger_ - eastOffset) + worldPoint3_.eastFractional_; + boundaryPoint3_[1] = static_cast(worldPoint3_.northInteger_ - northOffset) + worldPoint3_.northFractional_; + boundaryPoint4_[0] = static_cast(worldPoint4_.eastInteger_ - eastOffset) + worldPoint4_.eastFractional_; + boundaryPoint4_[1] = static_cast(worldPoint4_.northInteger_ - northOffset) + worldPoint4_.northFractional_; + + log_ << "Local boundary points:\n"; + log_ << "Point 1: " << boundaryPoint1_[0] << " " << boundaryPoint1_[1] << "\n"; + log_ << "Point 2: " << boundaryPoint2_[0] << " " << boundaryPoint2_[1] << "\n"; + log_ << "Point 3: " << boundaryPoint3_[0] << " " << boundaryPoint3_[1] << "\n"; + log_ << "Point 4: " << boundaryPoint4_[0] << 
" " << boundaryPoint4_[1] << "\n"; +} + +void OdmOrthoPhoto::adjustBoundsForLocal() +{ + log_ << "Adjusting bounds for local coordinates\n"; + + // Set boundary points from world points. + boundaryPoint1_[0] = static_cast(worldPoint1_.eastInteger_ ) + worldPoint1_.eastFractional_; + boundaryPoint1_[1] = static_cast(worldPoint1_.northInteger_) + worldPoint1_.northFractional_; + boundaryPoint2_[0] = static_cast(worldPoint2_.eastInteger_ ) + worldPoint2_.eastFractional_; + boundaryPoint2_[1] = static_cast(worldPoint2_.northInteger_) + worldPoint2_.northFractional_; + boundaryPoint3_[0] = static_cast(worldPoint3_.eastInteger_ ) + worldPoint3_.eastFractional_; + boundaryPoint3_[1] = static_cast(worldPoint3_.northInteger_) + worldPoint3_.northFractional_; + boundaryPoint4_[0] = static_cast(worldPoint4_.eastInteger_ ) + worldPoint4_.eastFractional_; + boundaryPoint4_[1] = static_cast(worldPoint4_.northInteger_) + worldPoint4_.northFractional_; + + log_ << "Local boundary points:\n"; + log_ << "Point 1: " << boundaryPoint1_[0] << " " << boundaryPoint1_[1] << "\n"; + log_ << "Point 2: " << boundaryPoint2_[0] << " " << boundaryPoint2_[1] << "\n"; + log_ << "Point 3: " << boundaryPoint3_[0] << " " << boundaryPoint3_[1] << "\n"; + log_ << "Point 4: " << boundaryPoint4_[0] << " " << boundaryPoint4_[1] << "\n"; + log_ << "\n"; +} + +void OdmOrthoPhoto::adjustBoundsForEntireModel(const pcl::TextureMesh &mesh) +{ + log_ << "Set boundary to contain entire model.\n"; + + // The boundary of the model. + float xMin, xMax, yMin, yMax; + + xMin = std::numeric_limits::infinity(); + xMax = -std::numeric_limits::infinity(); + yMin = std::numeric_limits::infinity(); + yMax = -std::numeric_limits::infinity(); + + // Contains the vertices of the mesh. + pcl::PointCloud::Ptr meshCloud (new pcl::PointCloud); + pcl::fromPCLPointCloud2 (mesh.cloud, *meshCloud); + + for(size_t t = 0; t < mesh.tex_materials.size(); ++t) + { + // The faces of the current submesh. 
+ std::vector faces = mesh.tex_polygons[t]; + + // Iterate over each face... + for(size_t faceIndex = 0; faceIndex < faces.size(); ++faceIndex) + { + // The current polygon. + pcl::Vertices polygon = faces[faceIndex]; + + // The index to the vertices of the polygon. + size_t v1i = polygon.vertices[0]; + size_t v2i = polygon.vertices[1]; + size_t v3i = polygon.vertices[2]; + + // The polygon's points. + pcl::PointXYZ v1 = meshCloud->points[v1i]; + pcl::PointXYZ v2 = meshCloud->points[v2i]; + pcl::PointXYZ v3 = meshCloud->points[v3i]; + + xMin = std::min(std::min(xMin, v1.x), std::min(v2.x, v3.x)); + xMax = std::max(std::max(xMax, v1.x), std::max(v2.x, v3.x)); + yMin = std::min(std::min(yMin, v1.y), std::min(v2.y, v3.y)); + yMax = std::max(std::max(yMax, v1.y), std::max(v2.y, v3.y)); + } + } + + // Create dummy boundary points. + boundaryPoint1_[0] = xMin; boundaryPoint1_[1] = yMin; + boundaryPoint2_[0] = xMin; boundaryPoint2_[1] = yMax; + boundaryPoint3_[0] = xMax; boundaryPoint3_[1] = yMax; + boundaryPoint4_[0] = xMax; boundaryPoint4_[1] = yMin; + + log_ << "Local boundary points:\n"; + log_ << "Point 1: " << boundaryPoint1_[0] << " " << boundaryPoint1_[1] << "\n"; + log_ << "Point 2: " << boundaryPoint2_[0] << " " << boundaryPoint2_[1] << "\n"; + log_ << "Point 3: " << boundaryPoint3_[0] << " " << boundaryPoint3_[1] << "\n"; + log_ << "Point 4: " << boundaryPoint4_[0] << " " << boundaryPoint4_[1] << "\n"; + log_ << "\n"; +} + +Eigen::Transform OdmOrthoPhoto::getROITransform(float xMin, float yMin) const +{ + // The transform used to move the chosen area into the ortho photo. + Eigen::Transform transform; + + transform(0, 0) = resolution_; // x Scaling. + transform(1, 0) = 0.0f; + transform(2, 0) = 0.0f; + transform(3, 0) = 0.0f; + + transform(0, 1) = 0.0f; + transform(1, 1) = -resolution_; // y Scaling, mirrored for easier rendering. 
+ transform(2, 1) = 0.0f; + transform(3, 1) = 0.0f; + + transform(0, 2) = 0.0f; + transform(1, 2) = 0.0f; + transform(2, 2) = 1.0f; + transform(3, 2) = 0.0f; + + transform(0, 3) = -xMin*resolution_; // x Translation + transform(1, 3) = -yMin*resolution_; // y Translation + transform(2, 3) = 0.0f; + transform(3, 3) = 1.0f; + + return transform; +} + +void OdmOrthoPhoto::drawTexturedTriangle(const cv::Mat &texture, const pcl::Vertices &polygon, const pcl::PointCloud::Ptr &meshCloud, const std::vector &uvs, size_t faceIndex) +{ + // The index to the vertices of the polygon. + size_t v1i = polygon.vertices[0]; + size_t v2i = polygon.vertices[1]; + size_t v3i = polygon.vertices[2]; + + // The polygon's points. + pcl::PointXYZ v1 = meshCloud->points[v1i]; + pcl::PointXYZ v2 = meshCloud->points[v2i]; + pcl::PointXYZ v3 = meshCloud->points[v3i]; + + if(isSliverPolygon(v1, v2, v3)) + { + log_ << "Warning: Sliver polygon found at face index " << faceIndex << '\n'; + return; + } + + // The face data. Position v*{x,y,z}. Texture coordinate v*{u,v}. * is the vertex number in the polygon. + float v1x, v1y, v1z, v1u, v1v; + float v2x, v2y, v2z, v2u, v2v; + float v3x, v3y, v3z, v3u, v3v; + + // Barycentric coordinates of the currently rendered point. + float l1, l2, l3; + + // The size of the photo, as float. + float fRows, fCols; + fRows = static_cast(texture.rows); + fCols = static_cast(texture.cols); + + // Get vertex position. + v1x = v1.x; v1y = v1.y; v1z = v1.z; + v2x = v2.x; v2y = v2.y; v2z = v2.z; + v3x = v3.x; v3y = v3.y; v3z = v3.z; + + // Get texture coordinates. 
(Special cases for PCL when using multiple materials vs one material) + if(multiMaterial_) + { + v1u = uvs[3*faceIndex][0]; v1v = uvs[3*faceIndex][1]; + v2u = uvs[3*faceIndex+1][0]; v2v = uvs[3*faceIndex+1][1]; + v3u = uvs[3*faceIndex+2][0]; v3v = uvs[3*faceIndex+2][1]; + + } + else + { + v1u = uvs[v1i][0]; v1v = uvs[v1i][1]; + v2u = uvs[v2i][0]; v2v = uvs[v2i][1]; + v3u = uvs[v3i][0]; v3v = uvs[v3i][1]; + } + + // Check bounding box overlap. + int xMin = static_cast(std::min(std::min(v1x, v2x), v3x)); + if(xMin > photo_.cols) + { + return; // Completely outside to the right. + } + int xMax = static_cast(std::max(std::max(v1x, v2x), v3x)); + if(xMax < 0) + { + return; // Completely outside to the left. + } + int yMin = static_cast(std::min(std::min(v1y, v2y), v3y)); + if(yMin > photo_.rows) + { + return; // Completely outside to the top. + } + int yMax = static_cast(std::max(std::max(v1y, v2y), v3y)); + if(yMax < 0) + { + return; // Completely outside to the bottom. + } + + // Top point row and column positions + float topR, topC; + // Middle point row and column positions + float midR, midC; + // Bottom point row and column positions + float botR, botC; + + // Find top, middle and bottom points. 
+ if(v1y < v2y) + { + if(v1y < v3y) + { + if(v2y < v3y) + { + // 1 -> 2 -> 3 + topR = v1y; topC = v1x; + midR = v2y; midC = v2x; + botR = v3y; botC = v3x; + } + else + { + // 1 -> 3 -> 2 + topR = v1y; topC = v1x; + midR = v3y; midC = v3x; + botR = v2y; botC = v2x; + } + } + else + { + // 3 -> 1 -> 2 + topR = v3y; topC = v3x; + midR = v1y; midC = v1x; + botR = v2y; botC = v2x; + } + } + else // v2y <= v1y + { + if(v2y < v3y) + { + if(v1y < v3y) + { + // 2 -> 1 -> 3 + topR = v2y; topC = v2x; + midR = v1y; midC = v1x; + botR = v3y; botC = v3x; + } + else + { + // 2 -> 3 -> 1 + topR = v2y; topC = v2x; + midR = v3y; midC = v3x; + botR = v1y; botC = v1x; + } + } + else + { + // 3 -> 2 -> 1 + topR = v3y; topC = v3x; + midR = v2y; midC = v2x; + botR = v1y; botC = v1x; + } + } + + // General appreviations: + // --------------------- + // tm : Top(to)Middle. + // mb : Middle(to)Bottom. + // tb : Top(to)Bottom. + // c : column. + // r : row. + // dr : DeltaRow, step value per row. + + // The step along column for every step along r. Top to middle. + float ctmdr; + // The step along column for every step along r. Top to bottom. + float ctbdr; + // The step along column for every step along r. Middle to bottom. + float cmbdr; + + ctbdr = (botC-topC)/(botR-topR); + + // The current column position, from top to middle. + float ctm = topC; + // The current column position, from top to bottom. + float ctb = topC; + + // Check for vertical line between middle and top. + if(FLT_EPSILON < midR-topR) + { + ctmdr = (midC-topC)/(midR-topR); + + // The first pixel row for the bottom part of the triangle. + int rqStart = std::max(static_cast(std::floor(topR+0.5f)), 0); + // The last pixel row for the top part of the triangle. + int rqEnd = std::min(static_cast(std::floor(midR+0.5f)), photo_.rows); + + // Traverse along row from top to middle. + for(int rq = rqStart; rq < rqEnd; ++rq) + { + // Set the current column positions. 
+ ctm = topC + ctmdr*(static_cast(rq)+0.5f-topR); + ctb = topC + ctbdr*(static_cast(rq)+0.5f-topR); + + // The first pixel column for the current row. + int cqStart = std::max(static_cast(std::floor(0.5f+std::min(ctm, ctb))), 0); + // The last pixel column for the current row. + int cqEnd = std::min(static_cast(std::floor(0.5f+std::max(ctm, ctb))), photo_.cols); + + for(int cq = cqStart; cq < cqEnd; ++cq) + { + // Get barycentric coordinates for the current point. + getBarycentricCoordinates(v1, v2, v3, static_cast(cq)+0.5f, static_cast(rq)+0.5f, l1, l2, l3); + + if(0.f > l1 || 0.f > l2 || 0.f > l3) + { + //continue; + } + + // The z value for the point. + float z = v1z*l1+v2z*l2+v3z*l3; + + // Check depth + float depthValue = depth_.at(rq, cq); + if(z < depthValue) + { + // Current is behind another, don't draw. + continue; + } + + // The uv values of the point. + float u, v; + u = v1u*l1+v2u*l2+v3u*l3; + v = v1v*l1+v2v*l2+v3v*l3; + + renderPixel(rq, cq, u*fCols, (1.0f-v)*fRows, texture); + + // Update depth buffer. + depth_.at(rq, cq) = z; + } + } + } + + if(FLT_EPSILON < botR-midR) + { + cmbdr = (botC-midC)/(botR-midR); + + // The current column position, from middle to bottom. + float cmb = midC; + + // The first pixel row for the bottom part of the triangle. + int rqStart = std::max(static_cast(std::floor(midR+0.5f)), 0); + // The last pixel row for the bottom part of the triangle. + int rqEnd = std::min(static_cast(std::floor(botR+0.5f)), photo_.rows); + + // Traverse along row from middle to bottom. + for(int rq = rqStart; rq < rqEnd; ++rq) + { + // Set the current column positions. + ctb = topC + ctbdr*(static_cast(rq)+0.5f-topR); + cmb = midC + cmbdr*(static_cast(rq)+0.5f-midR); + + // The first pixel column for the current row. + int cqStart = std::max(static_cast(std::floor(0.5f+std::min(cmb, ctb))), 0); + // The last pixel column for the current row. 
+ int cqEnd = std::min(static_cast(std::floor(0.5f+std::max(cmb, ctb))), photo_.cols); + + for(int cq = cqStart; cq < cqEnd; ++cq) + { + // Get barycentric coordinates for the current point. + getBarycentricCoordinates(v1, v2, v3, static_cast(cq)+0.5f, static_cast(rq)+0.5f, l1, l2, l3); + + if(0.f > l1 || 0.f > l2 || 0.f > l3) + { + //continue; + } + + // The z value for the point. + float z = v1z*l1+v2z*l2+v3z*l3; + + // Check depth + float depthValue = depth_.at(rq, cq); + if(z < depthValue) + { + // Current is behind another, don't draw. + continue; + } + + // The uv values of the point. + float u, v; + u = v1u*l1+v2u*l2+v3u*l3; + v = v1v*l1+v2v*l2+v3v*l3; + + renderPixel(rq, cq, u*fCols, (1.0f-v)*fRows, texture); + + // Update depth buffer. + depth_.at(rq, cq) = z; + } + } + } +} + +void OdmOrthoPhoto::renderPixel(int row, int col, float s, float t, const cv::Mat &texture) +{ + // The colors of the texture pixels. tl : top left, tr : top right, bl : bottom left, br : bottom right. + cv::Vec3b tl, tr, bl, br; + + // The offset of the texture coordinate from its pixel positions. + float leftF, topF; + // The position of the top left pixel. + int left, top; + // The distance to the left and right pixel from the texture coordinate. + float dl, dt; + // The distance to the top and bottom pixel from the texture coordinate. + float dr, db; + + dl = modff(s, &leftF); + dr = 1.0f - dl; + dt = modff(t, &topF); + db = 1.0f - dt; + + left = static_cast(leftF); + top = static_cast(topF); + + tl = texture.at(top, left); + tr = texture.at(top, left+1); + bl = texture.at(top+1, left); + br = texture.at(top+1, left+1); + + // The interpolated color values. 
+ float r = 0.0f, g = 0.0f, b = 0.0f; + + // Red + r += static_cast(tl[2]) * dr * db; + r += static_cast(tr[2]) * dl * db; + r += static_cast(bl[2]) * dr * dt; + r += static_cast(br[2]) * dl * dt; + + // Green + g += static_cast(tl[1]) * dr * db; + g += static_cast(tr[1]) * dl * db; + g += static_cast(bl[1]) * dr * dt; + g += static_cast(br[1]) * dl * dt; + + // Blue + b += static_cast(tl[0]) * dr * db; + b += static_cast(tr[0]) * dl * db; + b += static_cast(bl[0]) * dr * dt; + b += static_cast(br[0]) * dl * dt; + + photo_.at(row,col) = cv::Vec4b(static_cast(b), static_cast(g), static_cast(r), 255); +} + +void OdmOrthoPhoto::getBarycentricCoordinates(pcl::PointXYZ v1, pcl::PointXYZ v2, pcl::PointXYZ v3, float x, float y, float &l1, float &l2, float &l3) const +{ + // Diff along y. + float y2y3 = v2.y-v3.y; + float y1y3 = v1.y-v3.y; + float y3y1 = v3.y-v1.y; + float yy3 = y -v3.y; + + // Diff along x. + float x3x2 = v3.x-v2.x; + float x1x3 = v1.x-v3.x; + float xx3 = x -v3.x; + + // Normalization factor. + float norm = (y2y3*x1x3 + x3x2*y1y3); + + l1 = (y2y3*(xx3) + x3x2*(yy3)) / norm; + l2 = (y3y1*(xx3) + x1x3*(yy3)) / norm; + l3 = 1 - l1 - l2; +} + +bool OdmOrthoPhoto::isSliverPolygon(pcl::PointXYZ v1, pcl::PointXYZ v2, pcl::PointXYZ v3) const +{ + // Calculations are made using doubles, to minize rounding errors. + Eigen::Vector3d a = Eigen::Vector3d(static_cast(v1.x), static_cast(v1.y), static_cast(v1.z)); + Eigen::Vector3d b = Eigen::Vector3d(static_cast(v2.x), static_cast(v2.y), static_cast(v2.z)); + Eigen::Vector3d c = Eigen::Vector3d(static_cast(v3.x), static_cast(v3.y), static_cast(v3.z)); + Eigen::Vector3d dummyVec = (a-b).cross(c-b); + + // Area smaller than, or equal to, floating-point epsilon. + return std::numeric_limits::epsilon() >= static_cast(std::sqrt(dummyVec.dot(dummyVec))/2.0); +} + +bool OdmOrthoPhoto::isModelOk(const pcl::TextureMesh &mesh) +{ + // The number of texture coordinates in the model. 
+ size_t nTextureCoordinates = 0; + // The number of faces in the model. + size_t nFaces = 0; + + for(size_t t = 0; t < mesh.tex_coordinates.size(); ++t) + { + nTextureCoordinates += mesh.tex_coordinates[t].size(); + } + for(size_t t = 0; t < mesh.tex_polygons.size(); ++t) + { + nFaces += mesh.tex_polygons[t].size(); + } + + log_ << "Number of faces in the model " << nFaces << '\n'; + + return 3*nFaces == nTextureCoordinates; +} + + +bool OdmOrthoPhoto::loadObjFile(std::string inputFile, pcl::TextureMesh &mesh) +{ + int data_type; + unsigned int data_idx; + int file_version; + int offset = 0; + Eigen::Vector4f origin; + Eigen::Quaternionf orientation; + + if (!readHeader(inputFile, mesh.cloud, origin, orientation, file_version, data_type, data_idx, offset)) + { + throw OdmOrthoPhotoException("Problem reading header in modelfile!\n"); + } + + std::ifstream fs; + + fs.open (inputFile.c_str (), std::ios::binary); + if (!fs.is_open () || fs.fail ()) + { + //PCL_ERROR ("[pcl::OBJReader::readHeader] Could not open file '%s'! 
Error : %s\n", file_name.c_str (), strerror(errno)); + fs.close (); + log_<<"Could not read mesh from file "; + log_ << inputFile.c_str(); + log_ <<"\n"; + + throw OdmOrthoPhotoException("Problem reading mesh from file!\n"); + } + + // Seek at the given offset + fs.seekg (data_idx, std::ios::beg); + + // Get normal_x field indices + int normal_x_field = -1; + for (std::size_t i = 0; i < mesh.cloud.fields.size (); ++i) + { + if (mesh.cloud.fields[i].name == "normal_x") + { + normal_x_field = i; + break; + } + } + + std::size_t v_idx = 0; + std::size_t vn_idx = 0; + std::size_t vt_idx = 0; + std::size_t f_idx = 0; + std::string line; + std::vector st; + std::vector > coordinates; + std::vector allTexCoords; + + std::map f2vt; + + try + { + while (!fs.eof ()) + { + getline (fs, line); + // Ignore empty lines + if (line == "") + continue; + + // Tokenize the line + std::stringstream sstream (line); + sstream.imbue (std::locale::classic ()); + line = sstream.str (); + boost::trim (line); + boost::split (st, line, boost::is_any_of ("\t\r "), boost::token_compress_on); + + // Ignore comments + if (st[0] == "#") + continue; + // Vertex + if (st[0] == "v") + { + try + { + for (int i = 1, f = 0; i < 4; ++i, ++f) + { + float value = boost::lexical_cast (st[i]); + memcpy (&mesh.cloud.data[v_idx * mesh.cloud.point_step + mesh.cloud.fields[f].offset], &value, sizeof (float)); + } + + ++v_idx; + } + catch (const boost::bad_lexical_cast &e) + { + log_<<"Unable to convert %s to vertex coordinates!\n"; + throw OdmOrthoPhotoException("Unable to convert %s to vertex coordinates!"); + } + continue; + } + // Vertex normal + if (st[0] == "vn") + { + try + { + for (int i = 1, f = normal_x_field; i < 4; ++i, ++f) + { + float value = boost::lexical_cast (st[i]); + memcpy (&mesh.cloud.data[vn_idx * mesh.cloud.point_step + mesh.cloud.fields[f].offset], + &value, + sizeof (float)); + } + ++vn_idx; + } + catch (const boost::bad_lexical_cast &e) + { + log_<<"Unable to convert %s to vertex 
normal!\n"; + throw OdmOrthoPhotoException("Unable to convert %s to vertex normal!"); + } + continue; + } + // Texture coordinates + if (st[0] == "vt") + { + try + { + Eigen::Vector3f c (0, 0, 0); + for (std::size_t i = 1; i < st.size (); ++i) + c[i-1] = boost::lexical_cast (st[i]); + + if (c[2] == 0) + coordinates.push_back (Eigen::Vector2f (c[0], c[1])); + else + coordinates.push_back (Eigen::Vector2f (c[0]/c[2], c[1]/c[2])); + ++vt_idx; + + } + catch (const boost::bad_lexical_cast &e) + { + log_<<"Unable to convert %s to vertex texture coordinates!\n"; + throw OdmOrthoPhotoException("Unable to convert %s to vertex texture coordinates!"); + } + continue; + } + // Material + if (st[0] == "usemtl") + { + mesh.tex_polygons.push_back (std::vector ()); + mesh.tex_materials.push_back (pcl::TexMaterial ()); + for (std::size_t i = 0; i < companions_.size (); ++i) + { + std::vector::const_iterator mat_it = companions_[i].getMaterial (st[1]); + if (mat_it != companions_[i].materials_.end ()) + { + mesh.tex_materials.back () = *mat_it; + break; + } + } + // We didn't find the appropriate material so we create it here with name only. + if (mesh.tex_materials.back ().tex_name == "") + mesh.tex_materials.back ().tex_name = st[1]; + mesh.tex_coordinates.push_back (coordinates); + coordinates.clear (); + continue; + } + // Face + if (st[0] == "f") + { + //We only care for vertices indices + pcl::Vertices face_v; face_v.vertices.resize (st.size () - 1); + for (std::size_t i = 1; i < st.size (); ++i) + { + int v; + sscanf (st[i].c_str (), "%d", &v); + v = (v < 0) ? 
v_idx + v : v - 1; + face_v.vertices[i-1] = v; + + int v2, vt, vn; + sscanf (st[i].c_str (), "%d/%d/%d", &v2, &vt, &vn); + f2vt[3*(f_idx) + i-1] = vt-1; + } + mesh.tex_polygons.back ().push_back (face_v); + ++f_idx; + continue; + } + } + } + catch (const char *exception) + { + fs.close (); + log_<<"Unable to read file!\n"; + throw OdmOrthoPhotoException("Unable to read file!"); + } + + if (vt_idx != v_idx) + { + std::vector > texcoordinates = std::vector >(0); + texcoordinates.reserve(3*f_idx); + + for (size_t faceIndex = 0; faceIndex < f_idx; ++faceIndex) + { + for(size_t i = 0; i < 3; ++i) + { + Eigen::Vector2f vt = mesh.tex_coordinates[0][f2vt[3*faceIndex+i]]; + texcoordinates.push_back(vt); + } + } + + mesh.tex_coordinates.clear(); + mesh.tex_coordinates.push_back(texcoordinates); + } + + fs.close(); + return (0); +} + +bool OdmOrthoPhoto::readHeader (const std::string &file_name, pcl::PCLPointCloud2 &cloud, + Eigen::Vector4f &origin, Eigen::Quaternionf &orientation, + int &file_version, int &data_type, unsigned int &data_idx, + const int offset) +{ + origin = Eigen::Vector4f::Zero (); + orientation = Eigen::Quaternionf::Identity (); + file_version = 0; + cloud.width = cloud.height = cloud.point_step = cloud.row_step = 0; + cloud.data.clear (); + data_type = 0; + data_idx = offset; + + std::ifstream fs; + std::string line; + + if (file_name == "" || !boost::filesystem::exists (file_name)) + { + return false; + } + + // Open file in binary mode to avoid problem of + // std::getline() corrupting the result of ifstream::tellg() + fs.open (file_name.c_str (), std::ios::binary); + if (!fs.is_open () || fs.fail ()) + { + fs.close (); + return false; + } + + // Seek at the given offset + fs.seekg (offset, std::ios::beg); + + // Read the header and fill it in with wonderful values + bool vertex_normal_found = false; + bool vertex_texture_found = false; + // Material library, skip for now! 
+ // bool material_found = false; + std::vector material_files; + std::size_t nr_point = 0; + std::vector st; + + try + { + while (!fs.eof ()) + { + getline (fs, line); + // Ignore empty lines + if (line == "") + continue; + + // Tokenize the line + std::stringstream sstream (line); + sstream.imbue (std::locale::classic ()); + line = sstream.str (); + boost::trim (line); + boost::split (st, line, boost::is_any_of ("\t\r "), boost::token_compress_on); + // Ignore comments + if (st.at (0) == "#") + continue; + + // Vertex + if (st.at (0) == "v") + { + ++nr_point; + continue; + } + + // Vertex texture + if ((st.at (0) == "vt") && !vertex_texture_found) + { + vertex_texture_found = true; + continue; + } + + // Vertex normal + if ((st.at (0) == "vn") && !vertex_normal_found) + { + vertex_normal_found = true; + continue; + } + + // Material library, skip for now! + if (st.at (0) == "mtllib") + { + material_files.push_back (st.at (1)); + continue; + } + } + } + catch (const char *exception) + { + fs.close (); + return false; + } + + if (!nr_point) + { + fs.close (); + return false; + } + + int field_offset = 0; + for (int i = 0; i < 3; ++i, field_offset += 4) + { + cloud.fields.push_back (pcl::PCLPointField ()); + cloud.fields[i].offset = field_offset; + cloud.fields[i].datatype = pcl::PCLPointField::FLOAT32; + cloud.fields[i].count = 1; + } + + cloud.fields[0].name = "x"; + cloud.fields[1].name = "y"; + cloud.fields[2].name = "z"; + + if (vertex_normal_found) + { + std::string normals_names[3] = { "normal_x", "normal_y", "normal_z" }; + for (int i = 0; i < 3; ++i, field_offset += 4) + { + cloud.fields.push_back (pcl::PCLPointField ()); + pcl::PCLPointField& last = cloud.fields.back (); + last.name = normals_names[i]; + last.offset = field_offset; + last.datatype = pcl::PCLPointField::FLOAT32; + last.count = 1; + } + } + + if (material_files.size () > 0) + { + for (std::size_t i = 0; i < material_files.size (); ++i) + { + pcl::MTLReader companion; + + if (companion.read 
(file_name, material_files[i])) + { + log_<<"Problem reading material file."; + } + + companions_.push_back (companion); + } + } + + cloud.point_step = field_offset; + cloud.width = nr_point; + cloud.height = 1; + cloud.row_step = cloud.point_step * cloud.width; + cloud.is_dense = true; + cloud.data.resize (cloud.point_step * nr_point); + fs.close (); + return true; +} diff --git a/modules/odm_orthophoto/src/OdmOrthoPhoto.hpp b/modules/odm_orthophoto/src/OdmOrthoPhoto.hpp new file mode 100644 index 000000000..1b83f0c05 --- /dev/null +++ b/modules/odm_orthophoto/src/OdmOrthoPhoto.hpp @@ -0,0 +1,236 @@ +#pragma once + +// C++ +#include +#include +#include + +// PCL +#include +#include + +// OpenCV +#include +#include + +// PCL +#include +#include + +// OpenCV +#include + +// Logger +#include "Logger.hpp" + +/*! + * \brief The WorldPoint struct encapsules world coordinates used for the ortho photo boundary. + * Points are separated into integers and fractional parts for high numerical stability. + */ +struct WorldPoint +{ + int eastInteger_; /**< The inger part of the east point. */ + float eastFractional_; /**< The farctional part of the east point. */ + int northInteger_; /**< The inger part of the east point. */ + float northFractional_; /**< The farctional part of the east point. */ + + /*! + * \brief Overloads operator '<<' for WorldPoint. + * + * \param os The output stream in which the WorldPoint should be printed. + * \param worldPoint The WorldPoint should be printed. + * \return A reference to the given output stream. + */ + friend std::ostream & operator<< (std::ostream &os, const WorldPoint &worldPoint); + + /*! + * \brief Overloads operator '>>' for WorldPoint. + * + * \param is The input stream from which the WorldPoint should be extracted + * \param worldPoint The modified WorldPoint. + * \return A reference to the given input stream. + */ + friend std::istream & operator>> (std::istream &os, WorldPoint &worldPoint); +}; + +/*! 
+ * \brief The OdmOrthoPhoto class is used to create an orthographic photo over a given area. + * The class reads an oriented textured mesh from an OBJ-file. + * The class uses file read from pcl. + * The class uses image read and write from opencv. + */ +class OdmOrthoPhoto +{ +public: + OdmOrthoPhoto(); + ~OdmOrthoPhoto(); + + /*! + * \brief run Runs the ortho photo functionality using the provided input arguments. + * For a list of accepted arguments, pleas see the main page documentation or + * call the program with parameter "-help". + * \param argc Application argument count. + * \param argv Argument values. + * \return 0 if successful. + */ + int run(int argc, char* argv[]); + +private: + + /*! + * \brief parseArguments Parses command line arguments. + * + * \param argc Application argument count. + * \param argv Argument values. + */ + void parseArguments(int argc, char* argv[]); + + /*! + * \brief printHelp Prints help, explaining usage. Can be shown by calling the program with argument: "-help". + */ + void printHelp(); + + /*! + * \brief Create the ortho photo using the current settings. + */ + void createOrthoPhoto(); + + /*! + * \brief Adjusts the boundary points according to the given georef system. + */ + void adjustBoundsForGeoRef(); + + /*! + * \brief Adjusts the boundary points assuming the wolrd points are relative the local coordinate system. + */ + void adjustBoundsForLocal(); + + /*! + * \brief Adjusts the boundary points so that the entire model fits inside the photo. + * + * \param mesh The model which decides the boundary. + */ + void adjustBoundsForEntireModel(const pcl::TextureMesh &mesh); + + /*! + * \brief Creates a transformation which aligns the area for the orthophoto. + */ + Eigen::Transform getROITransform(float xMin, float yMin) const; + + /*! + * \brief Renders a triangle into the ortho photo. + * + * Pixel center defined as middle of pixel for triangle rasterisation, and in lower left corner for texture look-up. 
+ *
+ * \param texture The texture of the polygon.
+ * \param polygon The polygon as three indices relative to meshCloud.
+ * \param meshCloud Contains all vertices.
+ * \param uvs Contains the texture coordinates for the active material.
+ * \param faceIndex The index of the face.
+ */
+ void drawTexturedTriangle(const cv::Mat &texture, const pcl::Vertices &polygon, const pcl::PointCloud::Ptr &meshCloud, const std::vector &uvs, size_t faceIndex);
+
+ /*!
+ * \brief Sets the color of a pixel in the photo.
+ *
+ * \param row The row index of the pixel.
+ * \param col The column index of the pixel.
+ * \param u The u texture-coordinate, multiplied with the number of columns in the texture.
+ * \param v The v texture-coordinate, multiplied with the number of rows in the texture.
+ * \param texture The texture from which to get the color.
+ **/
+ void renderPixel(int row, int col, float u, float v, const cv::Mat &texture);
+
+ /*!
+ * \brief Calculates the barycentric coordinates of a point in a triangle.
+ *
+ * \param v1 The first triangle vertex.
+ * \param v2 The second triangle vertex.
+ * \param v3 The third triangle vertex.
+ * \param x The x coordinate of the point.
+ * \param y The y coordinate of the point.
+ * \param l1 The first vertex weight.
+ * \param l2 The second vertex weight.
+ * \param l3 The third vertex weight.
+ */
+ void getBarycentricCoordinates(pcl::PointXYZ v1, pcl::PointXYZ v2, pcl::PointXYZ v3, float x, float y, float &l1, float &l2, float &l3) const;
+
+ /*!
+ * \brief Check if a given polygon is a sliver polygon.
+ *
+ * \param v1 The first vertex of the polygon.
+ * \param v2 The second vertex of the polygon.
+ * \param v3 The third vertex of the polygon.
+ */
+ bool isSliverPolygon(pcl::PointXYZ v1, pcl::PointXYZ v2, pcl::PointXYZ v3) const;
+
+ /*!
+ * \brief Check if the model is suitable for ortho photo generation.
+ *
+ * \param mesh The model.
+ * \return True if the model is ok for generating ortho photo.
+ */
+ bool isModelOk(const pcl::TextureMesh &mesh);
+
+ /*!
+ * \brief Loads a model from an .obj file (replacement for the pcl obj loader).
+ *
+ * \param inputFile Path to the .obj file.
+ * \param mesh The model.
+ * \return True if model was loaded successfully.
+ */
+ bool loadObjFile(std::string inputFile, pcl::TextureMesh &mesh);
+
+ /*!
+ * \brief Function is copied straight from the function in the pcl::io module.
+ */
+ bool readHeader (const std::string &file_name, pcl::PCLPointCloud2 &cloud,
+ Eigen::Vector4f &origin, Eigen::Quaternionf &orientation,
+ int &file_version, int &data_type, unsigned int &data_idx,
+ const int offset);
+
+ Logger log_; /**< Logging object. */
+
+ std::string inputFile_; /**< Path to the textured mesh as an obj-file. */
+ std::string inputGeoRefFile_; /**< Path to the georeference system file. */
+ std::string outputFile_; /**< Path to the destination file. */
+ std::string outputCornerFile_; /**< Path to the output corner file. */
+ std::string logFile_; /**< Path to the log file. */
+
+ float resolution_; /**< The number of pixels per meter in the ortho photo. */
+
+ bool boundaryDefined_; /**< True if the user has defined a boundary. */
+
+ WorldPoint worldPoint1_; /**< The first boundary point for the ortho photo, in world coordinates. */
+ WorldPoint worldPoint2_; /**< The second boundary point for the ortho photo, in world coordinates. */
+ WorldPoint worldPoint3_; /**< The third boundary point for the ortho photo, in world coordinates. */
+ WorldPoint worldPoint4_; /**< The fourth boundary point for the ortho photo, in world coordinates. */
+
+ Eigen::Vector2f boundaryPoint1_; /**< The first boundary point for the ortho photo, in local coordinates. */
+ Eigen::Vector2f boundaryPoint2_; /**< The second boundary point for the ortho photo, in local coordinates. */
+ Eigen::Vector2f boundaryPoint3_; /**< The third boundary point for the ortho photo, in local coordinates.
*/ + Eigen::Vector2f boundaryPoint4_; /**< The fourth boundary point for the ortho photo, in local coordinates. */ + + cv::Mat photo_; /**< The ortho photo as an OpenCV matrix, CV_8UC3. */ + cv::Mat depth_; /**< The depth of the ortho photo as an OpenCV matrix, CV_32F. */ + + bool multiMaterial_; /**< True if the mesh has multiple materials. **/ + + std::vector companions_; /**< Materials (used by loadOBJFile). **/ +}; + +/*! + * \brief The OdmOrthoPhoto class + */ +class OdmOrthoPhotoException : public std::exception +{ + +public: + OdmOrthoPhotoException() : message("Error in OdmOrthoPhoto") {} + OdmOrthoPhotoException(std::string msgInit) : message("Error in OdmOrthoPhoto:\n" + msgInit) {} + ~OdmOrthoPhotoException() throw() {} + virtual const char* what() const throw() {return message.c_str(); } + +private: + std::string message; /**< The error message **/ +}; diff --git a/modules/odm_orthophoto/src/main.cpp b/modules/odm_orthophoto/src/main.cpp new file mode 100644 index 000000000..1490915be --- /dev/null +++ b/modules/odm_orthophoto/src/main.cpp @@ -0,0 +1,8 @@ +// Ortho photo generator. +#include "OdmOrthoPhoto.hpp" + +int main(int argc, char* argv[]) +{ + OdmOrthoPhoto orthoPhotoGenerator; + return orthoPhotoGenerator.run(argc, argv); +} diff --git a/modules/odm_slam/CMakeLists.txt b/modules/odm_slam/CMakeLists.txt new file mode 100644 index 000000000..50e746f99 --- /dev/null +++ b/modules/odm_slam/CMakeLists.txt @@ -0,0 +1,41 @@ +project(odm_slam) +cmake_minimum_required(VERSION 2.8) + +# Set opencv dir to the input spedified with option -DOPENCV_DIR="path" +set(OPENCV_DIR "OPENCV_DIR-NOTFOUND" CACHE "OPENCV_DIR" "Path to the opencv installation directory") + +# Add compiler options. 
+add_definitions(-Wall -Wextra) + +# Find pcl at the location specified by PCL_DIR +find_package(PCL 1.8 HINTS "${PCL_DIR}/share/pcl-1.8" REQUIRED) + +# Find OpenCV at the default location +find_package(OpenCV HINTS "${OPENCV_DIR}" REQUIRED) + +# Only link with required opencv modules. +set(OpenCV_LIBS opencv_core opencv_imgproc opencv_highgui) + +# Add the Eigen and OpenCV include dirs. +# Necessary since the PCL_INCLUDE_DIR variable set by find_package is broken.) +include_directories(${EIGEN_ROOT}) +include_directories(${OpenCV_INCLUDE_DIRS}) + +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC -std=c++11") + +set(PANGOLIN_ROOT ${CMAKE_BINARY_DIR}/../SuperBuild/install) + +set(ORB_SLAM_ROOT ${CMAKE_BINARY_DIR}/../SuperBuild/src/orb_slam2) + +include_directories(${EIGEN_ROOT}) +include_directories(${ORB_SLAM_ROOT}) +include_directories(${ORB_SLAM_ROOT}/include) +link_directories(${PANGOLIN_ROOT}/lib) +link_directories(${ORB_SLAM_ROOT}/lib) + +# Add source directory +aux_source_directory("./src" SRC_LIST) + +# Add exectuteable +add_executable(${PROJECT_NAME} ${SRC_LIST}) +target_link_libraries(odm_slam ${OpenCV_LIBS} ORB_SLAM2 pangolin) diff --git a/modules/odm_slam/src/OdmSlam.cpp b/modules/odm_slam/src/OdmSlam.cpp new file mode 100644 index 000000000..1fe7505db --- /dev/null +++ b/modules/odm_slam/src/OdmSlam.cpp @@ -0,0 +1,98 @@ +#include + +#include + +#include +#include + + +void SaveKeyFrameTrajectory(ORB_SLAM2::Map *map, const string &filename, const string &tracksfile) { + std::cout << std::endl << "Saving keyframe trajectory to " << filename << " ..." 
<< std::endl; + + vector vpKFs = map->GetAllKeyFrames(); + sort(vpKFs.begin(), vpKFs.end(), ORB_SLAM2::KeyFrame::lId); + + std::ofstream f; + f.open(filename.c_str()); + f << fixed; + + std::ofstream fpoints; + fpoints.open(tracksfile.c_str()); + fpoints << fixed; + + for(size_t i = 0; i < vpKFs.size(); i++) { + ORB_SLAM2::KeyFrame* pKF = vpKFs[i]; + + if(pKF->isBad()) + continue; + + cv::Mat R = pKF->GetRotation().t(); + vector q = ORB_SLAM2::Converter::toQuaternion(R); + cv::Mat t = pKF->GetCameraCenter(); + f << setprecision(6) << pKF->mTimeStamp << setprecision(7) << " " << t.at(0) << " " << t.at(1) << " " << t.at(2) + << " " << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << std::endl; + + for (auto point : pKF->GetMapPoints()) { + auto coords = point->GetWorldPos(); + fpoints << setprecision(6) + << pKF->mTimeStamp + << " " << point->mnId + << setprecision(7) + << " " << coords.at(0, 0) + << " " << coords.at(1, 0) + << " " << coords.at(2, 0) + << std::endl; + } + } + + f.close(); + fpoints.close(); + std::cout << std::endl << "trajectory saved!" << std::endl; +} + + +int main(int argc, char **argv) { + if(argc != 4) { + std::cerr << std::endl << + "Usage: " << argv[0] << " vocabulary settings video" << + std::endl; + return 1; + } + + cv::VideoCapture cap(argv[3]); + if(!cap.isOpened()) { + std::cerr << "Failed to load video: " << argv[3] << std::endl; + return -1; + } + + ORB_SLAM2::System SLAM(argv[1], argv[2], ORB_SLAM2::System::MONOCULAR, true); + + usleep(10 * 1e6); + + std::cout << "Start processing video ..." 
<< std::endl; + + double T = 0.1; // Seconds between frames + cv::Mat im; + int num_frames = cap.get(CV_CAP_PROP_FRAME_COUNT); + for(int ni = 0;; ++ni){ + std::cout << "processing frame " << ni << "/" << num_frames << std::endl; + // Get frame + bool res = false; + for (int trial = 0; !res && trial < 20; ++trial) { + std::cout << "trial " << trial << std::endl; + res = cap.read(im); + } + if(!res) break; + + double timestamp = ni * T; + + SLAM.TrackMonocular(im, timestamp); + + //usleep(int(T * 1e6)); + } + + SLAM.Shutdown(); + SaveKeyFrameTrajectory(SLAM.GetMap(), "KeyFrameTrajectory.txt", "MapPoints.txt"); + + return 0; +} diff --git a/modules/odm_slam/src/calibrate_video.py b/modules/odm_slam/src/calibrate_video.py new file mode 100644 index 000000000..59ae02aae --- /dev/null +++ b/modules/odm_slam/src/calibrate_video.py @@ -0,0 +1,152 @@ +#!/usr/bin/env python + +import argparse +import sys + +import numpy as np +import cv2 + + +class Calibrator: + """Camera calibration using a chessboard pattern.""" + + def __init__(self, pattern_width, pattern_height, motion_threshold=0.05): + """Init the calibrator. + + The parameter motion_threshold determines the minimal motion required + to add a new frame to the calibration data, as a ratio of image width. 
+ """ + self.pattern_size = (pattern_width, pattern_height) + self.motion_threshold = motion_threshold + self.pattern_points = np.array([ + (i, j, 0.0) + for j in range(pattern_height) + for i in range(pattern_width) + ], dtype=np.float32) + self.object_points = [] + self.image_points = [] + + def process_image(self, image, window_name): + """Find corners of an image and store them internally.""" + if len(image.shape) == 3: + gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) + else: + gray = image + + h, w = gray.shape + self.image_size = (w, h) + + found, corners = cv2.findChessboardCorners(gray, self.pattern_size) + + if found: + term = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_COUNT, 30, 0.1) + cv2.cornerSubPix(gray, corners, (5, 5), (-1, -1), term) + self._add_points(corners.reshape(-1, 2)) + + if window_name: + cv2.drawChessboardCorners(image, self.pattern_size, corners, found) + cv2.imshow(window_name, image) + + return found + + def calibrate(self): + """Run calibration using points extracted by process_image.""" + rms, camera_matrix, dist_coefs, rvecs, tvecs = cv2.calibrateCamera( + self.object_points, self.image_points, self.image_size, None, None) + return rms, camera_matrix, dist_coefs.ravel() + + def _add_points(self, image_points): + if self.image_points: + delta = np.fabs(image_points - self.image_points[-1]).max() + should_add = (delta > self.image_size[0] * self.motion_threshold) + else: + should_add = True + + if should_add: + self.image_points.append(image_points) + self.object_points.append(self.pattern_points) + + +def video_frames(filename): + """Yield frames in a video.""" + cap = cv2.VideoCapture(args.video) + while True: + ret, frame = cap.read() + if ret: + yield frame + else: + break + cap.release() + + +def orb_slam_calibration_config(camera_matrix, dist_coefs): + """String with calibration parameters in orb_slam config format.""" + lines = [ + "# Camera calibration and distortion parameters (OpenCV)", + "Camera.fx: 
{}".format(camera_matrix[0, 0]), + "Camera.fy: {}".format(camera_matrix[1, 1]), + "Camera.cx: {}".format(camera_matrix[0, 2]), + "Camera.cy: {}".format(camera_matrix[1, 2]), + "", + "Camera.k1: {}".format(dist_coefs[0]), + "Camera.k2: {}".format(dist_coefs[1]), + "Camera.p1: {}".format(dist_coefs[2]), + "Camera.p2: {}".format(dist_coefs[3]), + "Camera.k3: {}".format(dist_coefs[4]), + ] + return "\n".join(lines) + + +def parse_arguments(): + parser = argparse.ArgumentParser( + description="Camera calibration from video of a chessboard.") + parser.add_argument( + 'video', + help="video of the checkerboard") + parser.add_argument( + '--output', + default='calibration', + help="base name for the output files") + parser.add_argument( + '--size', + default='8x6', + help="size of the chessboard") + parser.add_argument( + '--visual', + action='http://github.com/store_true', + help="display images while calibrating") + return parser.parse_args() + + +if __name__ == '__main__': + args = parse_arguments() + + pattern_size = [int(i) for i in args.size.split('x')] + calibrator = Calibrator(pattern_size[0], pattern_size[1]) + + window_name = None + if args.visual: + window_name = 'Chessboard detection' + cv2.namedWindow(window_name, cv2.WINDOW_NORMAL) + + print "kept\tcurrent\tchessboard found" + + for i, frame in enumerate(video_frames(args.video)): + found = calibrator.process_image(frame, window_name) + + print "{}\t{}\t{} \r".format( + len(calibrator.image_points), i, found), + sys.stdout.flush() + + if args.visual: + if cv2.waitKey(1) & 0xFF == ord('q'): + break + + cv2.destroyAllWindows() + + rms, camera_matrix, dist_coefs = calibrator.calibrate() + + print + print "RMS:", rms + print + print orb_slam_calibration_config(camera_matrix, dist_coefs) diff --git a/modules/odm_slam/src/orb_slam_to_opensfm.py b/modules/odm_slam/src/orb_slam_to_opensfm.py new file mode 100644 index 000000000..fae1fedaf --- /dev/null +++ b/modules/odm_slam/src/orb_slam_to_opensfm.py @@ 
-0,0 +1,196 @@ +import argparse +import json +import os +import yaml + +import cv2 +import numpy as np +from opensfm import transformations as tf +from opensfm.io import mkdir_p + + +SCALE = 50 + + +def parse_orb_slam2_config_file(filename): + ''' + Parse ORB_SLAM2 config file. + + Parsing manually since neither pyyaml nor cv2.FileStorage seem to work. + ''' + res = {} + with open(filename) as fin: + lines = fin.readlines() + + for line in lines: + line = line.strip() + if line and line[0] != '#' and ':' in line: + key, value = line.split(':') + res[key.strip()] = value.strip() + return res + + +def camera_from_config(video_filename, config_filename): + ''' + Creates an OpenSfM from an ORB_SLAM2 config + ''' + config = parse_orb_slam2_config_file(config_filename) + fx = float(config['Camera.fx']) + fy = float(config['Camera.fy']) + cx = float(config['Camera.cx']) + cy = float(config['Camera.cy']) + k1 = float(config['Camera.k1']) + k2 = float(config['Camera.k2']) + p1 = float(config['Camera.p1']) + p2 = float(config['Camera.p2']) + width, height = get_video_size(video_filename) + size = max(width, height) + return { + 'width': width, + 'height': height, + 'focal': np.sqrt(fx * fy) / size, + 'k1': k1, + 'k2': k2 + } + + +def shot_id_from_timestamp(timestamp): + T = 0.1 # TODO(pau) get this from config + i = int(round(timestamp / T)) + return 'frame{0:06d}.png'.format(i) + + +def shots_from_trajectory(trajectory_filename): + ''' + Create opensfm shots from an orb_slam2/TUM trajectory + ''' + shots = {} + with open(trajectory_filename) as fin: + lines = fin.readlines() + + for line in lines: + a = map(float, line.split()) + timestamp = a[0] + c = np.array(a[1:4]) + q = np.array(a[4:8]) + R = tf.quaternion_matrix([q[3], q[0], q[1], q[2]])[:3, :3].T + t = -R.dot(c) * SCALE + shot = { + 'camera': 'slamcam', + 'rotation': list(cv2.Rodrigues(R)[0].flat), + 'translation': list(t.flat), + 'created_at': timestamp, + } + shots[shot_id_from_timestamp(timestamp)] = shot + return 
shots + + +def points_from_map_points(filename): + points = {} + with open(filename) as fin: + lines = fin.readlines() + + for line in lines: + words = line.split() + point_id = words[1] + coords = map(float, words[2:5]) + coords = [SCALE * i for i in coords] + points[point_id] = { + 'coordinates': coords, + 'color': [100, 0, 200] + } + + return points + + +def tracks_from_map_points(filename): + tracks = [] + with open(filename) as fin: + lines = fin.readlines() + + for line in lines: + words = line.split() + timestamp = float(words[0]) + shot_id = shot_id_from_timestamp(timestamp) + point_id = words[1] + row = [shot_id, point_id, point_id, '0', '0', '0', '0', '0'] + tracks.append('\t'.join(row)) + + return '\n'.join(tracks) + + +def get_video_size(video): + cap = cv2.VideoCapture(video) + width = int(cap.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH)) + height = int(cap.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)) + cap.release() + return width, height + + +def extract_keyframes_from_video(video, reconstruction): + ''' + Reads video and extracts a frame for each shot in reconstruction + ''' + image_path = 'images' + mkdir_p(image_path) + T = 0.1 # TODO(pau) get this from config + cap = cv2.VideoCapture(video) + video_idx = 0 + + shot_ids = sorted(reconstruction['shots'].keys()) + for shot_id in shot_ids: + shot = reconstruction['shots'][shot_id] + timestamp = shot['created_at'] + keyframe_idx = int(round(timestamp / T)) + + while video_idx <= keyframe_idx: + for i in range(20): + ret, frame = cap.read() + if ret: + break + else: + print 'retrying' + if not ret: + raise RuntimeError( + 'Cound not find keyframe {} in video'.format(shot_id)) + if video_idx == keyframe_idx: + cv2.imwrite(os.path.join(image_path, shot_id), frame) + video_idx += 1 + + cap.release() + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='Convert ORB_SLAM2 output to OpenSfM') + parser.add_argument( + 'video', + help='the tracked video file') + parser.add_argument( + 
'trajectory', + help='the trajectory file') + parser.add_argument( + 'points', + help='the map points file') + parser.add_argument( + 'config', + help='config file with camera calibration') + args = parser.parse_args() + + r = { + 'cameras': {}, + 'shots': {}, + 'points': {}, + } + + r['cameras']['slamcam'] = camera_from_config(args.video, args.config) + r['shots'] = shots_from_trajectory(args.trajectory) + r['points'] = points_from_map_points(args.points) + tracks = tracks_from_map_points(args.points) + + with open('reconstruction.json', 'w') as fout: + json.dump([r], fout, indent=4) + with open('tracks.csv', 'w') as fout: + fout.write(tracks) + + extract_keyframes_from_video(args.video, r) diff --git a/modules/odm_slam/src/undistort_radial.py b/modules/odm_slam/src/undistort_radial.py new file mode 100644 index 000000000..db40afa78 --- /dev/null +++ b/modules/odm_slam/src/undistort_radial.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +import argparse +import os + +import cv2 +import numpy as np + +import opensfm.dataset as dataset +import opensfm.io as io + + +def opencv_calibration_matrix(width, height, focal): + '''Calibration matrix as used by OpenCV and PMVS + ''' + f = focal * max(width, height) + return np.matrix([[f, 0, 0.5 * (width - 1)], + [0, f, 0.5 * (height - 1)], + [0, 0, 1.0]]) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description='Undistort images') + parser.add_argument('dataset', help='path to the dataset to be processed') + parser.add_argument('--output', help='output folder for the undistorted images') + args = parser.parse_args() + + data = dataset.DataSet(args.dataset) + if args.output: + output_path = args.output + else: + output_path = os.path.join(data.data_path, 'undistorted') + + print "Undistorting images from dataset [%s] to dir [%s]" % (data.data_path, output_path) + + io.mkdir_p(output_path) + + reconstructions = data.load_reconstruction() + for h, reconstruction in enumerate(reconstructions): + print 
"undistorting reconstruction", h + for image in reconstruction['shots']: + print "undistorting image", image + shot = reconstruction["shots"][image] + + original_image = data.image_as_array(image)[:,:,::-1] + camera = reconstruction['cameras'][shot['camera']] + original_h, original_w = original_image.shape[:2] + K = opencv_calibration_matrix(original_w, original_h, camera['focal']) + k1 = camera["k1"] + k2 = camera["k2"] + undistorted_image = cv2.undistort(original_image, K, np.array([k1, k2, 0, 0])) + + new_image_path = os.path.join(output_path, image.split('/')[-1]) + cv2.imwrite(new_image_path, undistorted_image) diff --git a/opendm/__init__.py b/opendm/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/opendm/config.py b/opendm/config.py new file mode 100644 index 000000000..5e7e91a35 --- /dev/null +++ b/opendm/config.py @@ -0,0 +1,493 @@ +import argparse +from opendm import context +from opendm import io +from opendm import log +from appsettings import SettingsParser + +import sys + +# parse arguments +processopts = ['resize', 'opensfm', 'slam', 'cmvs', 'pmvs', + 'odm_meshing', 'odm_25dmeshing', 'mvs_texturing', 'odm_georeferencing', + 'odm_dem', 'odm_orthophoto'] + +with open(io.join_paths(context.root_path, 'VERSION')) as version_file: + __version__ = version_file.read().strip() + + +def alphanumeric_string(string): + import re + if re.match('^[a-zA-Z0-9_-]+$', string) is None: + msg = '{0} is not a valid name. 
Must use alphanumeric characters.'.format(string) + raise argparse.ArgumentTypeError(msg) + return string + + +class RerunFrom(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, processopts[processopts.index(values):]) + + +parser = SettingsParser(description='OpenDroneMap', + usage='%(prog)s [options] ', + yaml_file=open(context.settings_path)) + +def config(): + parser.add_argument('--images', '-i', + metavar='', + help='Path to input images'), + + parser.add_argument('--project-path', + metavar='', + help='Path to the project folder') + + parser.add_argument('name', + metavar='', + type=alphanumeric_string, + help='Name of Project (i.e subdirectory of projects folder)') + + parser.add_argument('--resize-to', + metavar='', + default=2048, + type=int, + help='resizes images by the largest side for opensfm. ' + 'Set to -1 to disable. Default: %(default)s') + + parser.add_argument('--start-with', '-s', + metavar='', + default='resize', + choices=processopts, + help=('Can be one of: ' + ' | '.join(processopts))) + + parser.add_argument('--end-with', '-e', + metavar='', + default='odm_orthophoto', + choices=processopts, + help=('Can be one of:' + ' | '.join(processopts))) + + rerun = parser.add_mutually_exclusive_group() + + rerun.add_argument('--rerun', '-r', + metavar='', + choices=processopts, + help=('Can be one of:' + ' | '.join(processopts))) + + rerun.add_argument('--rerun-all', + action='http://github.com/store_true', + default=False, + help='force rerun of all tasks') + + rerun.add_argument('--rerun-from', + action=RerunFrom, + metavar='', + choices=processopts, + help=('Can be one of:' + ' | '.join(processopts))) + + parser.add_argument('--video', + metavar='', + help='Path to the video file to process') + + parser.add_argument('--slam-config', + metavar='', + help='Path to config file for orb-slam') + + parser.add_argument('--force-focal', + metavar='', + type=float, + 
help=('Override the focal length information for the ' + 'images')) + + parser.add_argument('--force-ccd', + metavar='', + type=float, + help='Override the ccd width information for the images') + + parser.add_argument('--min-num-features', + metavar='', + default=4000, + type=int, + help=('Minimum number of features to extract per image. ' + 'More features leads to better results but slower ' + 'execution. Default: %(default)s')) + + parser.add_argument('--matcher-neighbors', + type=int, + metavar='', + default=8, + help='Number of nearest images to pre-match based on GPS ' + 'exif data. Set to 0 to skip pre-matching. ' + 'Neighbors works together with Distance parameter, ' + 'set both to 0 to not use pre-matching. OpenSFM ' + 'uses both parameters at the same time, Bundler ' + 'uses only one which has value, prefering the ' + 'Neighbors parameter. Default: %(default)s') + + parser.add_argument('--matcher-distance', + metavar='', + default=0, + type=int, + help='Distance threshold in meters to find pre-matching ' + 'images based on GPS exif data. Set both ' + 'matcher-neighbors and this to 0 to skip ' + 'pre-matching. Default: %(default)s') + + parser.add_argument('--use-fixed-camera-params', + action='http://github.com/store_true', + default=False, + help='Turn off camera parameter optimization during bundler') + + parser.add_argument('--opensfm-processes', + metavar='', + default=context.num_cores, + type=int, + help=('The maximum number of processes to use in dense ' + 'reconstruction. Default: %(default)s')) + + parser.add_argument('--use-hybrid-bundle-adjustment', + action='http://github.com/store_true', + default=False, + help='Run local bundle adjustment for every image added to the reconstruction and a global ' + 'adjustment every 100 images. 
Speeds up reconstruction for very large datasets.') + + parser.add_argument('--use-25dmesh', + action='http://github.com/store_true', + default=False, + help='Use a 2.5D mesh to compute the orthophoto. This option tends to provide better results for planar surfaces. Experimental.') + + parser.add_argument('--use-pmvs', + action='http://github.com/store_true', + default=False, + help='Use pmvs to compute point cloud alternatively') + + parser.add_argument('--cmvs-maxImages', + metavar='', + default=500, + type=int, + help='The maximum number of images per cluster. ' + 'Default: %(default)s') + + parser.add_argument('--pmvs-level', + metavar='', + default=1, + type=int, + help=('The level in the image pyramid that is used ' + 'for the computation. see ' + 'http://www.di.ens.fr/pmvs/documentation.html%20for' + 'more pmvs documentation. Default: %(default)s')) + + parser.add_argument('--pmvs-csize', + metavar='', + default=2, + type=int, + help='Cell size controls the density of reconstructions' + 'Default: %(default)s') + + parser.add_argument('--pmvs-threshold', + metavar='', + default=0.7, + type=float, + help=('A patch reconstruction is accepted as a success ' + 'and kept if its associated photometric consistency ' + 'measure is above this threshold. Default: %(default)s')) + + parser.add_argument('--pmvs-wsize', + metavar='', + default=7, + type=int, + help='pmvs samples wsize x wsize pixel colors from ' + 'each image to compute photometric consistency ' + 'score. For example, when wsize=7, 7x7=49 pixel ' + 'colors are sampled in each image. Increasing the ' + 'value leads to more stable reconstructions, but ' + 'the program becomes slower. Default: %(default)s') + + parser.add_argument('--pmvs-min-images', + metavar='', + default=3, + type=int, + help=('Each 3D point must be visible in at least ' + 'minImageNum images for being reconstructed. 3 is ' + 'suggested in general. 
Default: %(default)s')) + + parser.add_argument('--pmvs-num-cores', + metavar='', + default=context.num_cores, + type=int, + help=('The maximum number of cores to use in dense ' + 'reconstruction. Default: %(default)s')) + + parser.add_argument('--mesh-size', + metavar='', + default=100000, + type=int, + help=('The maximum vertex count of the output mesh ' + 'Default: %(default)s')) + + parser.add_argument('--mesh-octree-depth', + metavar='', + default=9, + type=int, + help=('Oct-tree depth used in the mesh reconstruction, ' + 'increase to get more vertices, recommended ' + 'values are 8-12. Default: %(default)s')) + + parser.add_argument('--mesh-samples', + metavar='= 1.0>', + default=1.0, + type=float, + help=('Number of points per octree node, recommended ' + 'and default value: %(default)s')) + + parser.add_argument('--mesh-solver-divide', + metavar='', + default=9, + type=int, + help=('Oct-tree depth at which the Laplacian equation ' + 'is solved in the surface reconstruction step. ' + 'Increasing this value increases computation ' + 'times slightly but helps reduce memory usage. ' + 'Default: %(default)s')) + + parser.add_argument('--mesh-remove-outliers', + metavar='', + default=2, + type=float, + help=('Percentage of outliers to remove from the point set. Set to 0 to disable. ' + 'Applies to 2.5D mesh only. ' + 'Default: %(default)s')) + + parser.add_argument('--mesh-wlop-iterations', + metavar='', + default=35, + type=int, + help=('Iterations of the Weighted Locally Optimal Projection (WLOP) simplification algorithm. ' + 'Higher values take longer but produce a smoother mesh. ' + 'Applies to 2.5D mesh only. ' + 'Default: %(default)s')) + + parser.add_argument('--texturing-data-term', + metavar='', + default='gmi', + choices=['gmi', 'area'], + help=('Data term: [area, gmi]. 
Default: ' + '%(default)s')) + + parser.add_argument('--texturing-outlier-removal-type', + metavar='', + default='gauss_clamping', + choices=['none', 'gauss_clamping', 'gauss_damping'], + help=('Type of photometric outlier removal method: ' + '[none, gauss_damping, gauss_clamping]. Default: ' + '%(default)s')) + + parser.add_argument('--texturing-skip-visibility-test', + action='http://github.com/store_true', + default=False, + help=('Skip geometric visibility test. Default: ' + ' %(default)s')) + + parser.add_argument('--texturing-skip-global-seam-leveling', + action='http://github.com/store_true', + default=False, + help=('Skip global seam leveling. Useful for IR data.' + 'Default: %(default)s')) + + parser.add_argument('--texturing-skip-local-seam-leveling', + action='http://github.com/store_true', + default=False, + help='Skip local seam blending. Default: %(default)s') + + parser.add_argument('--texturing-skip-hole-filling', + action='http://github.com/store_true', + default=False, + help=('Skip filling of holes in the mesh. Default: ' + ' %(default)s')) + + parser.add_argument('--texturing-keep-unseen-faces', + action='http://github.com/store_true', + default=False, + help=('Keep faces in the mesh that are not seen in any camera. ' + 'Default: %(default)s')) + + parser.add_argument('--texturing-tone-mapping', + metavar='', + choices=['none', 'gamma'], + default='none', + help='Turn on gamma tone mapping or none for no tone ' + 'mapping. Choices are \'gamma\' or \'none\'. ' + 'Default: %(default)s ') + + parser.add_argument('--gcp', + metavar='', + default=None, + help=('path to the file containing the ground control ' + 'points used for georeferencing. Default: ' + '%(default)s. 
The file needs to ' + 'be on the following line format: \neasting ' + 'northing height pixelrow pixelcol imagename')) + + parser.add_argument('--use-exif', + action='http://github.com/store_true', + default=False, + help=('Use this tag if you have a gcp_list.txt but ' + 'want to use the exif geotags instead')) + + parser.add_argument('--dtm', + action='http://github.com/store_true', + default=False, + help='Use this tag to build a DTM (Digital Terrain Model, ground only) using a progressive ' + 'morphological filter. Check the --dem* parameters for fine tuning.') + + parser.add_argument('--dsm', + action='http://github.com/store_true', + default=False, + help='Use this tag to build a DSM (Digital Surface Model, ground + objects) using a progressive ' + 'morphological filter. Check the --dem* parameters for fine tuning.') + + parser.add_argument('--dem-gapfill-steps', + metavar='', + default=4, + type=int, + help='Number of steps used to fill areas with gaps. Set to 0 to disable gap filling. ' + 'Starting with a radius equal to the output resolution, N different DEMs are generated with ' + 'progressively bigger radius using the inverse distance weighted (IDW) algorithm ' + 'and merged together. Remaining gaps are then merged using nearest neighbor interpolation. ' + '\nDefault=%(default)s') + + parser.add_argument('--dem-resolution', + metavar='', + type=float, + default=0.1, + help='Length of raster cell edges in meters.' + '\nDefault: %(default)s') + + parser.add_argument('--dem-maxangle', + metavar='', + type=float, + default=20, + help='Points that are more than maxangle degrees off-nadir are discarded. ' + '\nDefault: ' + '%(default)s') + + parser.add_argument('--dem-maxsd', + metavar='', + type=float, + default=2.5, + help='Points that deviate more than maxsd standard deviations from the local mean ' + 'are discarded. 
\nDefault: ' + '%(default)s') + + parser.add_argument('--dem-initial-distance', + metavar='', + type=float, + default=0.15, + help='Used to classify ground vs non-ground points. Set this value to account for Z noise in meters. ' + 'If you have an uncertainty of around 15 cm, set this value large enough to not exclude these points. ' + 'Too small of a value will exclude valid ground points, while too large of a value will misclassify non-ground points for ground ones. ' + '\nDefault: ' + '%(default)s') + + parser.add_argument('--dem-approximate', + action='http://github.com/store_true', + default=False, + help='Use this tag use the approximate progressive ' + 'morphological filter, which computes DEMs faster ' + 'but is not as accurate.') + + parser.add_argument('--dem-decimation', + metavar='', + default=1, + type=int, + help='Decimate the points before generating the DEM. 1 is no decimation (full quality). ' + '100 decimates ~99%% of the points. Useful for speeding up ' + 'generation.\nDefault=%(default)s') + + parser.add_argument('--dem-terrain-type', + metavar='', + choices=['FlatNonForest', 'FlatForest', 'ComplexNonForest', 'ComplexForest'], + default='ComplexForest', + help='One of: %(choices)s. Specifies the type of terrain. This mainly helps reduce processing time. ' + '\nFlatNonForest: Relatively flat region with little to no vegetation' + '\nFlatForest: Relatively flat region that is forested' + '\nComplexNonForest: Varied terrain with little to no vegetation' + '\nComplexForest: Varied terrain that is forested' + '\nDefault=%(default)s') + + parser.add_argument('--orthophoto-resolution', + metavar=' 0.0>', + default=20.0, + type=float, + help=('Orthophoto ground resolution in pixels/meter' + 'Default: %(default)s')) + + parser.add_argument('--orthophoto-target-srs', + metavar="", + type=str, + default=None, + help='Target spatial reference for orthophoto creation. 
' + 'Not implemented yet.\n' + 'Default: %(default)s') + + parser.add_argument('--orthophoto-no-tiled', + action='http://github.com/store_true', + default=False, + help='Set this parameter if you want a stripped geoTIFF.\n' + 'Default: %(default)s') + + parser.add_argument('--orthophoto-compression', + metavar='', + type=str, + choices=['JPEG', 'LZW', 'PACKBITS', 'DEFLATE', 'LZMA', 'NONE'], + default='DEFLATE', + help='Set the compression to use. Note that this could ' + 'break gdal_translate if you don\'t know what you ' + 'are doing. Options: %(choices)s.\nDefault: %(default)s') + + parser.add_argument('--orthophoto-bigtiff', + type=str, + choices=['YES', 'NO','IF_NEEDED','IF_SAFER'], + default='IF_SAFER', + help='Control whether the created orthophoto is a BigTIFF or ' + 'classic TIFF. BigTIFF is a variant for files larger than ' + '4GiB of data. Options are %(choices)s. See GDAL specs: ' + 'https://www.gdal.org/frmt_gtiff.html%20for%20more%20info.' + '\nDefault: %(default)s') + + parser.add_argument('--build-overviews', + action='http://github.com/store_true', + default=False, + help='Build orthophoto overviews using gdaladdo.') + + parser.add_argument('--zip-results', + action='http://github.com/store_true', + default=False, + help='compress the results using gunzip') + + parser.add_argument('--verbose', '-v', + action='http://github.com/store_true', + default=False, + help='Print additional messages to the console\n' + 'Default: %(default)s') + + parser.add_argument('--time', + action='http://github.com/store_true', + default=False, + help='Generates a benchmark file with runtime info\n' + 'Default: %(default)s') + + parser.add_argument('--version', + action='http://github.com/version', + version='OpenDroneMap {0}'.format(__version__), + help='Displays version number and exits. 
') + + args = parser.parse_args() + + # check that the project path setting has been set properly + if not args.project_path: + log.ODM_ERROR('You need to set the project path in the ' + 'settings.yaml file before you can run ODM, ' + 'or use `--project-path `. Run `python ' + 'run.py --help` for more information. ') + sys.exit(1) + + return args diff --git a/opendm/context.py b/opendm/context.py new file mode 100644 index 000000000..198b07a8c --- /dev/null +++ b/opendm/context.py @@ -0,0 +1,48 @@ +import os +import sys +from opendm import io +import multiprocessing + +# Define some needed locations +scripts_path = os.path.abspath(os.path.dirname(__file__)) +root_path, _ = os.path.split(scripts_path) + +superbuild_path = os.path.join(root_path, 'SuperBuild') +superbuild_bin_path = os.path.join(superbuild_path, 'install', 'bin') +tests_path = os.path.join(root_path, 'tests') +tests_data_path = os.path.join(root_path, 'tests/test_data') + +# add opencv to python path +pyopencv_path = os.path.join(superbuild_path, 'install/lib/python2.7/dist-packages') +sys.path.append(pyopencv_path) + +# define opensfm path +opensfm_path = os.path.join(superbuild_path, "src/opensfm") +ccd_widths_path = os.path.join(opensfm_path, 'opensfm/data/sensor_data.json') + +# define orb_slam2 path +orb_slam2_path = os.path.join(superbuild_path, "src/orb_slam2") + +# define pmvs path +cmvs_path = os.path.join(superbuild_path, "install/bin/cmvs") +cmvs_opts_path = os.path.join(superbuild_path, "install/bin/genOption") +pmvs2_path = os.path.join(superbuild_path, "install/bin/pmvs2") + +# define mvstex path +mvstex_path = os.path.join(superbuild_path, "install/bin/texrecon") + +# define txt2las path +txt2las_path = os.path.join(superbuild_path, 'src/las-tools/bin') +pdal_path = os.path.join(superbuild_path, 'build/pdal/bin') + +# define odm modules path +odm_modules_path = os.path.join(root_path, "build/bin") +odm_modules_src_path = os.path.join(root_path, "modules") + +settings_path = 
def get_files_list(path_dir):
    """Return the names of the entries found in ``path_dir``."""
    return os.listdir(path_dir)


def absolute_path_file(path_file):
    """Return the absolute version of ``path_file``."""
    return os.path.abspath(path_file)


def extract_file_from_path_file(path_file):
    """Return only the file-name component of ``path_file``."""
    _, tail = os.path.split(path_file)
    return tail


def extract_path_from_file(file):
    """Return the parent of the directory containing ``file``.

    NOTE(review): despite the name this yields the directory one level
    *above* the file's own directory; callers appear to rely on that,
    so the behavior is preserved here.
    """
    containing_dir = os.path.abspath(os.path.dirname(file))
    head, _ = os.path.split(containing_dir)
    return head


def join_paths(path1, path2):
    """Join two path fragments with the platform separator."""
    return os.path.join(path1, path2)


def file_exists(path_file):
    """True when ``path_file`` names an existing regular file."""
    return os.path.isfile(path_file)


def dir_exists(dirname):
    """True when ``dirname`` names an existing directory."""
    return os.path.isdir(dirname)


def copy(src, dst):
    """Copy ``src`` to ``dst``, accepting either a directory or a file."""
    try:
        shutil.copytree(src, dst)
    except OSError as e:
        # src was not a directory: fall back to a plain file copy.
        if e.errno != errno.ENOTDIR:
            raise
        shutil.copy(src, dst)
def run_and_return(cmdSrc, cmdDest=None):
    """Run a system command and return its standard output as text.

    :param cmdSrc: shell command producing the output.
    :param cmdDest: optional second command; when given, ``cmdSrc``'s
        output is piped into it.  The previous implementation silently
        ignored this argument, which broke callers such as
        ``ODM_GeoRef.utm_to_latlon`` that pass the ``gdaltransform``
        stage here and then parse the transformed coordinates.
    :return: captured stdout, decoded as ASCII.
    """
    # Build a shell pipeline only when a destination command is supplied.
    cmd = cmdSrc if cmdDest is None else '%s | %s' % (cmdSrc, cmdDest)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
    stdout, stderr = process.communicate()
    return stdout.decode('ascii')
+ """ + try: + os.makedirs(path) + except os.error as exc: + if exc.errno != errno.EEXIST or not os.path.isdir(path): + raise + + +def calculate_EPSG(utmZone, south): + """Calculate and return the EPSG""" + if south: + return 32700 + utmZone + else: + return 32600 + utmZone diff --git a/opendm/tasks.py b/opendm/tasks.py new file mode 100644 index 000000000..91135ce72 --- /dev/null +++ b/opendm/tasks.py @@ -0,0 +1,120 @@ +import log +import system + +import dataset +import types + +from scripts.opensfm import opensfm + +# Define pipeline tasks +tasks_dict = {'1': 'opensfm', + '2': 'cmvs', + '3': 'pmvs', + '4': 'odm_meshing', + '5': 'mvs_texturing', + '6': 'odm_georeferencing', + '7': 'odm_dem', + '8': 'odm_orthophoto', + '9': 'zip_results'} + + +class ODMTaskManager(object): + """docstring for ODMTaskManager""" + + def __init__(self, odm_app): + self.odm_app = odm_app + self.initial_task_id = 0 + self.current_task_id = 0 + self.final_task_id = len(tasks_dict) + self.tasks = self.init_tasks(tasks_dict, self.odm_app) + + def init_tasks(self, _tasks_dict, _odm_app): + # dict to store tasks objects + tasks = {} + # loop over tasks dict + for key, in _tasks_dict: + # instantiate and append ODMTask + task_name = _tasks_dict[key] + tasks[key] = ODMTask(key, task_name) + + # setup tasks + if task_name == 'resize': + # setup this task + command = resize + inputs = {'project_path': _odm_app.project_path, + 'args': _odm_app.args, + 'photos': _odm_app.photos} + + elif task_name == 'opensfm': + # setup this task + command = opensfm + inputs = {'project_path': _odm_app.project_path, + 'args': _odm_app.args, + 'photos': _odm_app.photos} + + elif task_name in ['cmvs', 'pmvs', 'odm_meshing', 'mvs_texturing', 'odm_georeferencing', 'odm_orthophoto', 'zip_results']: + # setup this task + command = None + inputs = {} + + else: + log.ODM_ERROR('task_name %s is not valid' % task_name) + + # setup task configuration + task = tasks[key] + task.command = command + task.inputs = inputs + + 
class ODMTask(object):
    """A single step of the ODM pipeline.

    Wraps a callable (``command``) plus its keyword arguments
    (``inputs``) and tracks execution state.
    State codes: 0 waiting, 1 running, 2 succeeded, 3 failed.
    """

    def __init__(self, id, name):
        # task definition
        self.id = id
        self.name = name
        # task i/o: the callable to invoke and its keyword arguments
        self.command = None
        self.inputs = {}
        # Current task state (0:waiting, 1:running, 2:succeeded, 3:failed)
        # By default we set a task in waiting state
        self.state = 0

    # Launch task
    def run(self):
        """Execute the task and return the resulting state code."""
        self.state = 1  # running
        return self.launch_command()

    def launch_command(self):
        """Invoke ``self.command(**self.inputs)`` and map the outcome.

        :return: 2 on success; 3 on failure (no command configured,
            a falsy return value, or a raised exception).
        """
        if self.command is None:
            log.ODM_ERROR('Call method for task %s not defined' % self.name)
            return 3  # failed
        # run command
        try:
            succeed = self.command(**self.inputs)
            return 2 if succeed else 3  # 2:succeeded, 3:failed
        except Exception as e:
            # Fixed: was `except Exception, e` — Python-2-only syntax.
            # The `as` form is equivalent on Python 2.6+ and required on 3.
            log.ODM_ERROR(str(e))
            return 3  # failed
self.focal_length_px = None + # other attributes + self.camera_make = '' + self.camera_model = '' + self.make_model = '' + self.latitude = None + self.longitude = None + self.altitude = None + # parse values from metadata + self.parse_pyexiv2_values(self.path_file, force_focal, force_ccd) + # compute focal length into pixels + self.update_focal() + + # print log message + log.ODM_DEBUG('Loaded {} | camera: {} | dimensions: {} x {} | focal: {} | ccd: {} | lat: {} | lon: {} | alt: {}' + .format(self.filename, self.make_model, self.width, self.height, self.focal_length, + self.ccd_width, self.latitude, self.longitude, self.altitude)) + + def update_focal(self): + # compute focal length in pixels + if self.focal_length and self.ccd_width: + # take width or height as reference + if self.width > self.height: + # f(px) = w(px) * f(mm) / ccd(mm) + self.focal_length_px = \ + self.width * (self.focal_length / self.ccd_width) + else: + # f(px) = h(px) * f(mm) / ccd(mm) + self.focal_length_px = \ + self.height * (self.focal_length / self.ccd_width) + + def parse_pyexiv2_values(self, _path_file, _force_focal, _force_ccd): + # read image metadata + metadata = pyexiv2.ImageMetadata(_path_file) + metadata.read() + # loop over image tags + for key in metadata: + # try/catch tag value due to weird bug in pyexiv2 + # ValueError: invalid literal for int() with base 10: '' + GPS = 'Exif.GPSInfo.GPS' + try: + # parse tag names + if key == 'Exif.Image.Make': + self.camera_make = metadata[key].value + elif key == 'Exif.Image.Model': + self.camera_model = metadata[key].value + elif key == 'Exif.Photo.FocalLength': + self.focal_length = float(metadata[key].value) + elif key == GPS + 'Latitude': + self.latitude = self.dms_to_decimal(*metadata[key].value + + [metadata[GPS + 'LatitudeRef'].value]) + elif key == GPS + 'Longitude': + self.longitude = self.dms_to_decimal(*metadata[key].value + + [metadata[GPS + 'LongitudeRef'].value]) + elif key == GPS + 'Altitude': + self.altitude = 
float(metadata[key].value) + if metadata[GPS + 'AltitudeRef'] and int(metadata[GPS + 'AltitudeRef'].value) > 0: + self.altitude *= -1. + except (pyexiv2.ExifValueError, ValueError) as e: + pass + except KeyError as e: + log.ODM_DEBUG('Tag not set') + except NotImplementedError as e: + pass + + if self.camera_make and self.camera_model: + self.make_model = sensor_string(self.camera_make, self.camera_model) + + # needed to do that since sometimes metadata contains wrong data + img = cv2.imread(_path_file) + self.width = img.shape[1] + self.height = img.shape[0] + + # force focal and ccd_width with user parameter + if _force_focal: + self.focal_length = _force_focal + if _force_ccd: + self.ccd_width = _force_ccd + + # find ccd_width from file if needed + if self.ccd_width is None and self.camera_model is not None: + # load ccd_widths from file + ccd_widths = system.get_ccd_widths() + # search ccd by camera model + key = [x for x in ccd_widths.keys() if self.make_model in x] + # convert to float if found + if key: + self.ccd_width = float(ccd_widths[key[0]]) + else: + log.ODM_WARNING('Could not find ccd_width in file. 
Use --force-ccd or edit the sensor_data.json ' + 'file to manually input ccd width') + + def dms_to_decimal(self, degrees, minutes, seconds, sign=' '): + """Converts dms coords to decimal degrees""" + return (-1 if sign[0] in 'SWsw' else 1) * ( + float(degrees) + + float(minutes) / 60 + + float(seconds) / 3600 + ) + + +# TODO: finish this class +class ODM_Reconstruction(object): + """docstring for ODMReconstruction""" + + def __init__(self, arg): + super(ODMReconstruction, self).__init__() + self.arg = arg + + +class ODM_GCPoint(object): + """docstring for ODMPoint""" + + def __init__(self, x, y, z): + self.x = x + self.y = y + self.z = z + + +class ODM_GeoRef(object): + """docstring for ODMUtmZone""" + + def __init__(self): + self.datum = 'WGS84' + self.epsg = None + self.utm_zone = 0 + self.utm_pole = 'N' + self.utm_east_offset = 0 + self.utm_north_offset = 0 + self.gcps = [] + + def calculate_EPSG(self, _utm_zone, _pole): + """Calculate and return the EPSG""" + if _pole == 'S': + return 32700 + _utm_zone + elif _pole == 'N': + return 32600 + _utm_zone + else: + log.ODM_ERROR('Unknown pole format %s' % _pole) + return + + def coord_to_fractions(self, coord, refs): + deg_dec = abs(float(coord)) + deg = int(deg_dec) + minute_dec = (deg_dec - deg) * 60 + minute = int(minute_dec) + + sec_dec = (minute_dec - minute) * 60 + sec_dec = round(sec_dec, 3) + sec_denominator = 1000 + sec_numerator = int(sec_dec * sec_denominator) + if float(coord) >= 0: + latRef = refs[0] + else: + latRef = refs[1] + + output = str(deg) + '/1 ' + str(minute) + '/1 ' + str(sec_numerator) + '/' + str(sec_denominator) + return output, latRef + + def convert_to_las(self, _file, _file_out, json_file): + + if not self.epsg: + log.ODM_ERROR('Empty EPSG: Could not convert to LAS') + return + + kwargs = {'bin': context.pdal_path, + 'f_in': _file, + 'f_out': _file_out, + 'east': self.utm_east_offset, + 'north': self.utm_north_offset, + 'epsg': self.epsg, + 'json': json_file} + + # create pipeline file 
transform.xml to enable transformation + pipeline = '{{' \ + ' "pipeline":[' \ + ' "untransformed.ply",' \ + ' {{' \ + ' "type":"filters.transformation",' \ + ' "matrix":"1 0 0 {east} 0 1 0 {north} 0 0 1 0 0 0 0 1"' \ + ' }},' \ + ' {{' \ + ' "a_srs":"EPSG:{epsg}",' \ + ' "offset_x":"{east}",' \ + ' "offset_y":"{north}",' \ + ' "offset_z":"0",' \ + ' "filename":"transformed.las"' \ + ' }}' \ + ' ]' \ + '}}'.format(**kwargs) + + with open(json_file, 'w') as f: + f.write(pipeline) + + # call pdal + system.run('{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in} ' + '--writers.las.filename={f_out}'.format(**kwargs)) + + def utm_to_latlon(self, _file, _photo, idx): + + gcp = self.gcps[idx] + + kwargs = {'epsg': self.epsg, + 'file': _file, + 'x': gcp.x + self.utm_east_offset, + 'y': gcp.y + self.utm_north_offset, + 'z': gcp.z} + + latlon = system.run_and_return('echo {x} {y} {z} '.format(**kwargs), + 'gdaltransform -s_srs \"EPSG:{epsg}\" ' + '-t_srs \"EPSG:4326\"'.format(**kwargs)).split() + + # Example: 83d18'16.285"W + # Example: 41d2'11.789"N + # Example: 0.998 + + if len(latlon) == 3: + lon_str, lat_str, alt_str = latlon + elif len(latlon) == 2: + lon_str, lat_str = latlon + alt_str = '' + else: + log.ODM_ERROR('Something went wrong %s' % latlon) + + lat_frac = self.coord_to_fractions(latlon[1], ['N', 'S']) + lon_frac = self.coord_to_fractions(latlon[0], ['E', 'W']) + + # read image metadata + metadata = pyexiv2.ImageMetadata(_photo.path_file) + metadata.read() + + # #set values + # + # # GPS latitude + # key = 'Exif.GPSInfo.GPSLatitude' + # value = lat_frac[0].split(' ') + # log.ODM_DEBUG('lat_frac: %s %s %s' % (value[0], value[1], value[2])) + # metadata[key] = pyexiv2.ExifTag(key, + # [Fraction(value[0]), + # Fraction(value[1]), + # Fraction(value[2])]) + # + # key = 'Exif.GPSInfo.GPSLatitudeRef' + # value = lat_frac[1] + # metadata[key] = pyexiv2.ExifTag(key, value) + # + # # GPS longitude + # key = 'Exif.GPSInfo.GPSLongitude' + # value = 
lon_frac[0].split(' ') + # metadata[key] = pyexiv2.ExifTag(key, + # [Fraction(value[0]), + # Fraction(value[1]), + # Fraction(value[2])]) + # + # key = 'Exif.GPSInfo.GPSLongitudeRef' + # value = lon_frac[1] + # metadata[key] = pyexiv2.ExifTag(key, value) + # + # # GPS altitude + # altitude = abs(int(float(latlon[2]) * 100)) + # key = 'Exif.GPSInfo.GPSAltitude' + # value = Fraction(altitude, 1) + # metadata[key] = pyexiv2.ExifTag(key, value) + # + # if latlon[2] >= 0: + # altref = '0' + # else: + # altref = '1' + # key = 'Exif.GPSInfo.GPSAltitudeRef' + # metadata[key] = pyexiv2.ExifTag(key, altref) + # + # # write values + # metadata.write() + + def parse_coordinate_system(self, _file): + """Write attributes to jobOptions from coord file""" + # check for coordinate file existence + if not io.file_exists(_file): + log.ODM_ERROR('Could not find file %s' % _file) + return + + with open(_file) as f: + # extract reference system and utm zone from first line. + # We will assume the following format: + # 'WGS84 UTM 17N' or 'WGS84 UTM 17N \n' + line = f.readline().rstrip() + log.ODM_DEBUG('Line: %s' % line) + ref = line.split(' ') + # match_wgs_utm = re.search('WGS84 UTM (\d{1,2})(N|S)', line, re.I) + if ref[0] == 'WGS84' and ref[1] == 'UTM': # match_wgs_utm: + self.datum = ref[0] + self.utm_pole = ref[2][len(ref[2]) - 1] + self.utm_zone = int(ref[2][:len(ref[2]) - 1]) + # extract east and west offsets from second line. + # We will assume the following format: + # '440143 4588391' + # update EPSG + self.epsg = self.calculate_EPSG(self.utm_zone, self.utm_pole) + # If the first line looks like "EPSG:n" or "epsg:n" + elif ref[0].split(':')[0].lower() == 'epsg': + self.epsg = line.split(':')[1] + else: + log.ODM_ERROR('Could not parse coordinates. 
class ODM_Tree(object):
    """Central registry of every directory and file path used by the
    reconstruction pipeline, all rooted at the project directory."""

    def __init__(self, root_path, images_path):
        # root path to the project
        self.root_path = io.absolute_path_file(root_path)
        if images_path:
            self.input_images = io.absolute_path_file(images_path)
        else:
            self.input_images = io.join_paths(self.root_path, 'images')

        # module paths: every stage of the pipeline writes under one of
        # these directories so all files and directories stay traceable
        # throughout the reconstruction process.
        self.dataset_raw = self.path('images')
        self.opensfm = self.path('opensfm')
        self.pmvs = self.path('pmvs')
        self.odm_meshing = self.path('odm_meshing')
        self.odm_texturing = self.path('odm_texturing')
        self.odm_25dtexturing = self.path('odm_texturing_25d')
        self.odm_georeferencing = self.path('odm_georeferencing')
        self.odm_25dgeoreferencing = self.path('odm_25dgeoreferencing')
        self.odm_orthophoto = self.path('odm_orthophoto')
        self.odm_pdal = self.path('pdal')

        # important files paths

        # benchmarking
        self.benchmarking = self.path('benchmark.txt')

        # opensfm
        self.opensfm_tracks = io.join_paths(self.opensfm, 'tracks.csv')
        self.opensfm_bundle = io.join_paths(self.opensfm, 'bundle_r000.out')
        self.opensfm_bundle_list = io.join_paths(self.opensfm, 'list_r000.out')
        self.opensfm_image_list = io.join_paths(self.opensfm, 'image_list.txt')
        self.opensfm_reconstruction = io.join_paths(self.opensfm, 'reconstruction.json')
        self.opensfm_reconstruction_meshed = io.join_paths(self.opensfm, 'reconstruction.meshed.json')
        self.opensfm_reconstruction_nvm = io.join_paths(self.opensfm, 'reconstruction.nvm')
        self.opensfm_model = io.join_paths(self.opensfm, 'depthmaps/merged.ply')

        # pmvs
        self.pmvs_rec_path = io.join_paths(self.pmvs, 'recon0')
        self.pmvs_bundle = io.join_paths(self.pmvs_rec_path, 'bundle.rd.out')
        self.pmvs_visdat = io.join_paths(self.pmvs_rec_path, 'vis.dat')
        self.pmvs_options = io.join_paths(self.pmvs_rec_path, 'pmvs_options.txt')
        self.pmvs_model = io.join_paths(self.pmvs_rec_path, 'models/option-0000.ply')

        # odm_meshing
        self.odm_mesh = io.join_paths(self.odm_meshing, 'odm_mesh.ply')
        self.odm_meshing_log = io.join_paths(self.odm_meshing, 'odm_meshing_log.txt')
        self.odm_25dmesh = io.join_paths(self.odm_meshing, 'odm_25dmesh.ply')
        self.odm_25dmeshing_log = io.join_paths(self.odm_meshing, 'odm_25dmeshing_log.txt')

        # texturing (bare names are resolved relative to the texturing
        # directory chosen at run time)
        self.odm_texturing_undistorted_image_path = io.join_paths(
            self.odm_texturing, 'undistorted')
        self.odm_textured_model_obj = 'odm_textured_model.obj'
        self.odm_textured_model_mtl = 'odm_textured_model.mtl'
        # Log is only used by old odm_texturing
        self.odm_texuring_log = 'odm_texturing_log.txt'

        # odm_georeferencing
        self.odm_georeferencing_latlon = io.join_paths(
            self.odm_georeferencing, 'latlon.txt')
        self.odm_georeferencing_coords = io.join_paths(
            self.odm_georeferencing, 'coords.txt')
        self.odm_georeferencing_gcp = io.join_paths(
            self.odm_georeferencing, 'gcp_list.txt')
        self.odm_georeferencing_utm_log = io.join_paths(
            self.odm_georeferencing, 'odm_georeferencing_utm_log.txt')
        self.odm_georeferencing_log = 'odm_georeferencing_log.txt'
        self.odm_georeferencing_transform_file = 'odm_georeferencing_transform.txt'
        self.odm_georeferencing_model_txt_geo = 'odm_georeferencing_model_geo.txt'
        self.odm_georeferencing_model_ply_geo = 'odm_georeferenced_model.ply'
        self.odm_georeferencing_model_obj_geo = 'odm_textured_model_geo.obj'
        self.odm_georeferencing_xyz_file = io.join_paths(
            self.odm_georeferencing, 'odm_georeferenced_model.csv')
        self.odm_georeferencing_las_json = io.join_paths(
            self.odm_georeferencing, 'las.json')
        self.odm_georeferencing_model_las = io.join_paths(
            self.odm_georeferencing, 'odm_georeferenced_model.las')
        self.odm_georeferencing_dem = io.join_paths(
            self.odm_georeferencing, 'odm_georeferencing_model_dem.tif')

        # odm_orthophoto
        self.odm_orthophoto_file = io.join_paths(self.odm_orthophoto, 'odm_orthophoto.png')
        self.odm_orthophoto_tif = io.join_paths(self.odm_orthophoto, 'odm_orthophoto.tif')
        self.odm_orthophoto_corners = io.join_paths(self.odm_orthophoto, 'odm_orthophoto_corners.txt')
        self.odm_orthophoto_log = io.join_paths(self.odm_orthophoto, 'odm_orthophoto_log.txt')
        self.odm_orthophoto_tif_log = io.join_paths(self.odm_orthophoto, 'gdal_translate_log.txt')
        self.odm_orthophoto_gdaladdo_log = io.join_paths(self.odm_orthophoto, 'gdaladdo_log.txt')

    def path(self, *args):
        """Join ``args`` onto the project root."""
        return io.join_paths(self.root_path, *args)
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - */ - -/* KeyMatch.cpp */ -/* Read in keys, match, write results to a file */ - -#include -#include -#include - -#include "keys2a.h" - -int main(int argc, char **argv) { - char *keys1_in; - char *keys2_in; - char *file_out; - double ratio, threshold; - - if (argc != 6) { - printf("Usage: %s \n", argv[0]); - return -1; - } - - keys1_in = argv[1]; - keys2_in = argv[2]; - file_out = argv[3]; - - ratio = atof(argv[4]); - threshold = atof(argv[5]); - - clock_t start = clock(); - - unsigned char *keys1, *keys2; - - int num1 = ReadKeyFile(keys1_in, &keys1); - int num2 = ReadKeyFile(keys2_in, &keys2); - - - /* Compute likely matches between two sets of keypoints */ - std::vector matches = - MatchKeys(num1, keys1, num2, keys2, ratio); - -#if 0 - std::vector matches_sym = - MatchKeys(num2, keys2, num1, keys1); -#endif - - int num_matches = (int) matches.size(); - // int num_matches_sym = (int) matches_sym.size(); - - // printf("num_matches = %d\n", num_matches); - // printf("num_matches_sym = %d\n", num_matches_sym); - -#if 0 - /* Prune asymmetric matches */ - for (int i = 0; i < num_matches; i++) { - int idx1 = matches[i].m_idx1; - int idx2 = matches[i].m_idx2; - - for (int j = 0; j < num_matches_sym; j++) { - if (matches_sym[j].m_idx1 == idx2) { - if (matches_sym[j].m_idx2 != idx1) { - matches.erase(matches.begin() + i); - i--; - num_matches--; - } - - break; - } - } - } -#endif - - clock_t end = clock(); - - int m = (num1 < num2 ? 
num1 : num2); - float r = ((float)num_matches * 100 / m); - - bool used = false; - - if (num_matches >= 16 && r > threshold) { - used = true; - - FILE *f = fopen(file_out, "w"); - - /* Write the number of matches */ - fprintf(f, "%d\n", (int) matches.size()); - - for (int i = 0; i < num_matches; i++) { - fprintf(f, "%d %d\n", matches[i].m_idx1, matches[i].m_idx2); - } - - fclose(f); - } - - - if(used) printf("\n%8d matches (%4.1f%%) took %5.2fs for %s\t", - num_matches, - r, - (end - start) / ((double) CLOCKS_PER_SEC), - file_out); - - -/* printf("%8d matches = %5.2f%% %d in %6.3fs in %s\n", - num_matches, - r, - m, - (end - start) / ((double) CLOCKS_PER_SEC), - file_out); - */ -} diff --git a/patched_files/src/bundler/src/Makefile b/patched_files/src/bundler/src/Makefile deleted file mode 100644 index f7aaf13ab..000000000 --- a/patched_files/src/bundler/src/Makefile +++ /dev/null @@ -1,82 +0,0 @@ -# Makefile for bundler - -CC=gcc -OPTFLAGS=-O3 -Wall - -OS=$(shell uname -o) - -ifeq ($(OS), Cygwin) -BUNDLER=bundler.exe -KEYMATCHFULL=KeyMatchFull.exe -KEYMATCH=KeyMatch.exe -BUNDLE2PMVS=Bundle2PMVS.exe -BUNDLE2VIS=Bundle2Vis.exe -RADIALUNDISTORT=RadialUndistort.exe -else -BUNDLER=bundler -KEYMATCHFULL=KeyMatchFull -KEYMATCH=KeyMatch -BUNDLE2PMVS=Bundle2PMVS -BUNDLE2VIS=Bundle2Vis -RADIALUNDISTORT=RadialUndistort -endif - -INCLUDE_PATH=-I../lib/imagelib -I../lib/sfm-driver -I../lib/matrix \ - -I../lib/5point -I../lib/sba-1.5 -I../lib/ann_1.1_char/include - -LIB_PATH=-L../lib -L../lib/ann_1.1_char/lib - -CPPFLAGS=$(OPTFLAGS) $(OTHERFLAGS) $(INCLUDE_PATH) $(DEFINES) - -BUNDLER_DEFINES=-D__NO_UI__ -D__BUNDLER__ -D__BUNDLER_DISTR__ - -BUNDLER_OBJS=BaseApp.o BundlerApp.o keys.o Register.o Epipolar.o \ - Bundle.o BundleFast.o MatchTracks.o Camera.o Geometry.o \ - ImageData.o SifterUtil.o BaseGeometry.o BundlerGeometry.o \ - BoundingBox.o BundleAdd.o ComputeTracks.o BruteForceSearch.o \ - BundleIO.o ProcessBundle.o BundleTwo.o Decompose.o \ - RelativePose.o Distortion.o 
TwoFrameModel.o LoadJPEG.o - -BUNDLER_LIBS=-limage -lsfmdrv -lsba.v1.5 -lmatrix -lz -llapack -lblas \ - -lcblas -lminpack -lm -l5point -ljpeg -lANN_char -lgfortran - - -all: $(BUNDLER) $(KEYMATCHFULL) $(KEYMATCH) $(BUNDLE2PMVS) $(BUNDLE2VIS) $(RADIALUNDISTORT) - -%.o : %.cpp - $(CXX) -c -o $@ $(CPPFLAGS) $(WXFLAGS) $(BUNDLER_DEFINES) $< - -$(BUNDLER): $(BUNDLER_OBJS) - $(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) \ - $(BUNDLER_DEFINES) $(BUNDLER_OBJS) $(BUNDLER_LIBS) - cp $@ ../bin - -$(KEYMATCHFULL): KeyMatchFull.o keys2a.o - $(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) KeyMatchFull.o keys2a.o \ - -lANN_char -lz - cp $@ ../bin - -$(KEYMATCH): KeyMatch.o keys2a.o - $(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) KeyMatch.o keys2a.o \ - -lANN_char -lz - cp $@ ../bin - -$(BUNDLE2PMVS): Bundle2PMVS.o LoadJPEG.o - $(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) Bundle2PMVS.o LoadJPEG.o \ - -limage -lmatrix -llapack -lblas -lcblas -lgfortran \ - -lminpack -ljpeg - cp $@ ../bin - -$(BUNDLE2VIS): Bundle2Vis.o - $(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) Bundle2Vis.o - cp $@ ../bin - -$(RADIALUNDISTORT): RadialUndistort.o LoadJPEG.o - $(CXX) -o $@ $(CPPFLAGS) $(LIB_PATH) $^ \ - -limage -lmatrix -llapack -lblas -lcblas -lgfortran \ - -lminpack -ljpeg - cp $@ ../bin - -clean: - rm -f *.o *~ $(BUNDLER) $(KEYMATCHFULL) $(KEYMATCH) $(BUNDLE2PMVS) \ - $(BUNDLE2VIS) $(RADIALUNDISTORT) diff --git a/patched_files/src/bundler/src/keys2a.cpp b/patched_files/src/bundler/src/keys2a.cpp deleted file mode 100644 index ff75700d9..000000000 --- a/patched_files/src/bundler/src/keys2a.cpp +++ /dev/null @@ -1,465 +0,0 @@ -/* -* Copyright (c) 2008-2010 Noah Snavely (snavely (at) cs.cornell.edu) -* and the University of Washington -* -* This program is free software; you can redistribute it and/or modify -* it under the terms of the GNU General Public License as published by -* the Free Software Foundation; either version 2 of the License, or -* (at your option) any later version. 
-* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -* GNU General Public License for more details. -* -*/ - -/* keys2.cpp */ -/* Class for SIFT keypoints */ - -#include -#include -#include -#include -#include -#include - -#include - -#include "keys2a.h" - -int GetNumberOfKeysNormal(FILE *fp) -{ - int num, len; - - if (fscanf(fp, "%d %d", &num, &len) != 2) { - printf("Invalid keypoint file.\n"); - return 0; - } - - return num; -} - -int GetNumberOfKeysGzip(gzFile fp) -{ - int num, len; - - char header[256]; - gzgets(fp, header, 256); - - if (sscanf(header, "%d %d", &num, &len) != 2) { - printf("Invalid keypoint file.\n"); - return 0; - } - - return num; -} - -/* Returns the number of keys in a file */ -int GetNumberOfKeys(const char *filename) -{ - FILE *file; - - file = fopen (filename, "r"); - if (! file) { - /* Try to file a gzipped keyfile */ - char buf[1024]; - sprintf(buf, "%s.gz", filename); - gzFile gzf = gzopen(buf, "rb"); - - if (gzf == NULL) { - printf("Could not open file: %s\n", filename); - return 0; - } else { - int n = GetNumberOfKeysGzip(gzf); - gzclose(gzf); - return n; - } - } - - int n = GetNumberOfKeysNormal(file); - fclose(file); - return n; -} - -/* This reads a keypoint file from a given filename and returns the list -* of keypoints. */ -int ReadKeyFile(const char *filename, unsigned char **keys, keypt_t **info) -{ - FILE *file; - - file = fopen (filename, "r"); - if (! 
file) { - /* Try to file a gzipped keyfile */ - char buf[1024]; - sprintf(buf, "%s.bin", filename); - FILE *binfile = fopen(buf, "rb"); - - if (binfile == NULL) { - printf("Could not open file: %s\n", filename); - return 0; - } else { - int n = ReadKeysBin(binfile, keys, info); - fclose(binfile); - return n; - } - } - - int n = ReadKeys(file, keys, info); - fclose(file); - return n; - - // return ReadKeysMMAP(file); -} - -#if 0 -/* Read keys using MMAP to speed things up */ -std::vector ReadKeysMMAP(FILE *fp) -{ - int i, j, num, len, val, n; - - std::vector kps; - - struct stat sb; - - /* Stat the file */ - if (fstat(fileno(fp), &sb) < 0) { - printf("[ReadKeysMMAP] Error: could not stat file\n"); - return kps; - } - - char *file = (char *)mmap(NULL, sb.st_size, PROT_READ, MAP_SHARED, - fileno(fp), 0); - - char *file_start = file; - - if (sscanf(file, "%d %d%n", &num, &len, &n) != 2) { - printf("[ReadKeysMMAP] Invalid keypoint file beginning."); - return kps; - } - - file += n; - - if (len != 128) { - printf("[ReadKeysMMAP] Keypoint descriptor length invalid " - "(should be 128)."); - return kps; - } - - for (i = 0; i < num; i++) { - /* Allocate memory for the keypoint. */ - unsigned char *d = new unsigned char[len]; - float x, y, scale, ori; - - if (sscanf(file, "%f %f %f %f%n", &y, &x, &scale, &ori, &n) != 4) { - printf("[ReadKeysMMAP] Invalid keypoint file format."); - return kps; - } - - file += n; - - for (j = 0; j < len; j++) { - if (sscanf(file, "%d%n", &val, &n) != 1 || val < 0 || val > 255) { - printf("[ReadKeysMMAP] Invalid keypoint file value."); - return kps; - } - d[j] = (unsigned char) val; - file += n; - } - - kps.push_back(new Keypoint(x, y, scale, ori, d)); - } - - /* Unmap */ - if (munmap(file_start, sb.st_size) < 0) { - printf("[ReadKeysMMAP] Error: could not unmap memory\n"); - return kps; - } - - return kps; -} -#endif - -/* Read keypoints from the given file pointer and return the list of -* keypoints. 
The file format starts with 2 integers giving the total -* number of keypoints and the size of descriptor vector for each -* keypoint (currently assumed to be 128). Then each keypoint is -* specified by 4 floating point numbers giving subpixel row and -* column location, scale, and orientation (in radians from -PI to -* PI). Then the descriptor vector for each keypoint is given as a -* list of integers in range [0,255]. */ -int ReadKeys(FILE *fp, unsigned char **keys, keypt_t **info) -{ - int i, num, len; - - std::vector kps; - - if (fscanf(fp, "%d %d", &num, &len) != 2) { - printf("Invalid keypoint file\n"); - return 0; - } - - if (len != 128) { - printf("Keypoint descriptor length invalid (should be 128)."); - return 0; - } - - *keys = new unsigned char[128 * num + 8]; - - if (info != NULL) - *info = new keypt_t[num]; - - unsigned char *p = *keys; - for (i = 0; i < num; i++) { - /* Allocate memory for the keypoint. */ - // short int *d = new short int[128]; - float x, y, scale, ori; - - if (fscanf(fp, "%f %f %f %f\n", &y, &x, &scale, &ori) != 4) { - printf("Invalid keypoint file format."); - return 0; - } - - if (info != NULL) { - (*info)[i].x = x; - (*info)[i].y = y; - (*info)[i].scale = scale; - (*info)[i].orient = ori; - } - - char buf[1024]; - for (int line = 0; line < 7; line++) { - fgets(buf, 1024, fp); - - if (line < 6) { - sscanf(buf, - "%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu " - "%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu", - p+0, p+1, p+2, p+3, p+4, p+5, p+6, p+7, p+8, p+9, - p+10, p+11, p+12, p+13, p+14, - p+15, p+16, p+17, p+18, p+19); - - p += 20; - } else { - sscanf(buf, - "%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu", - p+0, p+1, p+2, p+3, p+4, p+5, p+6, p+7); - p += 8; - } - } - } - - return num; // kps; -} - -int ReadKeysBin(FILE* fp, unsigned char **keys, keypt_t **info) -{ - - int32_t num_keys; - fread(&num_keys, sizeof(int), 1, fp); -// printf("num_keys: %d\n", num_keys); - - - *keys = new unsigned char[128 * num_keys + 8]; - - 
if (info != NULL) - *info = new keypt_t[num_keys]; - - unsigned char *p = *keys; - for (int i = 0; i < num_keys; i++) { - /* Allocate memory for the keypoint. */ - // short int *d = new short int[128]; - float x, y, scale, ori; - - fread(&x, sizeof(float), 1, fp); - fread(&y, sizeof(float), 1, fp); - fread(&scale, sizeof(float), 1, fp); - fread(&ori, sizeof(float), 1, fp); - - if (info != NULL) { - (*info)[i].x = x; - (*info)[i].y = y; - (*info)[i].scale = scale; - (*info)[i].orient = ori; - } - - fread(p, sizeof(unsigned char), 128, fp); - -// printf("key %d: %f, %f, %f, %f - %d\n", i, x, y, scale, ori, *p); - - p += 128; - } - - - return num_keys; -} -int ReadKeysGzip(gzFile fp, unsigned char **keys, keypt_t **info) -{ - int i, num, len; - - std::vector kps; - char header[256]; - gzgets(fp, header, 256); - - if (sscanf(header, "%d %d", &num, &len) != 2) { - printf("Invalid keypoint file.\n"); - return 0; - } - - if (len != 128) { - printf("Keypoint descriptor length invalid (should be 128)."); - return 0; - } - - *keys = new unsigned char[128 * num + 8]; - - if (info != NULL) - *info = new keypt_t[num]; - - unsigned char *p = *keys; - for (i = 0; i < num; i++) { - /* Allocate memory for the keypoint. 
*/ - // short int *d = new short int[128]; - float x, y, scale, ori; - char buf[1024]; - gzgets(fp, buf, 1024); - - if (sscanf(buf, "%f %f %f %f\n", &y, &x, &scale, &ori) != 4) { - printf("Invalid keypoint file format."); - return 0; - } - - if (info != NULL) { - (*info)[i].x = x; - (*info)[i].y = y; - (*info)[i].scale = scale; - (*info)[i].orient = ori; - } - - for (int line = 0; line < 7; line++) { - char *str = gzgets(fp, buf, 1024); - assert(str != Z_NULL); - - if (line < 6) { - sscanf(buf, - "%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu " - "%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu", - p+0, p+1, p+2, p+3, p+4, p+5, p+6, p+7, p+8, p+9, - p+10, p+11, p+12, p+13, p+14, - p+15, p+16, p+17, p+18, p+19); - - p += 20; - } else { - sscanf(buf, - "%hhu %hhu %hhu %hhu %hhu %hhu %hhu %hhu", - p+0, p+1, p+2, p+3, p+4, p+5, p+6, p+7); - p += 8; - } - } - } - - assert(p == *keys + 128 * num); - - return num; // kps; -} - -/* Create a search tree for the given set of keypoints */ -ANNkd_tree *CreateSearchTree(int num_keys, unsigned char *keys) -{ - // clock_t start = clock(); - - /* Create a new array of points */ - ANNpointArray pts = annAllocPts(num_keys, 128); - - for (int i = 0; i < num_keys; i++) { - memcpy(pts[i], keys + 128 * i, sizeof(unsigned char) * 128); - } - - /* Create a search tree for k2 */ - ANNkd_tree *tree = new ANNkd_tree(pts, num_keys, 128, 16); - // clock_t end = clock(); - - // printf("Building tree took %0.3fs\n", - // (end - start) / ((double) CLOCKS_PER_SEC)); - - return tree; -} - -std::vector MatchKeys(int num_keys1, unsigned char *k1, - ANNkd_tree *tree2, - double ratio, int max_pts_visit) -{ - annMaxPtsVisit(max_pts_visit); - std::vector matches; - - /* Now do the search */ - // clock_t start = clock(); - for (int i = 0; i < num_keys1; i++) { - ANNidx nn_idx[2]; - ANNdist dist[2]; - - tree2->annkPriSearch(k1 + 128 * i, 2, nn_idx, dist, 0.0); - - if (((double) dist[0]) < ratio * ratio * ((double) dist[1])) { - 
matches.push_back(KeypointMatch(i, nn_idx[0])); - } - } - // clock_t end = clock(); - - // printf("Searching tree took %0.3fs\n", - // (end - start) / ((double) CLOCKS_PER_SEC)); - - return matches; -} - -/* Compute likely matches between two sets of keypoints */ -std::vector MatchKeys(int num_keys1, unsigned char *k1, - int num_keys2, unsigned char *k2, - double ratio, int max_pts_visit) -{ - annMaxPtsVisit(max_pts_visit); - - int num_pts = 0; - std::vector matches; - - num_pts = num_keys2; - clock_t start = clock(); - - /* Create a new array of points */ - ANNpointArray pts = annAllocPts(num_pts, 128); - - for (int i = 0; i < num_pts; i++) { - memcpy(pts[i], k2 + 128 * i, sizeof(unsigned char) * 128); - } - - /* Create a search tree for k2 */ - ANNkd_tree *tree = new ANNkd_tree(pts, num_pts, 128, 16); - clock_t end = clock(); - - // printf("Building tree took %0.3fs\n", - // (end - start) / ((double) CLOCKS_PER_SEC)); - - /* Now do the search */ - start = clock(); - for (int i = 0; i < num_keys1; i++) { - ANNidx nn_idx[2]; - ANNdist dist[2]; - - tree->annkPriSearch(k1 + 128 * i, 2, nn_idx, dist, 0.0); - - if (((double) dist[0]) < ratio * ratio * ((double) dist[1])) { - matches.push_back(KeypointMatch(i, nn_idx[0])); - } - } - end = clock(); - // printf("Searching tree took %0.3fs\n", - // (end - start) / ((double) CLOCKS_PER_SEC)); - - /* Cleanup */ - annDeallocPts(pts); - // annDeallocPt(axis_weights); - - delete tree; - - return matches; -} diff --git a/patched_files/src/bundler/src/keys2a.h b/patched_files/src/bundler/src/keys2a.h deleted file mode 100644 index 805eeff8c..000000000 --- a/patched_files/src/bundler/src/keys2a.h +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2008-2010 Noah Snavely (snavely (at) cs.cornell.edu) - * and the University of Washington - * - * This program is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either 
version 2 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - */ - -/* keys2a.h */ -/* Class for SIFT keypoints */ - -#ifndef __keys2a_h__ -#define __keys2a_h__ - -#include -#include - -#include - -#include "ANN/ANN.h" - -using namespace ann_1_1_char; - -class Keypoint { -public: - Keypoint(float x, float y, float scale, float ori, short int *d) : - m_x(x), m_y(y), m_scale(scale), m_ori(ori), m_d(d) - { } - - float m_x, m_y; /* Subpixel location of keypoint. */ - float m_scale, m_ori; /* Scale and orientation (range [-PI,PI]) */ - short int *m_d; /* Vector of descriptor values */ -}; - - -/* Data struct for matches */ -class KeypointMatch { -public: - KeypointMatch() - { } - -#if 0 - KeypointMatch(int idx1, int idx2, float x1, float y1, float x2, float y2) : - m_idx1(idx1), m_idx2(idx2), m_x1(x1), m_y1(y1), m_x2(x2), m_y2(y2) - { } -#endif - - KeypointMatch(int idx1, int idx2) : - m_idx1(idx1), m_idx2(idx2) - { } - - int m_idx1, m_idx2; - // float m_x1, m_y1; - // float m_x2, m_y2; -}; - -typedef struct { - float x, y; - float scale; - float orient; -} keypt_t; - -/* Returns the number of keys in a file */ -int GetNumberOfKeys(const char *filename); - -/* This reads a keypoint file from a given filename and returns the list - * of keypoints. */ -int ReadKeyFile(const char *filename, unsigned char **keys, - keypt_t **info = NULL); - -int ReadKeyPositions(const char *filename, keypt_t **info); - -/* Read keypoints from the given file pointer and return the list of - * keypoints. The file format starts with 2 integers giving the total - * number of keypoints and the size of descriptor vector for each - * keypoint (currently assumed to be 128). 
Then each keypoint is - * specified by 4 floating point numbers giving subpixel row and - * column location, scale, and orientation (in radians from -PI to - * PI). Then the descriptor vector for each keypoint is given as a - * list of integers in range [0,255]. */ -int ReadKeys(FILE *fp, unsigned char **keys, keypt_t **info = NULL); -int ReadKeysBin(FILE *fp, unsigned char **keys, keypt_t **info = NULL); -int ReadKeysGzip(gzFile fp, unsigned char **keys, keypt_t **info = NULL); - -/* Read keys using MMAP to speed things up */ -std::vector ReadKeysMMAP(FILE *fp); - -/* Create a search tree for the given set of keypoints */ -ANNkd_tree *CreateSearchTree(int num_keys, unsigned char *keys); - -/* Compute likely matches between two sets of keypoints */ -std::vector MatchKeys(int num_keys1, unsigned char *k1, - int num_keys2, unsigned char *k2, - double ratio = 0.6, - int max_pts_visit = 200); - -std::vector MatchKeys(int num_keys1, unsigned char *k1, - ANNkd_tree *tree2, - double ratio = 0.6, - int max_pts_visit = 200); - -#endif /* __keys2_h__ */ diff --git a/patched_files/src/graclus/index.html b/patched_files/src/graclus/index.html new file mode 100644 index 000000000..f724f091f --- /dev/null +++ b/patched_files/src/graclus/index.html @@ -0,0 +1 @@ + Web Authentication Redirect diff --git a/run.pl b/run.pl deleted file mode 100644 index d2d605fa4..000000000 --- a/run.pl +++ /dev/null @@ -1,657 +0,0 @@ -#!/usr/bin/perl - -## -## created by Daniel Schwarz/daniel.schwarz@topoi.org -## released under Creative Commons/CC-BY -## Attribution -## - -use File::Basename; -use File::Copy; -use File::Spec; -use Data::Dumper; -use Time::localtime; -use Switch; -use POSIX qw(strftime); - -## the defs - -chomp($CURRENT_DIR = `pwd`); -chomp($BIN_PATH_REL = `dirname $0`); -chomp($OS = `uname -o`); -chomp($CORES = `ls -d /sys/devices/system/cpu/cpu[[:digit:]]* | wc -w`); - -if(!File::Spec->file_name_is_absolute($BIN_PATH_REL)){ - $BIN_PATH_ABS = File::Spec->rel2abs($BIN_PATH_REL); -} 
else { - $BIN_PATH_ABS = File::Spec->rel2abs($BIN_PATH_REL); -} - -require "$BIN_PATH_ABS/ccd_defs.pl"; - -$BIN_PATH = $BIN_PATH_ABS."/bin"; - -my %objectStats = { - count => 0, - good => 0, - bad => 0, - minWidth => 0, - minHeight => 0, - maxWidth => 0, - maxHeight => 0 - -}; - -my %jobOptions = { - resizeTo => 0, - srcDir => $CURRENT_DIR -}; - -my %args = {}; - -$jobOptions{srcDir} = "$CURRENT_DIR"; - -sub run { - system($_[0]); - - if($? != 0){ - die "\n\nquitting cause: \n\t$_[0]\nreturned with code ".$?."\n"; - } -} -sub now { - system("echo `date`"); -} - -sub parseArgs { - - ## defaults - $args{"--match-size"} = "200"; - - $args{"--resize-to"} = "1200"; - - $args{"--start-with"} = "resize"; - $args{"--end-with"} = "pmvs"; - - $args{"--cmvs-maxImages"} = 100; - - $args{"--matcher-ratio"} = 0.6; - $args{"--matcher-threshold"} = 2.0; - - $args{"--pmvs-level"} = 1; - $args{"--pmvs-csize"} = 2; - $args{"--pmvs-threshold"} = 0.7; - $args{"--pmvs-wsize"} = 7; - $args{"--pmvs-minImageNum"} = 3; - - for($i = 0; $i <= $#ARGV; $i++) { - if($ARGV[$i] =~ /^--[^a-z\-]*/){ - $args{"$ARGV[$i]"} = true; - - if(!($ARGV[$i+1] =~ /^--[^a-z\-]*/)) { - if($ARGV[$i] eq "--resize-to"){ - if($ARGV[$i+1] eq "orig" || $ARGV[$i+1] =~ /^[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - if($ARGV[$i] eq "--start-with"){ - if($ARGV[$i+1] eq "resize" || $ARGV[$i+1] eq "getKeypoints" || $ARGV[$i+1] eq "match" || $ARGV[$i+1] eq "bundler" || $ARGV[$i+1] eq "cmvs" || $ARGV[$i+1] eq "pmvs"){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]."\n\t valid values are \"resize\", \"getKeypoints\", \"match\", \"bundler\", \"cmvs\", \"pmvs\""; - } - } - if($ARGV[$i] eq "--end-with"){ - if($ARGV[$i+1] eq "resize" || $ARGV[$i+1] eq "getKeypoints" || $ARGV[$i+1] eq "match" || $ARGV[$i+1] eq "bundler" || $ARGV[$i+1] eq "cmvs" || $ARGV[$i+1] eq "pmvs"){ - 
$args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]."\n\t valid values are \"resize\", \"getKeypoints\", \"match\", \"bundler\", \"cmvs\", \"pmvs\""; - } - } - if($ARGV[$i] eq "--run-only"){ - if($ARGV[$i+1] eq "resize" || $ARGV[$i+1] eq "getKeypoints" || $ARGV[$i+1] eq "match" || $ARGV[$i+1] eq "bundler" || $ARGV[$i+1] eq "cmvs" || $ARGV[$i+1] eq "pmvs"){ - $args{"--start-with"} = $ARGV[$i+1]; - $args{"--end-with"} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]."\n\t valid values are \"resize\", \"getKeypoints\", \"match\", \"bundler\", \"cmvs\", \"pmvs\""; - } - } - if($ARGV[$i] eq "--matcher-threshold"){ - if($ARGV[$i+1] =~ /^-?[0-9]*\.?[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - if($ARGV[$i] eq "--matcher-ratio"){ - if($ARGV[$i+1] =~ /^-?[0-9]*\.?[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - if($ARGV[$i] eq "--cmvs-maxImages"){ - if($ARGV[$i+1] =~ /^[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - if($ARGV[$i] eq "--pmvs-level"){ - if($ARGV[$i+1] =~ /^[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - if($ARGV[$i] eq "--pmvs-csize"){ - if($ARGV[$i+1] =~ /^[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - if($ARGV[$i] eq "--pmvs-threshold"){ - if($ARGV[$i+1] =~ /^-?[0-9]*\.?[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - if($ARGV[$i] eq "--pmvs-wsize"){ - if($ARGV[$i+1] =~ /^[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for 
\"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - if($ARGV[$i] eq "--pmvs-minImageNum"){ - if($ARGV[$i+1] =~ /^[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - - if($ARGV[$i] eq "--force-focal"){ - if($ARGV[$i+1] =~ /^[0-9]*\.?[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - if($ARGV[$i] eq "--force-ccd"){ - if($ARGV[$i+1] =~ /^[0-9]*\.?[0-9]*$/){ - $args{$ARGV[$i]} = $ARGV[$i+1]; - } else { - die "\n invalid parameter for \"".$ARGV[$i]."\": ".$ARGV[$i+1]; - } - } - } - } - } - - if($args{"--help"}){ - print "\nusgae: run.pl [options]"; - print "\nit should be run from the folder contining the images to which should reconstructed"; - print "\n"; - print "\noptions:"; - print "\n --help: "; - print "\n prints this screen"; - print "\n "; - - print "\n --resize-to: "; - print "\n default: 1200"; - print "\n will resize the images so that the maximum width/height of the images are smaller or equal to the specified number"; - print "\n if \"--resize-to orig\" is used it will use the images without resizing"; - print "\n "; - - print "\n --start-with: <\"resize\"|\"getKeypoints\"|\"match\"|\"bundler\"|\"cmvs\"|\"pmvs\">"; - print "\n default: resize"; - print "\n will start the sript at the specified step"; - print "\n "; - - print "\n --end-with: <\"resize\"|\"getKeypoints\"|\"match\"|\"bundler\"|\"cmvs\"|\"pmvs\">"; - print "\n default: pmvs"; - print "\n will stop the sript after the specified step"; - print "\n "; - - print "\n --run-only: <\"resize\"|\"getKeypoints\"|\"match\"|\"bundler\"|\"cmvs\"|\"pmvs\">"; - print "\n will only execute the specified step"; - print "\n equal to --start-with --end-with "; - print "\n "; - - print "\n --force-focal: "; - print "\n override the focal length information for the images"; - print "\n "; - - print "\n --force-ccd: "; - print "\n override the ccd width information 
for the images"; - print "\n "; - - print "\n --matcher-threshold: (percent)"; - print "\n default: 2.0"; - print "\n ignore matched keypoints if the two images share less then percent of keypoints"; - print "\n "; - - print "\n --matcher-ratio: "; - print "\n default: 100"; - print "\n the maximum number of images per cluster"; - print "\n "; - - print "\n --pmvs-level: "; - print "\n default: 1"; - - print "\n --pmvs-csize: "; - print "\n default: 2"; - - print "\n --pmvs-threshold: "; - print "\n default: 0.7"; - - print "\n --pmvs-wsize: "; - print "\n default: 7"; - - print "\n --pmvs-minImageNum: "; - print "\n default: 3"; - print "\n see http://grail.cs.washington.edu/software/pmvs/documentation.html for an explanation of these parameters"; - print "\n"; - - exit; - } - - print "\n - configuration:"; - - foreach $args_key (sort keys %args) { - if($args{$args_key} ne ""){ - print "\n $args_key: $args{$args_key}"; - } - } - - print "\n"; - print "\n"; -} - -sub prepareObjects { - ## get the source list - @source_files = `ls -1 | egrep "\.[jJ]{1}[pP]{1}[eE]{0,1}[gG]{1}"`; - - print "\n - source files - "; now(); print "\n"; - - foreach $file (@source_files) { - chomp($file); - - chomp($file_make = `jhead \"$file\" | grep "Camera make"`); - chomp($file_model = `jhead \"$file\" | grep "Camera model"`); - chomp($file_focal = `jhead \"$file\" | grep "Focal length"`); - chomp($file_ccd = `jhead \"$file\" | grep "CCD width"`); - chomp($file_resolution = `jhead \"$file\" | grep "Resolution"`); - - my %fileObject = {}; - - chomp(($fileObject{src}) = $file); - chomp(($fileObject{base}) = $file); - $fileObject{base} =~ s/\.[^\.]*$//; - - chomp(($fileObject{make}) = $file_make =~ /: ([^\n\r]*)/); - chomp(($fileObject{model}) = $file_model =~ /: ([^\n\r]*)/); - - $fileObject{make} =~ s/^\s+//; $fileObject{make} =~ s/\s+$//; - $fileObject{model} =~ s/^\s+//; $fileObject{model} =~ s/\s+$//; - - $fileObject{id} = $fileObject{make}." 
".$fileObject{model}; - - ($fileObject{width}, $fileObject{height}) = $file_resolution =~ /: ([0-9]*) x ([0-9]*)/; - - if(!$args{"--force-focal"}){ - ($fileObject{focal}) = $file_focal =~ /:[\ ]*([0-9\.]*)mm/; - } else { - $fileObject{focal} = $args{"--force-focal"}; - } - - if(!$args{"--force-ccd"}){ - ($fileObject{ccd}) = $file_ccd =~ /:[\ ]*([0-9\.]*)mm/; - - if(!$fileObject{ccd}){; - $fileObject{ccd} = $ccdWidths{$fileObject{id}}; - } - } else { - $fileObject{ccd} = $args{"--force-ccd"}; - } - - if($fileObject{ccd} && $fileObject{focal} && $fileObject{width} && $fileObject{height}){ - if($fileObject{width} > $fileObject{height}){ - $fileObject{focalpx} = $fileObject{width} * ($fileObject{focal} / $fileObject{ccd}); - } else { - $fileObject{focalpx} = $fileObject{height} * ($fileObject{focal} / $fileObject{ccd}); - } - - $fileObject{isOk} = true; - $objectStats{good}++; - - print "\n using $fileObject{src} dimensions: $fileObject{width}x$fileObject{height} / focal: $fileObject{focal}mm / ccd: $fileObject{ccd}mm"; - } else { - $fileObject{isOk} = false; - $objectStats{bad}++; - - print "\n no CCD width or focal length found for $fileObject{src} - camera: \"$fileObject{id}\""; - } - - $objectStats{count}++; - - if($objectStats{minWidth} == 0) { $objectStats{minWidth} = $fileObject{width}; } - if($objectStats{minHeight} == 0) { $objectStats{minHeight} = $fileObject{height}; } - - $objectStats{minWidth} = $objectStats{minWidth} < $fileObject{width} ? $objectStats{minWidth} : $fileObject{width}; - $objectStats{minHeight} = $objectStats{minHeight} < $fileObject{height} ? $objectStats{minHeight} : $fileObject{height}; - $objectStats{maxWidth} = $objectStats{maxWidth} > $fileObject{width} ? $objectStats{maxWidth} : $fileObject{width}; - $objectStats{maxHeight} = $objectStats{maxHeight} > $fileObject{height} ? 
$objectStats{maxHeight} : $fileObject{height}; - - push(@objects, \%fileObject); - } - - - - if(!$objectStats{good}){ - print "\n\n found no usable images - quitting\n"; - die; - } else { - print "\n\n found $objectStats{good} usable images"; - } - - print "\n"; - - $jobOptions{resizeTo} = $args{"--resize-to"}; - - print "\n using max image size of $jobOptions{resizeTo} x $jobOptions{resizeTo}"; - - $jobOptions{jobDir} = "$jobOptions{srcDir}/reconstruction-with-image-size-$jobOptions{resizeTo}"; - - $jobOptions{step_1_convert} = "$jobOptions{jobDir}/_convert.templist.txt"; - $jobOptions{step_1_vlsift} = "$jobOptions{jobDir}/_vlsift.templist.txt"; - $jobOptions{step_1_gzip} = "$jobOptions{jobDir}/_gzip.templist.txt"; - - $jobOptions{step_2_filelist} = "$jobOptions{jobDir}/_filelist.templist.txt"; - $jobOptions{step_2_macthes_jobs} = "$jobOptions{jobDir}/_matches_jobs.templist.txt"; - $jobOptions{step_2_matches_dir} = "$jobOptions{jobDir}/matches"; - $jobOptions{step_2_matches} = "$jobOptions{jobDir}/matches.init.txt"; - - $jobOptions{step_3_filelist} = "$jobOptions{jobDir}/list.txt"; - $jobOptions{step_3_bundlerOptions} = "$jobOptions{jobDir}/options.txt"; - - mkdir($jobOptions{jobDir}); - - foreach $fileObject (@objects) { - if($fileObject->{isOk}){ - $fileObject->{step_0_resizedImage} = "$jobOptions{jobDir}/$fileObject->{base}.jpg"; - - $fileObject->{step_1_pgmFile} = "$jobOptions{jobDir}/$fileObject->{base}.pgm"; - $fileObject->{step_1_keyFile} = "$jobOptions{jobDir}/$fileObject->{base}.key"; - $fileObject->{step_1_gzFile} = "$jobOptions{jobDir}/$fileObject->{base}.key.gz"; - } - } - -# exit -} - -sub resize { - print "\n"; - print "\n - preparing images - "; now(); print "\n"; - print "\n"; - - chdir($jobOptions{jobDir}); - - foreach $fileObject (@objects) { - if($fileObject->{isOk}){ - unless (-e "$fileObject->{step_0_resizedImage}"){ - if($jobOptions{resizeTo} != "orig" && (($fileObject->{width} > $jobOptions{resizeTo}) || ($fileObject->{height} > 
$jobOptions{resizeTo}))){ - print "\n resising $fileObject->{src} \tto $fileObject->{step_0_resizedImage}"; - - run("convert -resize $jobOptions{resizeTo}x$jobOptions{resizeTo} -quality 100 \"$jobOptions{srcDir}/$fileObject->{src}\" \"$fileObject->{step_0_resizedImage}\""); - - } else { - print "\n copying $fileObject->{src} \tto $fileObject->{step_0_resizedImage}"; - - copy("$CURRENT_DIR/$fileObject->{src}", "$fileObject->{step_0_resizedImage}"); - } - } else { - print "\n using existing $fileObject->{src} \tto $fileObject->{step_0_resizedImage}"; - } - - chomp($file_resolution = `jhead \"$fileObject->{step_0_resizedImage}\" | grep "Resolution"`); - ($fileObject->{width}, $fileObject->{height}) = $file_resolution =~ /: ([0-9]*) x ([0-9]*)/; - print "\t ($fileObject->{width} x $fileObject->{height})"; - } - } - - if($args{"--end-with"} ne "resize"){ - getKeypoints(); - } -} - -sub getKeypoints { - print "\n"; - print "\n - finding keypoints - "; now(); print "\n"; - print "\n\n"; - - chdir($jobOptions{jobDir}); - - $vlsiftJobs = ""; - - $c = 0; - - foreach $fileObject (@objects) { - $c = $c+1; - - if($fileObject->{isOk}){ - if($args{"--lowe-sift"}){ - $vlsiftJobs .= "echo -n \"$c/$objectStats{good} - \" && convert -format pgm \"$fileObject->{step_0_resizedImage}\" \"$fileObject->{step_1_pgmFile}\""; - $vlsiftJobs .= " && \"$BIN_PATH/sift\" < \"$fileObject->{step_1_pgmFile}\" > \"$fileObject->{step_1_keyFile}\""; - $vlsiftJobs .= " && gzip -f \"$fileObject->{step_1_keyFile}\""; - $vlsiftJobs .= " && rm -f \"$fileObject->{step_1_pgmFile}\""; - $vlsiftJobs .= " && rm -f \"$fileObject->{step_1_keyFile}.sift\"\n"; - } else { - unless (-e "$jobOptions{jobDir}/$fileObject->{base}.key.bin") { - $vlsiftJobs .= "echo -n \"$c/$objectStats{good} - \" && convert -format pgm \"$fileObject->{step_0_resizedImage}\" \"$fileObject->{step_1_pgmFile}\""; - $vlsiftJobs .= " && \"$BIN_PATH/vlsift\" \"$fileObject->{step_1_pgmFile}\" -o \"$fileObject->{step_1_keyFile}.sift\" > /dev/null 
&& perl \"$BIN_PATH/../convert_vlsift_to_lowesift.pl\" \"$jobOptions{jobDir}/$fileObject->{base}\""; - $vlsiftJobs .= " && gzip -f \"$fileObject->{step_1_keyFile}\""; - $vlsiftJobs .= " && rm -f \"$fileObject->{step_1_pgmFile}\""; - $vlsiftJobs .= " && rm -f \"$fileObject->{step_1_keyFile}.sift\"\n"; - } else { - print "using existing $jobOptions{jobDir}/$fileObject->{base}.key.bin\n"; - } - } - } - } - - open (SIFT_DEST, ">$jobOptions{step_1_vlsift}"); - print SIFT_DEST $vlsiftJobs; - close(SIFT_DEST); - - run("\"$BIN_PATH/parallel\" --halt-on-error 1 -j+0 < \"$jobOptions{step_1_vlsift}\""); - - if($args{"--end-with"} ne "getKeypoints"){ - match(); - } -} - -sub match { - print "\n"; - print "\n - matching keypoints - "; now(); print "\n"; - print "\n"; - - chdir($jobOptions{jobDir}); - mkdir($jobOptions{step_2_matches_dir}); - - $matchesJobs = ""; - - my $c = 0; - my $t = ($objectStats{good}-1) * ($objectStats{good}/2); - - for (my $i = 0; $i < $objectStats{good}; $i++) { - for (my $j = $i+1; $j < $objectStats{good}; $j++) { - $c++; - unless (-e "$jobOptions{step_2_matches_dir}/$i-$j.txt"){ - - - $matchesJobs .= "echo -n \".\" && touch \"$jobOptions{step_2_matches_dir}/$i-$j.txt\" && \"$BIN_PATH/KeyMatch\" \"@objects[$i]->{step_1_keyFile}\" \"@objects[$j]->{step_1_keyFile}\" \"$jobOptions{step_2_matches_dir}/$i-$j.txt\" $args{'--matcher-ratio'} $args{'--matcher-threshold'}\n"; - } - } - } - - open (MATCH_DEST, ">$jobOptions{step_2_macthes_jobs}"); - print MATCH_DEST $matchesJobs; - close(MATCH_DEST); - - run("\"$BIN_PATH/parallel\" --halt-on-error 1 -j+0 < \"$jobOptions{step_2_macthes_jobs}\""); - - run("rm -f \"$jobOptions{step_2_matches}\""); - - for (my $i = 0; $i < $objectStats{good}; $i++) { - for (my $j = $i+1; $j < $objectStats{good}; $j++) { - $c++; - - if (-e "$jobOptions{step_2_matches_dir}/$i-$j.txt" && (-s "$jobOptions{step_2_matches_dir}/$i-$j.txt") > 0) { - run("echo \"$i $j\" >> \"$jobOptions{step_2_matches}\" && cat 
\"$jobOptions{step_2_matches_dir}/$i-$j.txt\" >> \"$jobOptions{step_2_matches}\""); - } - } - } - - foreach $fileObject (@objects) { - if($fileObject->{isOk}){ - if($fileObject->{isOk}){ - $filesList .= "$fileObject->{step_1_keyFile}\n"; - } - } - } - - open (MATCH_DEST, ">$jobOptions{step_2_filelist}"); - print MATCH_DEST $filesList; - close(MATCH_DEST); - - # run("\"$BIN_PATH/KeyMatchFull\" \"$jobOptions{step_2_filelist}\" \"$jobOptions{step_2_matches}\" "); - - if($args{"--end-with"} ne "match"){ - bundler(); - } -} - -sub bundler { - print "\n"; - print "\n - running bundler - "; now(); print "\n"; - print "\n"; - - chdir($jobOptions{jobDir}); - - mkdir($jobOptions{jobDir}."/bundle"); - mkdir($jobOptions{jobDir}."/pmvs"); - mkdir($jobOptions{jobDir}."/pmvs/txt"); - mkdir($jobOptions{jobDir}."/pmvs/visualize"); - mkdir($jobOptions{jobDir}."/pmvs/models"); - - $filesList = ""; - - foreach $fileObject (@objects) { - if($fileObject->{isOk}){ - if($fileObject->{isOk}){ - $filesList .= sprintf("\./%s.jpg 0 %0.5f\n", $fileObject->{base}, $fileObject->{focalpx}); - } - } - } - - chomp($filesList); - - $bundlerOptions = "--match_table matches.init.txt\n"; - $bundlerOptions .= "--output bundle.out\n"; - $bundlerOptions .= "--output_all bundle_\n"; - $bundlerOptions .= "--output_dir bundle\n"; - $bundlerOptions .= "--variable_focal_length\n"; - $bundlerOptions .= "--use_focal_estimate\n"; - $bundlerOptions .= "--constrain_focal\n"; - $bundlerOptions .= "--constrain_focal_weight 0.01\n"; - $bundlerOptions .= "--estimate_distortion\n"; - $bundlerOptions .= "--run_bundle"; - - system("echo \"$bundlerOptions\" > \"$jobOptions{step_3_bundlerOptions}\""); - - open (BUNDLER_DEST, ">$jobOptions{step_3_filelist}"); - print BUNDLER_DEST $filesList; - close(BUNDLER_DEST); - - run("\"$BIN_PATH/bundler\" \"$jobOptions{step_3_filelist}\" --options_file \"$jobOptions{step_3_bundlerOptions}\" > bundle/out"); - run("\"$BIN_PATH/Bundle2PMVS\" \"$jobOptions{step_3_filelist}\" 
bundle/bundle.out"); - run("\"$BIN_PATH/RadialUndistort\" \"$jobOptions{step_3_filelist}\" bundle/bundle.out pmvs"); - - $i = 0; - - foreach $fileObject (@objects) { - if($fileObject->{isOk}){ - if($fileObject->{isOk}){ - if (-e "pmvs/$fileObject->{base}.rd.jpg"){ - $nr = sprintf("%08d", $i++); - - system("mv pmvs/$fileObject->{base}.rd.jpg pmvs/visualize/$nr.jpg"); - system("mv pmvs/$nr.txt pmvs/txt/$nr.txt"); - } - } - } - } - - system("\"$BIN_PATH/Bundle2Vis\" pmvs/bundle.rd.out pmvs/vis.dat"); - - if($args{"--end-with"} ne "bundler"){ - cmvs(); - } -} - -sub cmvs { - print "\n"; - print "\n - running cmvs - "; now(); print "\n"; - print "\n"; - - chdir($jobOptions{jobDir}); - - run("\"$BIN_PATH/cmvs\" pmvs/ $args{'--cmvs-maxImages'} $CORES"); - run("\"$BIN_PATH/genOption\" pmvs/ $args{'--pmvs-level'} $args{'--pmvs-csize'} $args{'--pmvs-threshold'} $args{'--pmvs-wsize'} $args{'--pmvs-minImageNum'} $CORES"); - - if($args{"--end-with"} ne "cmvs"){ - pmvs(); - } -} - -sub pmvs { - print "\n"; - print "\n - running pmvs - "; - print "\n"; - - chdir($jobOptions{jobDir}); - - run("\"$BIN_PATH/pmvs2\" pmvs/ option-0000"); - - system("cp -Rf \"$jobOptions{jobDir}/pmvs/models\" \"$jobOptions{jobDir}-results\""); -} - -parseArgs(); -prepareObjects(); - -chdir($jobOptions{jobDir}); - -switch ($args{"--start-with"}) { - case "resize" { resize(); } - case "getKeypoints" { getKeypoints(); } - case "match" { match(); } - case "bundler" { bundler(); } - case "cmvs" { cmvs(); } - case "pmvs" { pmvs(); } -} - -print "\n"; -print "\n - done - "; now(); print "\n"; -print "\n"; \ No newline at end of file diff --git a/run.py b/run.py new file mode 100644 index 000000000..4a60471d9 --- /dev/null +++ b/run.py @@ -0,0 +1,49 @@ +#!/usr/bin/python + +from opendm import log +from opendm import config +from opendm import system +from opendm import io + +import ecto +import os + +from scripts.odm_app import ODMApp + +if __name__ == '__main__': + + args = config.config() + + 
log.ODM_INFO('Initializing OpenDroneMap app - %s' % system.now()) + + # Add project dir if doesn't exist + args.project_path = io.join_paths(args.project_path, args.name) + if not io.dir_exists(args.project_path): + log.ODM_WARNING('Directory %s does not exist. Creating it now.' % args.name) + system.mkdir_p(os.path.abspath(args.project_path)) + + # If user asks to rerun everything, delete all of the existing progress directories. + # TODO: Move this somewhere it's not hard-coded + if args.rerun_all: + log.ODM_DEBUG("Rerun all -- Removing old data") + os.system("rm -rf " + + args.project_path + "/images_resize " + + args.project_path + "/odm_georeferencing " + + args.project_path + "/odm_meshing " + + args.project_path + "/odm_orthophoto " + + args.project_path + "/odm_texturing " + + args.project_path + "/opensfm " + + args.project_path + "/pmvs") + + # create an instance of my App BlackBox + # internally configure all tasks + app = ODMApp(args=args) + + # create a plasm that only contains the BlackBox + plasm = ecto.Plasm() + plasm.insert(app) + + # execute the plasm + plasm.execute(niter=1) + + log.ODM_INFO('OpenDroneMap app finished - %s' % system.now()) diff --git a/run.rb b/run.rb deleted file mode 100644 index e48532379..000000000 --- a/run.rb +++ /dev/null @@ -1,426 +0,0 @@ -#!/usr/bin/ruby -# encoding: utf-8 - -require 'rubygems' -require 'optparse' -require 'pp' -require 'parallel' -require 'fileutils' - -#alias :puts_original :puts -# -#def puts (*args) -# puts_original(args) -# puts_original(args) -#end - -$options = {} - -optparse = OptionParser.new do|opts| - opts.banner = "Usage: run.rb [options] " - - $options[:base_path] = "." 
- - $options[:resize_to] = 1200 - opts.on('-r', '--resize-to int', "") do |param| - $options[:resize_to] = param.to_i - end - - $options[:match_size] = 200 - opts.on('-m', '--match-size int', "") do |param| - $options[:match_size] = param.to_i - end - - $options[:start_with] = "resize" - opts.on('', '--start-with ', "values: \"resize\", \"getKeypoints\", \"match\", \"bundler\", \"cmvs\", \"pmvs\"") do |param| - $options[:start_with] = param - end - - $options[:end_with] = "pmvs" - opts.on('', '--end-with ', "values: \"resize\", \"getKeypoints\", \"match\", \"bundler\", \"cmvs\", \"pmvs\"") do |param| - $options[:end_with] = param - end - - $options[:force_ccd] = false - opts.on('', '--force-ccd float', "") do |param| - $options[:force_ccd] = param.to_f - end - - - $options[:force_focal] = false - opts.on('', '--force-focal float', "") do |param| - $options[:force_focal] = param.to_f - end - - $options[:cmvs_max_images] = 100 - opts.on('', '--cmvs-max-images int', "") do |param| - $options[:cmvs_max_images] = param.to_i - end - - $options[:matcher_ratio] = 0.6 - opts.on('', '--matcher-ratio float', "") do |param| - $options[:matcher_ratio] = param.to_f - end - - $options[:matcher_threshold] = 2.0 - opts.on('', '--matcher-threshold float', "") do |param| - $options[:matcher_threshold] = param.to_f - end - - $options[:pmvs_min_image_num] = 3 - opts.on('', '--pmvs-minImageNum n', "") do |param| - $options[:pmvs_min_image_num] = param.to_i - end - $options[:pmvs_wsize] = 7 - opts.on('', '--pmvs-wsize int', "") do |param| - $options[:pmvs_wsize] = param.to_i - end - $options[:pmvs_threshold] = 0.7 - opts.on('', '--pmvs-threshold float', "") do |param| - $options[:pmvs_threshold] = param.to_f - end - $options[:pmvs_csize] = 2 - opts.on('', '--pmvs-csize int', "") do |param| - $options[:pmvs_csize] = param.to_i - end - $options[:pmvs_level] = 1 - opts.on('', '--pmvs-level int', "") do |param| - $options[:pmvs_level] = param.to_i - end - - opts.on( '-h', '--help', 'Display 
this screen' ) do - puts opts - exit - end -end - -begin - optparse.parse! - - $options[:path_base] = ARGV[0] if ARGV.length == 1 - $options[:path_base] = File.expand_path($options[:path_base]) - - $options[:path_bin] = File.expand_path(File.dirname(__FILE__)) + "/bin" - - require "#{File.expand_path(File.dirname(__FILE__))}/ccd_defs.rb" - - begin - puts " Configuration:" - puts " bin_path = #{$options[:path_bin]}" - puts " base_path = #{$options[:path_base]}" - puts " " - puts " start_with = #{$options[:start_with]}" - puts " end_with = #{$options[:end_with]}" - puts " " - puts " resize_to = #{$options[:resize_to]}" - puts " " - puts " match_size = #{$options[:match_size]}" - puts " matcher_ratio = #{$options[:matcher_ratio]}" - puts " matcher_threshold = #{$options[:matcher_threshold]}" - puts " " - puts " cmvs_max_images = #{$options[:cmvs_max_images]}" - puts " " - puts " pmvs_threshold = #{$options[:pmvs_threshold]}" - puts " pmvs_csize = #{$options[:pmvs_csize]}" - puts " pmvs_level = #{$options[:pmvs_level]}" - puts " pmvs_min_image_num = #{$options[:pmvs_min_image_num]}" - puts " pmvs_wsize = #{$options[:pmvs_wsize]}" - puts " " - end - - file_objects = [] - - source_files = `ls -1 #{$options[:path_base]} | egrep "\.[jJ]{1}[pP]{1}[eE]{0,1}[gG]{1}"`.split("\n") - file_objects = Parallel.map(source_files) { |source_file| - file_object = Hash.new - - file_object[:file_name] = "#{source_file}" - file_object[:file_basename] = "#{source_file}".sub(/\.[^\.]+$/, "") - file_object[:path_src] = "#{$options[:path_base]}/#{source_file}" - - jhead_text = `jhead #{file_object[:path_src]}` - file_object[:jhead] = Hash.new - - jhead_text.split("\n").each { |jhead_line| - jhead_parts = jhead_line.split(/\ +:\ /) - - file_object[:jhead][jhead_parts[0].to_sym] = jhead_parts[1] if jhead_parts.length == 2 - } - - file_object[:model_id] = "#{file_object[:jhead][:'Camera make']} #{file_object[:jhead][:'Camera model']}" - - file_object[:width],file_object[:height] = 
[file_object[:jhead][:Resolution].split(" x ")[0].to_i, file_object[:jhead][:Resolution].split(" x ")[1].to_i] if file_object[:jhead][:Resolution] - - - if file_object[:jhead][:'CCD width'] - file_object[:ccd] = file_object[:jhead][:'CCD width'][/([\.0-9]+)/].to_f - end - - if file_object[:jhead][:'Focal length'] - file_object[:focal] = file_object[:jhead][:'Focal length'][/([\.0-9]+)/].to_f - end - - file_object[:ccd] = $ccd_widths[file_object[:model_id].to_sym] unless file_object[:ccd] - - file_object[:focal] = $options[:force_focal] if $options[:force_focal] - file_object[:ccd] = $options[:force_ccd] if $options[:force_ccd] - - if file_object[:focal] && file_object[:ccd] && file_object[:width] && file_object[:height] - file_object[:focal_px] = (file_object[:focal] / file_object[:ccd]) * [[file_object[:width], file_object[:height]].max.to_f, $options[:resize_to].to_f].min - puts "#{file_object[:path_src]} – using image with ccd width: #{file_object[:ccd]} and focal length: #{file_object[:focal]}" - file_object - else - if !file_object[:ccd] - puts "no ccd width found for #{file_object[:model_id]}" - end - end - } - - file_objects = file_objects.select {|file_object| file_object} - - puts "found #{file_objects.length} usable objects" - - def get_feature_count (bin_file_name) - io = File.open(bin_file_name, "rb") - feature_count = io.read(4).unpack("L") - io.close - - feature_count.first.to_i - end - - def match?(i, j, file_object_i, file_object_j, path) - pairwise_match = "#{path}/#{i}-#{j}.txt" - - !File.exists?(pairwise_match) - end - def match (i, j, file_object_i, file_object_j, path, index) - pairwise_match = "#{path}/.#{i}-#{j}.txt" - done_pairwise_match = "#{path}/#{i}-#{j}.txt" - - file_object_i_key = "#{path}/../#{file_object_i[:file_basename]}.key" - file_object_j_key = "#{path}/../#{file_object_j[:file_basename]}.key" - - feature_count_i = get_feature_count("#{path}/../#{file_object_i[:file_basename]}.key.bin") - feature_count_j = 
get_feature_count("#{path}/../#{file_object_j[:file_basename]}.key.bin") - - `touch '#{pairwise_match}' && '#{$options[:path_bin]}/KeyMatch' '#{file_object_i_key}' '#{file_object_j_key}' '#{pairwise_match}' #{$options[:matcher_ratio]} #{$options[:matcher_threshold]}` unless File.exists?(pairwise_match) - matches =`cat '#{pairwise_match}' | wc -l`.to_i - - if matches > 0 - puts "%6d / %6d - %6.2f%% matches between #{file_object_i[:file_name]}, #{file_object_j[:file_name]}" % [$prog_start + index + 1, $prog_total, matches.to_f*100/([feature_count_i, feature_count_j].min)] - end - - FileUtils.mv(pairwise_match, done_pairwise_match) - end - - def get_keypoints_for_file?(file_object, path, size) - path_matching_base_name = "#{path}/#{file_object[:file_basename]}" - path_matching_key_bin = "#{path_matching_base_name}.key.bin" - path_matching_key_gz = "#{path_matching_base_name}.key.gz" - path_matching_jpg = "#{path_matching_base_name}.jpg" - - !(File.exists?(path_matching_jpg) && File.exists?(path_matching_key_bin) && File.exists?(path_matching_key_gz)) - end - def get_keypoints_for_file (file_object, path, size, index) - path_matching_base_name = "#{path}/.#{file_object[:file_basename]}" - path_matching_jpg = "#{path_matching_base_name}.jpg" - path_matching_pgm = "#{path_matching_base_name}.pgm" - path_matching_sift = "#{path_matching_base_name}.key.sift" - path_matching_key = "#{path_matching_base_name}.key" - path_matching_key_bin = "#{path_matching_base_name}.key.bin" - path_matching_key_gz = "#{path_matching_base_name}.key.gz" - - done_path_matching_base_name = "#{path}/#{file_object[:file_basename]}" - done_path_matching_key_bin = "#{done_path_matching_base_name}.key.bin" - done_path_matching_key_gz = "#{done_path_matching_base_name}.key.gz" - done_path_matching_jpg = "#{done_path_matching_base_name}.jpg" - - - `convert -format jpg -resize #{size}x#{size} -quality 100 '#{file_object[:path_src]}' '#{path_matching_jpg}'` - `convert -format pgm -resize #{size}x#{size} 
-quality 100 '#{file_object[:path_src]}' '#{path_matching_pgm}'` - - `'#{$options[:path_bin]}/vlsift' '#{path_matching_pgm}' -o '#{path_matching_sift}'` - - `perl '#{$options[:path_bin]}/../convert_vlsift_to_lowesift.pl' '#{path_matching_base_name}'` - - `gzip -f '#{path_matching_key}' && rm -f '#{path_matching_sift}' && rm -f '#{path_matching_pgm}'` - - feature_count = get_feature_count(path_matching_key_bin) - - puts "%6d / %6d - got #{feature_count} keypoints from #{file_object[:file_name]} @ #{size}px" % [$prog_start + index + 1, $prog_total] - - FileUtils.mv(path_matching_key_bin , done_path_matching_key_bin ) - FileUtils.mv(path_matching_key_gz , done_path_matching_key_gz ) - FileUtils.mv(path_matching_jpg , done_path_matching_jpg ) - end - - if file_objects.length > 0 - job_options = Hash.new - - job_options[:path] = "#{$options[:path_base]}/__reconstruction-#{$options[:resize_to]}" - - Dir::mkdir(job_options[:path]) unless File.directory?(job_options[:path]) - - ### MATCHING - - job_options[:path_matching] = "#{$options[:path_base]}/__pre_matching-#{$options[:match_size]}" - job_options[:path_matchinglarge] = "#{job_options[:path]}" - job_options[:path_matching_pairs] = "#{$options[:path_base]}/__pre_matching-#{$options[:match_size]}/_pairs" - job_options[:path_matchinglarge_pairs] = "#{job_options[:path_matchinglarge]}/_pairs" - - Dir::mkdir(job_options[:path_matching]) unless File.directory?(job_options[:path_matching]) - Dir::mkdir(job_options[:path_matchinglarge]) unless File.directory?(job_options[:path_matchinglarge]) - Dir::mkdir(job_options[:path_matching_pairs]) unless File.directory?(job_options[:path_matching_pairs]) - Dir::mkdir(job_options[:path_matchinglarge_pairs]) unless File.directory?(job_options[:path_matchinglarge_pairs]) - - - - puts "\n**\n** GETTING KEYPOINTS SMALL VERSION\n** #{Time.now}\n\n" - - file_objects_todo = file_objects.select { |file_object| get_keypoints_for_file?(file_object, job_options[:path_matching], 
$options[:match_size]) } - - $prog_start = (file_objects.length - file_objects_todo.length) - $prog_total = file_objects.length - Parallel.each_with_index(file_objects_todo) { |file_object, index| get_keypoints_for_file(file_object, job_options[:path_matching], $options[:match_size], index) } - - puts (file_objects_todo.empty? ? "nothing to do" : "done") - - - - puts "\n**\n** MATCHING SMALL VERSION\n** #{Time.now}\n\n" - - match_indeces = Array.new - - (0...file_objects.length).inject(match_indeces) { |memo, i| - (i+1...file_objects.length).inject(memo) { |memo, j| - memo.push([i, j]) - } - } - - match_indeces_todo = match_indeces.select { |i,j| match?(i, j, file_objects[i], file_objects[j], job_options[:path_matching_pairs]) } - - $prog_start = (match_indeces.length - match_indeces_todo.length) - $prog_total = match_indeces.length - Parallel.each_with_index(match_indeces_todo) { |(i, j), index| - match(i, j, file_objects[i], file_objects[j], job_options[:path_matching_pairs], index) - } - - puts (match_indeces_todo.empty? ? "nothing to do" : "done") - - - - puts "\n**\n** GETTING KEYPOINTS BIG VERSION\n** #{Time.now}\n\n" - - file_objects_todo = file_objects.select { |file_object| get_keypoints_for_file?(file_object, job_options[:path_matchinglarge], $options[:resize_to]) } - - $prog_start = (file_objects.length - file_objects_todo.length) - $prog_total = file_objects.length - Parallel.each_with_index(file_objects_todo, :in_processes => 4) { |file_object, index| get_keypoints_for_file(file_object, job_options[:path_matchinglarge], $options[:resize_to], index) } - - puts (file_objects_todo.empty? ? 
"nothing to do" : "done") - - - - puts "\n**\n** MATCHING BIG VERSION\n** #{Time.now}\n\n" - - matches_files = `ls -1 #{job_options[:path_matching_pairs]} | egrep "\.txt"`.split("\n").map {|e| (e.sub("\.txt", "").split("-")).map{|n| n.to_i} } - matches_files = matches_files.select {|i,j| File.size?("#{job_options[:path_matching_pairs]}/#{i}-#{j}.txt") } - - matches_files_todo = matches_files.select { |i, j| match?(i, j, file_objects[i], file_objects[j], job_options[:path_matchinglarge_pairs]) } - - $prog_start = (matches_files.length - matches_files_todo.length) - $prog_total = matches_files.length - Parallel.each_with_index(matches_files_todo) { |(i, j), index| match(i, j, file_objects[i], file_objects[j], job_options[:path_matchinglarge_pairs], index) } - - puts (matches_files_todo.empty? ? "nothing to do" : "done") - - - - puts "\n**\n** RUNNING BUNDLER\n** #{Time.now}\n\n" - - job_options[:path_bundle] = "#{job_options[:path]}/bundle" - job_options[:path_pmvs] = "#{job_options[:path]}/pmvs" - job_options[:path_pmvs_txt] = "#{job_options[:path]}/pmvs/txt" - job_options[:path_pmvs_visualize] = "#{job_options[:path]}/pmvs/visualize" - job_options[:path_pmvs_models] = "#{job_options[:path]}/pmvs/models" - - job_options[:file_bundler_filelist] = "#{job_options[:path]}/_bundler_list.txt" - job_options[:file_bundler_options] = "#{job_options[:path]}/_bundler_options.txt" - job_options[:file_bundler_matches_init] = "#{job_options[:path]}/_bundler_matches.init.txt" - - Dir::mkdir(job_options[:path_bundle]) unless File.directory?(job_options[:path_bundle]) - Dir::mkdir(job_options[:path_pmvs]) unless File.directory?(job_options[:path_pmvs]) - Dir::mkdir(job_options[:path_pmvs_txt]) unless File.directory?(job_options[:path_pmvs_txt]) - Dir::mkdir(job_options[:path_pmvs_visualize]) unless File.directory?(job_options[:path_pmvs_visualize]) - Dir::mkdir(job_options[:path_pmvs_models]) unless File.directory?(job_options[:path_pmvs_models]) - -# files_for_bundler = `ls -1 
#{job_options[:path_matchinglarge]} | egrep "\.[jJ]{1}[pP]{1}[eE]{0,1}[gG]{1}$"`.split("\n").sort -# files_for_bundler = file_objects.select { |file_object| files_for_bundler.include?(file_object[:file_name]) } - - File.open(job_options[:file_bundler_filelist], 'w') do |file| - file_objects.each { |file_object| - file.puts "./%s 0 %0.5f" % ["#{file_object[:file_basename]}.jpg", file_object[:focal_px]] - } - end - - File.open(job_options[:file_bundler_options], 'w') do |file| - file.puts "--match_table _bundler_matches.init.txt" - file.puts "--output bundle.out" - file.puts "--output_all bundle_" - file.puts "--output_dir bundle" - file.puts "--variable_focal_length" - file.puts "--use_focal_estimate" - file.puts "--constrain_focal" - file.puts "--constrain_focal_weight 0.01" - file.puts "--estimate_distortion" - file.puts "--run_bundle" - end - - File.open(job_options[:file_bundler_matches_init], 'w') do |file| - matches_files = `ls -1 #{job_options[:path_matchinglarge_pairs]} | egrep "\.txt"`.split("\n").map {|e| (e.sub("\.txt", "").split("-")).map{|n| n.to_i} } - matches_files = matches_files.sort - - matches_files.each {|i,j| - if File.size?("#{job_options[:path_matchinglarge_pairs]}/#{i}-#{j}.txt") - file.puts "#{i} #{j}" - file.puts File.read("#{job_options[:path_matchinglarge_pairs]}/#{i}-#{j}.txt") - end - } - end - - Dir.chdir("#{job_options[:path]}") - - system("'#{$options[:path_bin]}/bundler' '#{job_options[:file_bundler_filelist]}' --options_file '#{job_options[:file_bundler_options]}'") - - puts "\n**\n** RUNNING BUNDLE2PMVS\n** #{Time.now}\n\n" - system("'#{$options[:path_bin]}/Bundle2PMVS' '#{job_options[:file_bundler_filelist]}' 'bundle/bundle.out'") - - puts "\n**\n** RUNNING RADIALUNDISTORT\n** #{Time.now}\n\n" - system("'#{$options[:path_bin]}/RadialUndistort' '#{job_options[:file_bundler_filelist]}' 'bundle/bundle.out' 'pmvs'") - - i = 0 - - file_objects.each { |file_object| - if 
File.exist?("#{job_options[:path]}/pmvs/#{file_object[:file_basename]}.rd.jpg") - nr = "%08d" % [i] - - puts "#{job_options[:path]}/pmvs/#{file_object[:file_basename]}.rd.jpg", "#{job_options[:path]}/pmvs/visualize/#{nr}.jpg" - - FileUtils.mv("#{job_options[:path]}/pmvs/#{file_object[:file_basename]}.rd.jpg", "#{job_options[:path]}/pmvs/visualize/#{nr}.jpg") - FileUtils.mv("#{job_options[:path]}/pmvs/#{nr}.txt", "#{job_options[:path]}/pmvs/txt/#{nr}.txt") - - i += 1 - end - } - - puts "\n**\n** RUNNING CMVS\n** #{Time.now}\n\n" - system("'#{$options[:path_bin]}/cmvs' pmvs/ #{$options[:cmvs_max_images]} #{Parallel.processor_count}"); - - puts "\n**\n** GENOPTION CMVS\n** #{Time.now}\n\n" - system("'#{$options[:path_bin]}/genOption' pmvs/ #{$options[:pmvs_level]} #{$options[:pmvs_csize]} #{$options[:pmvs_threshold]} #{$options[:pmvs_wsize]} #{$options[:pmvs_min_image_num]} #{Parallel.processor_count}"); - - puts "\n**\n** GENOPTION PMVS\n** #{Time.now}\n\n" - system("'#{$options[:path_bin]}/pmvs2' pmvs/ option-0000"); - - end -end diff --git a/run.sh b/run.sh new file mode 100755 index 000000000..3b511725d --- /dev/null +++ b/run.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +RUNPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +export PYTHONPATH=$RUNPATH/SuperBuild/install/lib/python2.7/dist-packages:$RUNPATH/SuperBuild/src/opensfm:$PYTHONPATH +export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$RUNPATH/SuperBuild/install/lib +python $RUNPATH/run.py "$@" + diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/scripts/cmvs.py b/scripts/cmvs.py new file mode 100644 index 000000000..48e332bae --- /dev/null +++ b/scripts/cmvs.py @@ -0,0 +1,66 @@ +import ecto + +from opendm import io +from opendm import log +from opendm import system +from opendm import context + + +class ODMCmvsCell(ecto.Cell): + + def declare_params(self, params): + params.declare("max_images", 'The maximum number of images ' + 'per cluster', 500) + 
params.declare("cores", 'The maximum number of cores to use ' + 'in dense reconstruction.', context.num_cores) + + def declare_io(self, params, inputs, outputs): + inputs.declare("tree", "Struct with paths", []) + inputs.declare("args", "Struct with paths", []) + inputs.declare("reconstruction", "list of ODMReconstructions", []) + outputs.declare("reconstruction", "list of ODMReconstructions", []) + + def process(self, inputs, outputs): + + # Benchmarking + start_time = system.now_raw() + + log.ODM_INFO('Running ODM CMVS Cell') + + # get inputs + args = self.inputs.args + tree = self.inputs.tree + + # check if we rerun cell or not + rerun_cell = (args.rerun is not None and + args.rerun == 'cmvs') or \ + (args.rerun_all) or \ + (args.rerun_from is not None and + 'cmvs' in args.rerun_from) + + if not io.file_exists(tree.pmvs_bundle) or rerun_cell: + log.ODM_DEBUG('Writing CMVS vis in: %s' % tree.pmvs_bundle) + + # copy bundle file to pmvs dir + from shutil import copyfile + copyfile(tree.opensfm_bundle, + tree.pmvs_bundle) + + kwargs = { + 'bin': context.cmvs_path, + 'prefix': self.inputs.tree.pmvs_rec_path, + 'max_images': self.params.max_images, + 'cores': self.params.cores + } + + # run cmvs + system.run('{bin} {prefix}/ {max_images} {cores}'.format(**kwargs)) + else: + log.ODM_WARNING('Found a valid CMVS file in: %s' % + tree.pmvs_bundle) + + if args.time: + system.benchmark(start_time, tree.benchmarking, 'CMVS') + + log.ODM_INFO('Running ODM CMVS Cell - Finished') + return ecto.OK if args.end_with != 'cmvs' else ecto.QUIT diff --git a/scripts/dataset.py b/scripts/dataset.py new file mode 100644 index 000000000..54285ae14 --- /dev/null +++ b/scripts/dataset.py @@ -0,0 +1,83 @@ +import os +import ecto + +from functools import partial +from multiprocessing import Pool +from opendm import context +from opendm import io +from opendm import types +from opendm import log +from opendm import system +from shutil import copyfile + + +def make_odm_photo(force_focal, 
force_ccd, path_file): + return types.ODM_Photo(path_file, + force_focal, + force_ccd) + + +class ODMLoadDatasetCell(ecto.Cell): + + def declare_params(self, params): + params.declare("force_focal", 'Override the focal length information for the ' + 'images', None) + params.declare("force_ccd", 'Override the ccd width information for the ' + 'images', None) + + def declare_io(self, params, inputs, outputs): + inputs.declare("tree", "Struct with paths", []) + outputs.declare("photos", "list of ODMPhotos", []) + + def process(self, inputs, outputs): + # check if the extension is supported + def supported_extension(file_name): + (pathfn, ext) = os.path.splitext(file_name) + return ext.lower() in context.supported_extensions + + # Get supported images from dir + def get_images(in_dir): + # filter images for its extension type + log.ODM_DEBUG(in_dir) + return [f for f in io.get_files_list(in_dir) if supported_extension(f)] + + log.ODM_INFO('Running ODM Load Dataset Cell') + + # get inputs + tree = self.inputs.tree + + # get images directory + input_dir = tree.input_images + images_dir = tree.dataset_raw + + if not io.dir_exists(images_dir): + log.ODM_INFO("Project directory %s doesn't exist. Creating it now. 
" % images_dir) + system.mkdir_p(images_dir) + copied = [copyfile(io.join_paths(input_dir, f), io.join_paths(images_dir, f)) for f in get_images(input_dir)] + + log.ODM_DEBUG('Loading dataset from: %s' % images_dir) + + files = get_images(images_dir) + + if files: + # create ODMPhoto list + path_files = [io.join_paths(images_dir, f) for f in files] + # photos = Pool().map( + # partial(make_odm_photo, self.params.force_focal, self.params.force_ccd), + # path_files + # ) + + photos = [] + for files in path_files: + photos += [make_odm_photo(self.params.force_focal, self.params.force_ccd, files)] + + log.ODM_INFO('Found %s usable images' % len(photos)) + else: + log.ODM_ERROR('Not enough supported images in %s' % images_dir) + return ecto.QUIT + + # append photos to cell output + outputs.photos = photos + + log.ODM_INFO('Running ODM Load Dataset Cell - Finished') + return ecto.OK diff --git a/scripts/example_ecto_python.py b/scripts/example_ecto_python.py new file mode 100644 index 000000000..9d4826759 --- /dev/null +++ b/scripts/example_ecto_python.py @@ -0,0 +1,35 @@ +import ecto +import numpy as np +class ClusterDetector(ecto.Cell): + def declare_params(self, params): + params.declare("n", "Max number of clusters.", 10) + + def declare_io(self, params, inputs, outputs): + outputs.declare("clusters", "Clusters output. 
list of tuples", []) + + def process(self, inputs, outputs): + clusters = [] + for i in range(int(np.random.uniform(0, self.params.n))): + clusters.append( (i, 'c%d'%i) ) + outputs.clusters = clusters + +class ClusterPrinter(ecto.Cell): + def declare_io(self, params, inputs, outputs): + inputs.declare("clusters", "Clusters input") + + def process(self, inputs, outputs): + print "Clusters: ", + for c in inputs.clusters: + print c, + print "\n" + +def app(): + cd = ClusterDetector(n=20) + cp = ClusterPrinter() + plasm = ecto.Plasm() + plasm.connect(cd['clusters'] >> cp['clusters']) + sched = ecto.Scheduler(plasm) + sched.execute(niter=3) + +if __name__ == "__main__": + app() \ No newline at end of file diff --git a/scripts/mvstex.py b/scripts/mvstex.py new file mode 100644 index 000000000..a5a59733a --- /dev/null +++ b/scripts/mvstex.py @@ -0,0 +1,133 @@ +import ecto, os + +from opendm import log +from opendm import io +from opendm import system +from opendm import context + +import pmvs2nvmcams + +class ODMMvsTexCell(ecto.Cell): + def declare_params(self, params): + params.declare("data_term", 'Data term: [area, gmi] default: gmi', "gmi") + params.declare("outlier_rem_type", 'Type of photometric outlier removal method: [none, gauss_damping, gauss_clamping]. default: none', "none") + params.declare("skip_vis_test", 'Skip geometric visibility test based on ray intersection.', False) + params.declare("skip_glob_seam_leveling", 'Skip global seam leveling.', False) + params.declare("skip_loc_seam_leveling", 'Skip local seam leveling (Poisson editing).', False) + params.declare("skip_hole_fill", 'Skip hole filling.', False) + params.declare("keep_unseen_faces", 'Keep unseen faces.', False) + params.declare("tone_mapping", 'Type of tone mapping: [none, gamma]. 
Default: gamma', "gamma") + + def declare_io(self, params, inputs, outputs): + inputs.declare("tree", "Struct with paths", []) + inputs.declare("args", "The application arguments.", {}) + inputs.declare("reconstruction", "Clusters output. list of ODMReconstructions", []) + outputs.declare("reconstruction", "Clusters output. list of ODMReconstructions", []) + + + + def process(self, inputs, outputs): + + # Benchmarking + start_time = system.now_raw() + + log.ODM_INFO('Running MVS Texturing Cell') + + # get inputs + args = self.inputs.args + tree = self.inputs.tree + + # define paths and create working directories + system.mkdir_p(tree.odm_texturing) + if args.use_25dmesh: system.mkdir_p(tree.odm_25dtexturing) + + # check if we rerun cell or not + rerun_cell = (args.rerun is not None and + args.rerun == 'mvs_texturing') or \ + (args.rerun_all) or \ + (args.rerun_from is not None and + 'mvs_texturing' in args.rerun_from) + + runs = [{ + 'out_dir': tree.odm_texturing, + 'model': tree.odm_mesh + }] + + if args.use_25dmesh: + runs += [{ + 'out_dir': tree.odm_25dtexturing, + 'model': tree.odm_25dmesh + }] + + for r in runs: + odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj) + + if not io.file_exists(odm_textured_model_obj) or rerun_cell: + log.ODM_DEBUG('Writing MVS Textured file in: %s' + % odm_textured_model_obj) + + + # Format arguments to fit Mvs-Texturing app + skipGeometricVisibilityTest = "" + skipGlobalSeamLeveling = "" + skipLocalSeamLeveling = "" + skipHoleFilling = "" + keepUnseenFaces = "" + + if (self.params.skip_vis_test): + skipGeometricVisibilityTest = "--skip_geometric_visibility_test" + if (self.params.skip_glob_seam_leveling): + skipGlobalSeamLeveling = "--skip_global_seam_leveling" + if (self.params.skip_loc_seam_leveling): + skipLocalSeamLeveling = "--skip_local_seam_leveling" + if (self.params.skip_hole_fill): + skipHoleFilling = "--skip_hole_filling" + if (self.params.keep_unseen_faces): + keepUnseenFaces = 
"--keep_unseen_faces" + + # mvstex definitions + kwargs = { + 'bin': context.mvstex_path, + 'out_dir': io.join_paths(r['out_dir'], "odm_textured_model"), + 'pmvs_folder': tree.pmvs_rec_path, + 'nvm_file': io.join_paths(tree.pmvs_rec_path, "nvmCams.nvm"), + 'model': r['model'], + 'dataTerm': self.params.data_term, + 'outlierRemovalType': self.params.outlier_rem_type, + 'skipGeometricVisibilityTest': skipGeometricVisibilityTest, + 'skipGlobalSeamLeveling': skipGlobalSeamLeveling, + 'skipLocalSeamLeveling': skipLocalSeamLeveling, + 'skipHoleFilling': skipHoleFilling, + 'keepUnseenFaces': keepUnseenFaces, + 'toneMapping': self.params.tone_mapping + } + + if not args.use_pmvs: + kwargs['nvm_file'] = io.join_paths(tree.opensfm, + "reconstruction.nvm") + else: + log.ODM_DEBUG('Generating .nvm file from pmvs output: %s' + % '{nvm_file}'.format(**kwargs)) + + # Create .nvm camera file. + pmvs2nvmcams.run('{pmvs_folder}'.format(**kwargs), + '{nvm_file}'.format(**kwargs)) + + # run texturing binary + system.run('{bin} {nvm_file} {model} {out_dir} ' + '-d {dataTerm} -o {outlierRemovalType} ' + '-t {toneMapping} ' + '{skipGeometricVisibilityTest} ' + '{skipGlobalSeamLeveling} ' + '{skipLocalSeamLeveling} ' + '{skipHoleFilling} ' + '{keepUnseenFaces}'.format(**kwargs)) + else: + log.ODM_WARNING('Found a valid ODM Texture file in: %s' + % odm_textured_model_obj) + + if args.time: + system.benchmark(start_time, tree.benchmarking, 'Texturing') + + log.ODM_INFO('Running ODM Texturing Cell - Finished') + return ecto.OK if args.end_with != 'mvs_texturing' else ecto.QUIT diff --git a/scripts/odm_app.py b/scripts/odm_app.py new file mode 100644 index 000000000..1bc5aa24c --- /dev/null +++ b/scripts/odm_app.py @@ -0,0 +1,189 @@ +import ecto +import os + +from opendm import context +from opendm import types +from opendm import io +from opendm import system + +from dataset import ODMLoadDatasetCell +from opensfm import ODMOpenSfMCell +from odm_slam import ODMSlamCell +from pmvs import 
ODMPmvsCell +from cmvs import ODMCmvsCell +from odm_meshing import ODMeshingCell +from mvstex import ODMMvsTexCell +from odm_georeferencing import ODMGeoreferencingCell +from odm_orthophoto import ODMOrthoPhotoCell +from odm_dem import ODMDEMCell + + +class ODMApp(ecto.BlackBox): + """ODMApp - a class for ODM Activities + """ + + def __init__(self, *args, **kwargs): + ecto.BlackBox.__init__(self, *args, **kwargs) + self.tree = None + + @staticmethod + def declare_direct_params(p): + p.declare("args", "The application arguments.", {}) + + @staticmethod + def declare_cells(p): + """ + Implement the virtual function from the base class + Only cells from which something is forwarded have to be declared + """ + cells = {'args': ecto.Constant(value=p.args), + 'dataset': ODMLoadDatasetCell(force_focal=p.args.force_focal, + force_ccd=p.args.force_ccd), + 'opensfm': ODMOpenSfMCell(use_exif_size=False, + feature_process_size=p.args.resize_to, + feature_min_frames=p.args.min_num_features, + processes=p.args.opensfm_processes, + matching_gps_neighbors=p.args.matcher_neighbors, + matching_gps_distance=p.args.matcher_distance, + fixed_camera_params=p.args.use_fixed_camera_params, + hybrid_bundle_adjustment=p.args.use_hybrid_bundle_adjustment), + 'slam': ODMSlamCell(), + 'cmvs': ODMCmvsCell(max_images=p.args.cmvs_maxImages), + 'pmvs': ODMPmvsCell(level=p.args.pmvs_level, + csize=p.args.pmvs_csize, + thresh=p.args.pmvs_threshold, + wsize=p.args.pmvs_wsize, + min_imgs=p.args.pmvs_min_images, + cores=p.args.pmvs_num_cores), + 'meshing': ODMeshingCell(max_vertex=p.args.mesh_size, + oct_tree=p.args.mesh_octree_depth, + samples=p.args.mesh_samples, + solver=p.args.mesh_solver_divide, + remove_outliers=p.args.mesh_remove_outliers, + wlop_iterations=p.args.mesh_wlop_iterations, + verbose=p.args.verbose), + 'texturing': ODMMvsTexCell(data_term=p.args.texturing_data_term, + outlier_rem_type=p.args.texturing_outlier_removal_type, + skip_vis_test=p.args.texturing_skip_visibility_test, + 
skip_glob_seam_leveling=p.args.texturing_skip_global_seam_leveling, + skip_loc_seam_leveling=p.args.texturing_skip_local_seam_leveling, + skip_hole_fill=p.args.texturing_skip_hole_filling, + keep_unseen_faces=p.args.texturing_keep_unseen_faces, + tone_mapping=p.args.texturing_tone_mapping), + 'georeferencing': ODMGeoreferencingCell(gcp_file=p.args.gcp, + use_exif=p.args.use_exif, + verbose=p.args.verbose), + 'dem': ODMDEMCell(verbose=p.args.verbose), + 'orthophoto': ODMOrthoPhotoCell(resolution=p.args.orthophoto_resolution, + t_srs=p.args.orthophoto_target_srs, + no_tiled=p.args.orthophoto_no_tiled, + compress=p.args.orthophoto_compression, + bigtiff=p.args.orthophoto_bigtiff, + build_overviews=p.args.build_overviews, + verbose=p.args.verbose) + } + + return cells + + def configure(self, p, _i, _o): + tree = types.ODM_Tree(p.args.project_path, p.args.images) + self.tree = ecto.Constant(value=tree) + + # TODO(dakota) put this somewhere better maybe + if p.args.time and io.file_exists(tree.benchmarking): + # Delete the previously made file + os.remove(tree.benchmarking) + with open(tree.benchmarking, 'a') as b: + b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores)) + + def connections(self, p): + if p.args.video: + return self.slam_connections(p) + + # define initial task + # TODO: What is this? 
+ # initial_task = p.args['start_with'] + # initial_task_id = config.processopts.index(initial_task) + + # define the connections like you would for the plasm + + # load the dataset + connections = [self.tree[:] >> self.dataset['tree']] + + # run opensfm with images from load dataset + connections += [self.tree[:] >> self.opensfm['tree'], + self.args[:] >> self.opensfm['args'], + self.dataset['photos'] >> self.opensfm['photos']] + + if not p.args.use_pmvs: + # create odm mesh from opensfm point cloud + connections += [self.tree[:] >> self.meshing['tree'], + self.args[:] >> self.meshing['args'], + self.opensfm['reconstruction'] >> self.meshing['reconstruction']] + else: + # run cmvs + connections += [self.tree[:] >> self.cmvs['tree'], + self.args[:] >> self.cmvs['args'], + self.opensfm['reconstruction'] >> self.cmvs['reconstruction']] + + # run pmvs + connections += [self.tree[:] >> self.pmvs['tree'], + self.args[:] >> self.pmvs['args'], + self.cmvs['reconstruction'] >> self.pmvs['reconstruction']] + + # create odm mesh from pmvs point cloud + connections += [self.tree[:] >> self.meshing['tree'], + self.args[:] >> self.meshing['args'], + self.pmvs['reconstruction'] >> self.meshing['reconstruction']] + + # create odm texture + connections += [self.tree[:] >> self.texturing['tree'], + self.args[:] >> self.texturing['args'], + self.meshing['reconstruction'] >> self.texturing['reconstruction']] + + # create odm georeference + connections += [self.tree[:] >> self.georeferencing['tree'], + self.args[:] >> self.georeferencing['args'], + self.dataset['photos'] >> self.georeferencing['photos'], + self.texturing['reconstruction'] >> self.georeferencing['reconstruction']] + + # create odm dem + connections += [self.tree[:] >> self.dem['tree'], + self.args[:] >> self.dem['args'], + self.georeferencing['reconstruction'] >> self.dem['reconstruction']] + + # create odm orthophoto + connections += [self.tree[:] >> self.orthophoto['tree'], + self.args[:] >> self.orthophoto['args'], 
+ self.georeferencing['reconstruction'] >> self.orthophoto['reconstruction']] + return connections + + def slam_connections(self, p): + """Get connections used when running from video instead of images.""" + connections = [] + + # run slam cell + connections += [self.tree[:] >> self.slam['tree'], + self.args[:] >> self.slam['args']] + + # run cmvs + connections += [self.tree[:] >> self.cmvs['tree'], + self.args[:] >> self.cmvs['args'], + self.slam['reconstruction'] >> self.cmvs['reconstruction']] + + # run pmvs + connections += [self.tree[:] >> self.pmvs['tree'], + self.args[:] >> self.pmvs['args'], + self.cmvs['reconstruction'] >> self.pmvs['reconstruction']] + + # create odm mesh + connections += [self.tree[:] >> self.meshing['tree'], + self.args[:] >> self.meshing['args'], + self.pmvs['reconstruction'] >> self.meshing['reconstruction']] + + # create odm texture + connections += [self.tree[:] >> self.texturing['tree'], + self.args[:] >> self.texturing['args'], + self.meshing['reconstruction'] >> self.texturing['reconstruction']] + + return connections diff --git a/scripts/odm_dem.py b/scripts/odm_dem.py new file mode 100644 index 000000000..57d8870f0 --- /dev/null +++ b/scripts/odm_dem.py @@ -0,0 +1,184 @@ +import ecto, os, json +from shutil import copyfile + +from opendm import io +from opendm import log +from opendm import system +from opendm import context +from opendm import types + + +class ODMDEMCell(ecto.Cell): + def declare_params(self, params): + params.declare("verbose", 'print additional messages to console', False) + + def declare_io(self, params, inputs, outputs): + inputs.declare("tree", "Struct with paths", []) + inputs.declare("args", "The application arguments.", {}) + inputs.declare("reconstruction", "list of ODMReconstructions", []) + + def process(self, inputs, outputs): + # Benchmarking + start_time = system.now_raw() + + log.ODM_INFO('Running ODM DEM Cell') + + # get inputs + args = self.inputs.args + tree = self.inputs.tree + 
las_model_found = io.file_exists(tree.odm_georeferencing_model_las) + env_paths = [context.superbuild_bin_path] + + # Just to make sure + l2d_module_installed = True + try: + system.run('l2d_classify --help > /dev/null', env_paths) + except: + log.ODM_WARNING('lidar2dems is not installed properly') + l2d_module_installed = False + + log.ODM_INFO('Create DSM: ' + str(args.dsm)) + log.ODM_INFO('Create DTM: ' + str(args.dtm)) + log.ODM_INFO('DEM input file {0} found: {1}'.format(tree.odm_georeferencing_model_las, str(las_model_found))) + + # Do we need to process anything here? + if (args.dsm or args.dtm) and las_model_found and l2d_module_installed: + + # define paths and create working directories + odm_dem_root = tree.path('odm_dem') + system.mkdir_p(odm_dem_root) + + dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif') + dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif') + + # check if we rerun cell or not + rerun_cell = (args.rerun is not None and + args.rerun == 'odm_dem') or \ + (args.rerun_all) or \ + (args.rerun_from is not None and + 'odm_dem' in args.rerun_from) + + if (args.dtm and not io.file_exists(dtm_output_filename)) or \ + (args.dsm and not io.file_exists(dsm_output_filename)) or \ + rerun_cell: + + # Extract boundaries and srs of point cloud + summary_file_path = os.path.join(odm_dem_root, 'odm_georeferenced_model.summary.json') + boundary_file_path = os.path.join(odm_dem_root, 'odm_georeferenced_model.boundary.json') + + system.run('pdal info --summary {0} > {1}'.format(tree.odm_georeferencing_model_las, summary_file_path), env_paths) + system.run('pdal info --boundary {0} > {1}'.format(tree.odm_georeferencing_model_las, boundary_file_path), env_paths) + + pc_proj4 = "" + pc_geojson_bounds_feature = None + + with open(summary_file_path, 'r') as f: + json_f = json.loads(f.read()) + pc_proj4 = json_f['summary']['srs']['proj4'] + + with open(boundary_file_path, 'r') as f: + json_f = json.loads(f.read()) + pc_geojson_boundary_feature = 
json_f['boundary']['boundary_json'] + + # Write bounds to GeoJSON + bounds_geojson_path = os.path.join(odm_dem_root, 'odm_georeferenced_model.bounds.geojson') + with open(bounds_geojson_path, "w") as f: + f.write(json.dumps({ + "type": "FeatureCollection", + "features": [{ + "type": "Feature", + "geometry": pc_geojson_boundary_feature + }] + })) + + bounds_shapefile_path = os.path.join(odm_dem_root, 'bounds.shp') + + # Convert bounds to Shapefile + kwargs = { + 'input': bounds_geojson_path, + 'output': bounds_shapefile_path, + 'proj4': pc_proj4 + } + system.run('ogr2ogr -overwrite -a_srs "{proj4}" {output} {input}'.format(**kwargs)) + + # Process with lidar2dems + terrain_params_map = { + 'flatnonforest': (1, 3), + 'flatforest': (1, 2), + 'complexnonforest': (5, 2), + 'complexforest': (10, 2) + } + terrain_params = terrain_params_map[args.dem_terrain_type.lower()] + + kwargs = { + 'verbose': '-v' if self.params.verbose else '', + 'slope': terrain_params[0], + 'cellsize': terrain_params[1], + 'outdir': odm_dem_root, + 'site': bounds_shapefile_path + } + + l2d_params = '--slope {slope} --cellsize {cellsize} ' \ + '{verbose} ' \ + '-o -s {site} ' \ + '--outdir {outdir}'.format(**kwargs) + + approximate = '--approximate' if args.dem_approximate else '' + + # Classify only if we need a DTM + run_classification = args.dtm + + if run_classification: + system.run('l2d_classify {0} --decimation {1} ' + '{2} --initialDistance {3} {4}'.format( + l2d_params, args.dem_decimation, approximate, + args.dem_initial_distance, tree.odm_georeferencing), env_paths) + else: + log.ODM_INFO("Will skip classification, only DSM is needed") + copyfile(tree.odm_georeferencing_model_las, os.path.join(odm_dem_root, 'bounds-0_l2d_s{slope}c{cellsize}.las'.format(**kwargs))) + + products = [] + if args.dsm: products.append('dsm') + if args.dtm: products.append('dtm') + + radius_steps = [args.dem_resolution] + for _ in range(args.dem_gapfill_steps - 1): + radius_steps.append(radius_steps[-1] * 3) # 
3 is arbitrary, maybe there's a better value? + + for product in products: + demargs = { + 'product': product, + 'indir': odm_dem_root, + 'l2d_params': l2d_params, + 'maxsd': args.dem_maxsd, + 'maxangle': args.dem_maxangle, + 'resolution': args.dem_resolution, + 'radius_steps': ' '.join(map(str, radius_steps)), + 'gapfill': '--gapfill' if args.dem_gapfill_steps > 0 else '', + + # If we didn't run a classification, we should pass the decimate parameter here + 'decimation': '--decimation {0}'.format(args.dem_decimation) if not run_classification else '' + } + + system.run('l2d_dems {product} {indir} {l2d_params} ' + '--maxsd {maxsd} --maxangle {maxangle} ' + '--resolution {resolution} --radius {radius_steps} ' + '{decimation} ' + '{gapfill} '.format(**demargs), env_paths) + + # Rename final output + if product == 'dsm': + os.rename(os.path.join(odm_dem_root, 'bounds-0_dsm.idw.tif'), dsm_output_filename) + elif product == 'dtm': + os.rename(os.path.join(odm_dem_root, 'bounds-0_dtm.idw.tif'), dtm_output_filename) + + else: + log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root) + else: + log.ODM_WARNING('DEM will not be generated') + + if args.time: + system.benchmark(start_time, tree.benchmarking, 'Dem') + + log.ODM_INFO('Running ODM DEM Cell - Finished') + return ecto.OK if args.end_with != 'odm_dem' else ecto.QUIT diff --git a/scripts/odm_georeferencing.py b/scripts/odm_georeferencing.py new file mode 100644 index 000000000..0ca6e1671 --- /dev/null +++ b/scripts/odm_georeferencing.py @@ -0,0 +1,197 @@ +import ecto +import csv +import os + +from opendm import io +from opendm import log +from opendm import types +from opendm import system +from opendm import context + + +class ODMGeoreferencingCell(ecto.Cell): + def declare_params(self, params): + params.declare("gcp_file", 'path to the file containing the ground control ' + 'points used for georeferencing.The file needs to ' + 'be on the following line format: \neasting ' + 'northing height pixelrow 
pixelcol imagename', 'gcp_list.txt') + params.declare("use_exif", 'use exif', False) + params.declare("verbose", 'print additional messages to console', False) + + def declare_io(self, params, inputs, outputs): + inputs.declare("tree", "Struct with paths", []) + inputs.declare("args", "The application arguments.", {}) + inputs.declare("photos", "list of ODMPhoto's", []) + inputs.declare("reconstruction", "list of ODMReconstructions", []) + outputs.declare("reconstruction", "list of ODMReconstructions", []) + + def process(self, inputs, outputs): + + # find a file in the root directory + def find(file, dir): + for root, dirs, files in os.walk(dir): + return '/'.join((root, file)) if file in files else None + + # Benchmarking + start_time = system.now_raw() + + log.ODM_INFO('Running ODM Georeferencing Cell') + + # get inputs + args = self.inputs.args + tree = self.inputs.tree + gcpfile = io.join_paths(tree.root_path, self.params.gcp_file) \ + if self.params.gcp_file else find('gcp_list.txt', tree.root_path) + geocreated = True + verbose = '-verbose' if self.params.verbose else '' + + # define paths and create working directories + system.mkdir_p(tree.odm_georeferencing) + if args.use_25dmesh: system.mkdir_p(tree.odm_25dgeoreferencing) + + # in case a gcp file it's not provided, let's try to generate it using + # images metadata. Internally calls jhead. + log.ODM_DEBUG(self.params.gcp_file) + if not self.params.gcp_file: # and \ + # not io.file_exists(tree.odm_georeferencing_coords): + + log.ODM_WARNING('No coordinates file. 
' + 'Generating coordinates file: %s' + % tree.odm_georeferencing_coords) + + # odm_georeference definitions + kwargs = { + 'bin': context.odm_modules_path, + 'imgs': tree.dataset_raw, + 'imgs_list': tree.opensfm_bundle_list, + 'coords': tree.odm_georeferencing_coords, + 'log': tree.odm_georeferencing_utm_log, + 'verbose': verbose + } + + # run UTM extraction binary + extract_utm = system.run_and_return('{bin}/odm_extract_utm -imagesPath {imgs}/ ' + '-imageListFile {imgs_list} -outputCoordFile {coords} {verbose} ' + '-logFile {log}'.format(**kwargs)) + + if extract_utm != '': + log.ODM_WARNING('Could not generate coordinates file. ' + 'Ignore if there is a GCP file. Error: %s' + % extract_utm) + + + # check if we rerun cell or not + rerun_cell = (args.rerun is not None and + args.rerun == 'odm_georeferencing') or \ + (args.rerun_all) or \ + (args.rerun_from is not None and + 'odm_georeferencing' in args.rerun_from) + + runs = [{ + 'georeferencing_dir': tree.odm_georeferencing, + 'texturing_dir': tree.odm_texturing, + 'model': os.path.join(tree.odm_texturing, tree.odm_textured_model_obj) + }] + if args.use_25dmesh: + runs += [{ + 'georeferencing_dir': tree.odm_25dgeoreferencing, + 'texturing_dir': tree.odm_25dtexturing, + 'model': os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj) + }] + + for r in runs: + odm_georeferencing_model_obj_geo = os.path.join(r['texturing_dir'], tree.odm_georeferencing_model_obj_geo) + odm_georeferencing_model_ply_geo = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_model_ply_geo) + odm_georeferencing_log = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_log) + odm_georeferencing_transform_file = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_transform_file) + + if not io.file_exists(odm_georeferencing_model_obj_geo) or \ + not io.file_exists(odm_georeferencing_model_ply_geo) or rerun_cell: + + # odm_georeference definitions + kwargs = { + 'bin': context.odm_modules_path, + 
'bundle': tree.opensfm_bundle, + 'imgs': tree.dataset_raw, + 'imgs_list': tree.opensfm_bundle_list, + 'model': r['model'], + 'log': odm_georeferencing_log, + 'transform_file': odm_georeferencing_transform_file, + 'coords': tree.odm_georeferencing_coords, + 'pc_geo': odm_georeferencing_model_ply_geo, + 'geo_sys': os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_model_txt_geo), + 'model_geo': odm_georeferencing_model_obj_geo, + 'gcp': gcpfile, + 'verbose': verbose + + } + if not args.use_pmvs: + kwargs['pc'] = tree.opensfm_model + else: + kwargs['pc'] = tree.pmvs_model + + # Check to see if the GCP file exists + + if not self.params.use_exif and (self.params.gcp_file or find('gcp_list.txt', tree.root_path)): + log.ODM_INFO('Found %s' % gcpfile) + try: + system.run('{bin}/odm_georef -bundleFile {bundle} -imagesPath {imgs} -imagesListPath {imgs_list} ' + '-inputFile {model} -outputFile {model_geo} ' + '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} {verbose} ' + '-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys} -gcpFile {gcp} ' + '-outputCoordFile {coords}'.format(**kwargs)) + except Exception: + log.ODM_EXCEPTION('Georeferencing failed. ') + return ecto.QUIT + elif io.file_exists(tree.odm_georeferencing_coords): + log.ODM_INFO('Running georeferencing with generated coords file.') + system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} ' + '-inputFile {model} -outputFile {model_geo} ' + '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} {verbose} ' + '-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys}'.format(**kwargs)) + else: + log.ODM_WARNING('Georeferencing failed. Make sure your ' + 'photos have geotags in the EXIF or you have ' + 'provided a GCP file. 
') + geocreated = False # skip the rest of the georeferencing + + odm_georeferencing_model_ply_geo = os.path.join(tree.odm_georeferencing, tree.odm_georeferencing_model_ply_geo) + if geocreated: + # update images metadata + geo_ref = types.ODM_GeoRef() + geo_ref.parse_coordinate_system(tree.odm_georeferencing_coords) + + for idx, photo in enumerate(self.inputs.photos): + geo_ref.utm_to_latlon(tree.odm_georeferencing_latlon, photo, idx) + + # convert ply model to LAS reference system + geo_ref.convert_to_las(odm_georeferencing_model_ply_geo, + tree.odm_georeferencing_model_las, + tree.odm_georeferencing_las_json) + + # XYZ point cloud output + log.ODM_INFO("Creating geo-referenced CSV file (XYZ format)") + with open(tree.odm_georeferencing_xyz_file, "wb") as csvfile: + csvfile_writer = csv.writer(csvfile, delimiter=",") + reachedpoints = False + with open(odm_georeferencing_model_ply_geo) as f: + for lineNumber, line in enumerate(f): + if reachedpoints: + tokens = line.split(" ") + csv_line = [float(tokens[0])+geo_ref.utm_east_offset, + float(tokens[1])+geo_ref.utm_north_offset, + tokens[2]] + csvfile_writer.writerow(csv_line) + if line.startswith("end_header"): + reachedpoints = True + csvfile.close() + + else: + log.ODM_WARNING('Found a valid georeferenced model in: %s' + % odm_georeferencing_model_ply_geo) + + if args.time: + system.benchmark(start_time, tree.benchmarking, 'Georeferencing') + + log.ODM_INFO('Running ODM Georeferencing Cell - Finished') + return ecto.OK if args.end_with != 'odm_georeferencing' else ecto.QUIT diff --git a/scripts/odm_meshing.py b/scripts/odm_meshing.py new file mode 100644 index 000000000..8a05552c6 --- /dev/null +++ b/scripts/odm_meshing.py @@ -0,0 +1,116 @@ +import ecto + +from opendm import log +from opendm import io +from opendm import system +from opendm import context + + +class ODMeshingCell(ecto.Cell): + def declare_params(self, params): + params.declare("max_vertex", 'The maximum vertex count of the output ' + 'mesh', 
100000) + params.declare("oct_tree", 'Oct-tree depth used in the mesh reconstruction, ' + 'increase to get more vertices, recommended ' + 'values are 8-12', 9) + params.declare("samples", 'Number of points per octree node, recommended ' + 'value: 1.0', 1) + params.declare("solver", 'Oct-tree depth at which the Laplacian equation ' + 'is solved in the surface reconstruction step. ' + 'Increasing this value increases computation ' + 'times slightly but helps reduce memory usage.', 9) + + params.declare("remove_outliers", 'Percentage of outliers to remove from the point set. Set to 0 to disable. ' + 'Applies to 2.5D mesh only.', 2) + params.declare("wlop_iterations", 'Iterations of the Weighted Locally Optimal Projection (WLOP) simplification algorithm. ' + 'Higher values take longer but produce a smoother mesh. ' + 'Applies to 2.5D mesh only. ', 70) + + params.declare("verbose", 'print additional messages to console', False) + + def declare_io(self, params, inputs, outputs): + inputs.declare("tree", "Struct with paths", []) + inputs.declare("args", "The application arguments.", {}) + inputs.declare("reconstruction", "Clusters output. list of ODMReconstructions", []) + outputs.declare("reconstruction", "Clusters output. 
list of ODMReconstructions", []) + + def process(self, inputs, outputs): + + # Benchmarking + start_time = system.now_raw() + + log.ODM_INFO('Running ODM Meshing Cell') + + # get inputs + args = self.inputs.args + tree = self.inputs.tree + verbose = '-verbose' if self.params.verbose else '' + + # define paths and create working directories + system.mkdir_p(tree.odm_meshing) + + # check if we rerun cell or not + rerun_cell = (args.rerun is not None and + args.rerun == 'odm_meshing') or \ + (args.rerun_all) or \ + (args.rerun_from is not None and + 'odm_meshing' in args.rerun_from) + + infile = tree.opensfm_model + if args.use_pmvs: + infile = tree.pmvs_model + + if not io.file_exists(tree.odm_mesh) or rerun_cell: + log.ODM_DEBUG('Writing ODM Mesh file in: %s' % tree.odm_mesh) + + kwargs = { + 'bin': context.odm_modules_path, + 'outfile': tree.odm_mesh, + 'infile': infile, + 'log': tree.odm_meshing_log, + 'max_vertex': self.params.max_vertex, + 'oct_tree': self.params.oct_tree, + 'samples': self.params.samples, + 'solver': self.params.solver, + 'verbose': verbose + } + + # run meshing binary + system.run('{bin}/odm_meshing -inputFile {infile} ' + '-outputFile {outfile} -logFile {log} ' + '-maxVertexCount {max_vertex} -octreeDepth {oct_tree} {verbose} ' + '-samplesPerNode {samples} -solverDivide {solver}'.format(**kwargs)) + else: + log.ODM_WARNING('Found a valid ODM Mesh file in: %s' % + tree.odm_mesh) + + # Do we need to generate a 2.5D mesh also? 
+ if args.use_25dmesh: + if not io.file_exists(tree.odm_25dmesh) or rerun_cell: + log.ODM_DEBUG('Writing ODM 2.5D Mesh file in: %s' % tree.odm_25dmesh) + + kwargs = { + 'bin': context.odm_modules_path, + 'outfile': tree.odm_25dmesh, + 'infile': infile, + 'log': tree.odm_25dmeshing_log, + 'verbose': verbose, + 'max_vertex': self.params.max_vertex, + 'remove_outliers': self.params.remove_outliers, + 'wlop_iterations': self.params.wlop_iterations + } + + # run 2.5D meshing binary + system.run('{bin}/odm_25dmeshing -inputFile {infile} ' + '-outputFile {outfile} -logFile {log} ' + '-maxVertexCount {max_vertex} -outliersRemovalPercentage {remove_outliers} ' + '-wlopIterations {wlop_iterations} {verbose}'.format(**kwargs)) + else: + log.ODM_WARNING('Found a valid ODM 2.5D Mesh file in: %s' % + tree.odm_25dmesh) + + if args.time: + system.benchmark(start_time, tree.benchmarking, 'Meshing') + + log.ODM_INFO('Running ODM Meshing Cell - Finished') + return ecto.OK if args.end_with != 'odm_meshing' else ecto.QUIT diff --git a/scripts/odm_orthophoto.py b/scripts/odm_orthophoto.py new file mode 100644 index 000000000..4174af428 --- /dev/null +++ b/scripts/odm_orthophoto.py @@ -0,0 +1,155 @@ +import ecto, os + +from opendm import io +from opendm import log +from opendm import system +from opendm import context +from opendm import types + + +class ODMOrthoPhotoCell(ecto.Cell): + def declare_params(self, params): + params.declare("resolution", 'Orthophoto ground resolution in pixels/meter', 20) + params.declare("t_srs", 'Target SRS', None) + params.declare("no_tiled", 'Do not tile tiff', False) + params.declare("compress", 'Compression type', 'DEFLATE') + params.declare("bigtiff", 'Make BigTIFF orthophoto', 'IF_SAFER') + params.declare("build_overviews", 'Build overviews', False) + params.declare("verbose", 'print additional messages to console', False) + + def declare_io(self, params, inputs, outputs): + inputs.declare("tree", "Struct with paths", []) + inputs.declare("args", "The 
application arguments.", {}) + inputs.declare("reconstruction", "list of ODMReconstructions", []) + + def process(self, inputs, outputs): + + # Benchmarking + start_time = system.now_raw() + + log.ODM_INFO('Running ODM Orthophoto Cell') + + # get inputs + args = self.inputs.args + tree = self.inputs.tree + verbose = '-verbose' if self.params.verbose else '' + + # define paths and create working directories + system.mkdir_p(tree.odm_orthophoto) + + # check if we rerun cell or not + rerun_cell = (args.rerun is not None and + args.rerun == 'odm_orthophoto') or \ + (args.rerun_all) or \ + (args.rerun_from is not None and + 'odm_orthophoto' in args.rerun_from) + + if not io.file_exists(tree.odm_orthophoto_file) or rerun_cell: + + # odm_orthophoto definitions + kwargs = { + 'bin': context.odm_modules_path, + 'log': tree.odm_orthophoto_log, + 'ortho': tree.odm_orthophoto_file, + 'corners': tree.odm_orthophoto_corners, + 'res': self.params.resolution, + 'verbose': verbose + } + + # Have geo coordinates? + if io.file_exists(tree.odm_georeferencing_coords): + if args.use_25dmesh: + kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_georeferencing_model_obj_geo) + else: + kwargs['model_geo'] = os.path.join(tree.odm_texturing, tree.odm_georeferencing_model_obj_geo) + else: + if args.use_25dmesh: + kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj) + else: + kwargs['model_geo'] = os.path.join(tree.odm_texturing, tree.odm_textured_model_obj) + + # run odm_orthophoto + system.run('{bin}/odm_orthophoto -inputFile {model_geo} ' + '-logFile {log} -outputFile {ortho} -resolution {res} {verbose} ' + '-outputCornerFile {corners}'.format(**kwargs)) + + if not io.file_exists(tree.odm_georeferencing_coords): + log.ODM_WARNING('No coordinates file. 
A georeferenced raster ' + 'will not be created') + else: + # Create georeferenced GeoTiff + geotiffcreated = False + georef = types.ODM_GeoRef() + # creates the coord refs # TODO I don't want to have to do this twice- after odm_georef + georef.parse_coordinate_system(tree.odm_georeferencing_coords) + + if georef.epsg and georef.utm_east_offset and georef.utm_north_offset: + ulx = uly = lrx = lry = 0.0 + with open(tree.odm_orthophoto_corners) as f: + for lineNumber, line in enumerate(f): + if lineNumber == 0: + tokens = line.split(' ') + if len(tokens) == 4: + ulx = float(tokens[0]) + \ + float(georef.utm_east_offset) + lry = float(tokens[1]) + \ + float(georef.utm_north_offset) + lrx = float(tokens[2]) + \ + float(georef.utm_east_offset) + uly = float(tokens[3]) + \ + float(georef.utm_north_offset) + log.ODM_INFO('Creating GeoTIFF') + + kwargs = { + 'ulx': ulx, + 'uly': uly, + 'lrx': lrx, + 'lry': lry, + 'tiled': '' if self.params.no_tiled else '-co TILED=yes ', + 'compress': self.params.compress, + 'predictor': '-co PREDICTOR=2 ' if self.params.compress in + ['LZW', 'DEFLATE'] else '', + 'epsg': georef.epsg, + 't_srs': self.params.t_srs or "EPSG:{0}".format(georef.epsg), + 'bigtiff': self.params.bigtiff, + 'png': tree.odm_orthophoto_file, + 'tiff': tree.odm_orthophoto_tif, + 'log': tree.odm_orthophoto_tif_log + } + + system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} ' + '{tiled} ' + '-co BIGTIFF={bigtiff} ' + '-co COMPRESS={compress} ' + '{predictor} ' + '-co BLOCKXSIZE=512 ' + '-co BLOCKYSIZE=512 ' + '-co NUM_THREADS=ALL_CPUS ' + '-a_srs \"EPSG:{epsg}\" ' + '{png} {tiff} > {log}'.format(**kwargs)) + + if self.params.build_overviews: + log.ODM_DEBUG("Building Overviews") + kwargs = { + 'orthophoto': tree.odm_orthophoto_tif, + 'log': tree.odm_orthophoto_gdaladdo_log + } + # Run gdaladdo + system.run('gdaladdo -ro -r average ' + '--config BIGTIFF_OVERVIEW IF_SAFER ' + '--config COMPRESS_OVERVIEW JPEG ' + '{orthophoto} 2 4 8 16 > {log}'.format(**kwargs)) + 
+ geotiffcreated = True + if not geotiffcreated: + log.ODM_WARNING('No geo-referenced orthophoto created due ' + 'to missing geo-referencing or corner coordinates.') + + else: + log.ODM_WARNING('Found a valid orthophoto in: %s' % tree.odm_orthophoto_file) + + if args.time: + system.benchmark(start_time, tree.benchmarking, 'Orthophoto') + + log.ODM_INFO('Running ODM OrthoPhoto Cell - Finished') + return ecto.OK if args.end_with != 'odm_orthophoto' else ecto.QUIT diff --git a/scripts/odm_slam.py b/scripts/odm_slam.py new file mode 100644 index 000000000..b905f4a56 --- /dev/null +++ b/scripts/odm_slam.py @@ -0,0 +1,111 @@ +"""Cell to run odm_slam.""" + +import os + +import ecto + +from opendm import log +from opendm import io +from opendm import system +from opendm import context + + +class ODMSlamCell(ecto.Cell): + """Run odm_slam on a video and export to opensfm format.""" + + def declare_params(self, params): + """Cell parameters.""" + pass + + def declare_io(self, params, inputs, outputs): + """Cell inputs and outputs.""" + inputs.declare("tree", "Struct with paths", []) + inputs.declare("args", "The application arguments.", {}) + outputs.declare("reconstruction", "list of ODMReconstructions", []) + + def process(self, inputs, outputs): + """Run the cell.""" + log.ODM_INFO('Running OMD Slam Cell') + + # get inputs + tree = self.inputs.tree + args = self.inputs.args + video = os.path.join(tree.root_path, args.video) + slam_config = os.path.join(tree.root_path, args.slam_config) + + if not video: + log.ODM_ERROR('No video provided') + return ecto.QUIT + + # create working directories + system.mkdir_p(tree.opensfm) + system.mkdir_p(tree.pmvs) + + vocabulary = os.path.join(context.orb_slam2_path, + 'Vocabulary/ORBvoc.txt') + orb_slam_cmd = os.path.join(context.odm_modules_path, 'odm_slam') + trajectory = os.path.join(tree.opensfm, 'KeyFrameTrajectory.txt') + map_points = os.path.join(tree.opensfm, 'MapPoints.txt') + + # check if we rerun cell or not + rerun_cell = 
args.rerun == 'slam' + + # check if slam was run before + if not io.file_exists(trajectory) or rerun_cell: + # run slam binary + system.run(' '.join([ + 'cd {} &&'.format(tree.opensfm), + orb_slam_cmd, + vocabulary, + slam_config, + video, + ])) + else: + log.ODM_WARNING('Found a valid slam trajectory in: {}'.format( + trajectory)) + + # check if trajectory was exported to opensfm before + if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell: + # convert slam to opensfm + system.run(' '.join([ + 'cd {} &&'.format(tree.opensfm), + 'PYTHONPATH={}:{}'.format(context.pyopencv_path, + context.opensfm_path), + 'python', + os.path.join(context.odm_modules_src_path, + 'odm_slam/src/orb_slam_to_opensfm.py'), + video, + trajectory, + map_points, + slam_config, + ])) + # link opensfm images to resized images + os.symlink(tree.opensfm + '/images', tree.dataset_resize) + else: + log.ODM_WARNING('Found a valid OpenSfM file in: {}'.format( + tree.opensfm_reconstruction)) + + # check if reconstruction was exported to bundler before + if not io.file_exists(tree.opensfm_bundle_list) or rerun_cell: + # convert back to bundler's format + system.run( + 'PYTHONPATH={} {}/bin/export_bundler {}'.format( + context.pyopencv_path, context.opensfm_path, tree.opensfm)) + else: + log.ODM_WARNING( + 'Found a valid Bundler file in: {}'.format( + tree.opensfm_reconstruction)) + + # check if reconstruction was exported to pmvs before + if not io.file_exists(tree.pmvs_visdat) or rerun_cell: + # run PMVS converter + system.run( + 'PYTHONPATH={} {}/bin/export_pmvs {} --output {}'.format( + context.pyopencv_path, context.opensfm_path, tree.opensfm, + tree.pmvs)) + else: + log.ODM_WARNING('Found a valid CMVS file in: {}'.format( + tree.pmvs_visdat)) + + log.ODM_INFO('Running OMD Slam Cell - Finished') + return ecto.OK if args.end_with != 'odm_slam' else ecto.QUIT diff --git a/scripts/opensfm.py b/scripts/opensfm.py new file mode 100644 index 000000000..d61f36b5d --- /dev/null +++ 
b/scripts/opensfm.py @@ -0,0 +1,172 @@ +import ecto + +from opendm import log +from opendm import io +from opendm import system +from opendm import context + + +class ODMOpenSfMCell(ecto.Cell): + def declare_params(self, params): + params.declare("use_exif_size", "The application arguments.", False) + params.declare("feature_process_size", "The application arguments.", 2400) + params.declare("feature_min_frames", "The application arguments.", 4000) + params.declare("processes", "The application arguments.", context.num_cores) + params.declare("matching_gps_neighbors", "The application arguments.", 8) + params.declare("matching_gps_distance", "The application arguments.", 0) + params.declare("fixed_camera_params", "Optimize internal camera parameters", True) + params.declare("hybrid_bundle_adjustment", "Use local + global bundle adjustment", False) + + def declare_io(self, params, inputs, outputs): + inputs.declare("tree", "Struct with paths", []) + inputs.declare("args", "The application arguments.", {}) + inputs.declare("photos", "list of ODMPhoto's", []) + outputs.declare("reconstruction", "list of ODMReconstructions", []) + + def process(self, inputs, outputs): + + # Benchmarking + start_time = system.now_raw() + + log.ODM_INFO('Running ODM OpenSfM Cell') + + # get inputs + tree = self.inputs.tree + args = self.inputs.args + photos = self.inputs.photos + + if not photos: + log.ODM_ERROR('Not enough photos in photos array to start OpenSfM') + return ecto.QUIT + + # create working directories + system.mkdir_p(tree.opensfm) + system.mkdir_p(tree.pmvs) + + # check if we rerun cell or not + rerun_cell = (args.rerun is not None and + args.rerun == 'opensfm') or \ + (args.rerun_all) or \ + (args.rerun_from is not None and + 'opensfm' in args.rerun_from) + + if not args.use_pmvs: + output_file = tree.opensfm_model + else: + output_file = tree.opensfm_reconstruction + + # check if reconstruction was done before + if not io.file_exists(output_file) or rerun_cell: + # 
create file list + list_path = io.join_paths(tree.opensfm, 'image_list.txt') + has_alt = True + with open(list_path, 'w') as fout: + for photo in photos: + if not photo.altitude: + has_alt = False + fout.write('%s\n' % photo.path_file) + + # create config file for OpenSfM + config = [ + "use_exif_size: %s" % ('no' if not self.params.use_exif_size else 'yes'), + "feature_process_size: %s" % self.params.feature_process_size, + "feature_min_frames: %s" % self.params.feature_min_frames, + "processes: %s" % self.params.processes, + "matching_gps_neighbors: %s" % self.params.matching_gps_neighbors, + "optimize_camera_parameters: %s" % ('no' if self.params.fixed_camera_params else 'yes') + ] + + if has_alt: + log.ODM_DEBUG("Altitude data detected, enabling it for GPS alignment") + config.append("use_altitude_tag: True") + config.append("align_method: naive") + + if args.use_hybrid_bundle_adjustment: + log.ODM_DEBUG("Enabling hybrid bundle adjustment") + config.append("bundle_interval: 100") # Bundle after adding 'bundle_interval' cameras + config.append("bundle_new_points_ratio: 1.2") # Bundle when (new points) / (bundled points) > bundle_new_points_ratio + config.append("local_bundle_radius: 1") # Max image graph distance for images to be included in local bundle adjustment + + if args.matcher_distance > 0: + config.append("matching_gps_distance: %s" % self.params.matching_gps_distance) + + # write config file + config_filename = io.join_paths(tree.opensfm, 'config.yaml') + with open(config_filename, 'w') as fout: + fout.write("\n".join(config)) + + # run OpenSfM reconstruction + matched_done_file = io.join_paths(tree.opensfm, 'matching_done.txt') + if not io.file_exists(matched_done_file) or rerun_cell: + system.run('PYTHONPATH=%s %s/bin/opensfm extract_metadata %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + system.run('PYTHONPATH=%s %s/bin/opensfm detect_features %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + 
system.run('PYTHONPATH=%s %s/bin/opensfm match_features %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + with open(matched_done_file, 'w') as fout: + fout.write("Matching done!\n") + else: + log.ODM_WARNING('Found a feature matching done progress file in: %s' % + matched_done_file) + + if not io.file_exists(tree.opensfm_tracks) or rerun_cell: + system.run('PYTHONPATH=%s %s/bin/opensfm create_tracks %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + else: + log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' % + tree.opensfm_tracks) + + if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell: + system.run('PYTHONPATH=%s %s/bin/opensfm reconstruct %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + else: + log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' % + tree.opensfm_reconstruction) + + if not io.file_exists(tree.opensfm_reconstruction_meshed) or rerun_cell: + system.run('PYTHONPATH=%s %s/bin/opensfm mesh %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + else: + log.ODM_WARNING('Found a valid OpenSfM meshed reconstruction file in: %s' % + tree.opensfm_reconstruction_meshed) + + if not args.use_pmvs: + if not io.file_exists(tree.opensfm_reconstruction_nvm) or rerun_cell: + system.run('PYTHONPATH=%s %s/bin/opensfm export_visualsfm %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + else: + log.ODM_WARNING('Found a valid OpenSfM NVM reconstruction file in: %s' % + tree.opensfm_reconstruction_nvm) + + system.run('PYTHONPATH=%s %s/bin/opensfm undistort %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + system.run('PYTHONPATH=%s %s/bin/opensfm compute_depthmaps %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + else: + log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' % + tree.opensfm_reconstruction) + + # check if reconstruction was exported to bundler before + if not 
io.file_exists(tree.opensfm_bundle_list) or rerun_cell: + # convert back to bundler's format + system.run('PYTHONPATH=%s %s/bin/export_bundler %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm)) + else: + log.ODM_WARNING('Found a valid Bundler file in: %s' % + tree.opensfm_reconstruction) + + if args.use_pmvs: + # check if reconstruction was exported to pmvs before + if not io.file_exists(tree.pmvs_visdat) or rerun_cell: + # run PMVS converter + system.run('PYTHONPATH=%s %s/bin/export_pmvs %s --output %s' % + (context.pyopencv_path, context.opensfm_path, tree.opensfm, tree.pmvs)) + else: + log.ODM_WARNING('Found a valid CMVS file in: %s' % tree.pmvs_visdat) + + if args.time: + system.benchmark(start_time, tree.benchmarking, 'OpenSfM') + + log.ODM_INFO('Running ODM OpenSfM Cell - Finished') + return ecto.OK if args.end_with != 'opensfm' else ecto.QUIT diff --git a/scripts/pmvs.py b/scripts/pmvs.py new file mode 100644 index 000000000..93f1f00fa --- /dev/null +++ b/scripts/pmvs.py @@ -0,0 +1,82 @@ +import ecto + +from opendm import io +from opendm import log +from opendm import system +from opendm import context + + +class ODMPmvsCell(ecto.Cell): + def declare_params(self, params): + params.declare("level", 'The level in the image pyramid that is used ' + 'for the computation', 1) + params.declare("csize", 'Cell size controls the density of reconstructions', 2) + params.declare("thresh", 'A patch reconstruction is accepted as a success ' + 'and kept, if its associcated photometric consistency ' + 'measure is above this threshold.', 0.7) + params.declare("wsize", 'pmvs samples wsize x wsize pixel colors from ' + 'each image to compute photometric consistency ' + 'score. For example, when wsize=7, 7x7=49 pixel ' + 'colors are sampled in each image. 
Increasing the ' + 'value leads to more stable reconstructions, but ' + 'the program becomes slower.', 7) + params.declare("min_imgs", 'Each 3D point must be visible in at least ' + 'minImageNum images for being reconstructed. 3 is ' + 'suggested in general.', 3) + params.declare("cores", 'The maximum number of cores to use in dense ' + ' reconstruction.', context.num_cores) + + def declare_io(self, params, inputs, outputs): + inputs.declare("tree", "Struct with paths", []) + inputs.declare("args", "The application arguments.", {}) + inputs.declare("reconstruction", "list of ODMReconstructions", []) + outputs.declare("reconstruction", "list of ODMReconstructions", []) + + def process(self, inputs, outputs): + + # Benchmarking + start_time = system.now_raw() + + log.ODM_INFO('Running OMD PMVS Cell') + + # get inputs + args = self.inputs.args + tree = self.inputs.tree + + # check if we rerun cell or not + rerun_cell = (args.rerun is not None and + args.rerun == 'pmvs') or \ + (args.rerun_all) or \ + (args.rerun_from is not None and + 'pmvs' in args.rerun_from) + + if not io.file_exists(tree.pmvs_model) or rerun_cell: + log.ODM_DEBUG('Creating dense pointcloud in: %s' % tree.pmvs_model) + + kwargs = { + 'bin': context.cmvs_opts_path, + 'prefix': tree.pmvs_rec_path, + 'level': self.params.level, + 'csize': self.params.csize, + 'thresh': self.params.thresh, + 'wsize': self.params.wsize, + 'min_imgs': self.params.min_imgs, + 'cores': self.params.cores + } + + # generate pmvs2 options + system.run('{bin} {prefix}/ {level} {csize} {thresh} {wsize} ' + '{min_imgs} {cores}'.format(**kwargs)) + + # run pmvs2 + system.run('%s %s/ option-0000' % + (context.pmvs2_path, tree.pmvs_rec_path)) + + else: + log.ODM_WARNING('Found a valid PMVS file in %s' % tree.pmvs_model) + + if args.time: + system.benchmark(start_time, tree.benchmarking, 'PMVS') + + log.ODM_INFO('Running ODM PMVS Cell - Finished') + return ecto.OK if args.end_with != 'pmvs' else ecto.QUIT diff --git 
a/scripts/pmvs2nvmcams.py b/scripts/pmvs2nvmcams.py new file mode 100644 index 000000000..f9754579e --- /dev/null +++ b/scripts/pmvs2nvmcams.py @@ -0,0 +1,144 @@ +import os +import numpy as np + +from opendm import log + +# Go from QR-factorizatoin to corresponding RQ-factorization. +def rq(A): + Q,R = np.linalg.qr(np.flipud(A).T) + R = np.flipud(R.T) + Q = Q.T + return R[:,::-1],Q[::-1,:] + +# Create a unit quaternion from rotation matrix. +def rot2quat(R): + + # Float epsilon (use square root to be well with the stable region). + eps = np.sqrt(np.finfo(float).eps) + + # If the determinant is not 1, it's not a rotation matrix + if np.abs(np.linalg.det(R) - 1.0) > eps: + log.ODM_ERROR('Matrix passed to rot2quat was not a rotation matrix, det != 1.0') + + tr = np.trace(R) + + quat = np.zeros((1,4)) + + # Is trace big enough be computationally stable? + if tr > eps: + S = 0.5 / np.sqrt(tr + 1.0) + quat[0,0] = 0.25 / S + quat[0,1] = (R[2,1] - R[1,2]) * S + quat[0,2] = (R[0,2] - R[2,0]) * S + quat[0,3] = (R[1,0] - R[0,1]) * S + else: # It's not, use the largest diagonal. 
+ if R[0,0] > R[1,1] and R[0,0] > R[2,2]: + S = np.sqrt(1.0 + R[0,0] - R[1,1] - R[2,2]) * 2.0 + quat[0,0] = (R[2,1] - R[1,2]) / S + quat[0,1] = 0.25 * S + quat[0,2] = (R[0,1] + R[1,0]) / S + quat[0,3] = (R[0,2] + R[2,0]) / S + elif R[1,1] > R[2,2]: + S = np.sqrt(1.0 - R[0,0] + R[1,1] - R[2,2]) * 2.0 + quat[0,0] = (R[0,2] - R[2,0]) / S + quat[0,1] = (R[0,1] + R[1,0]) / S + quat[0,2] = 0.25 * S + quat[0,3] = (R[1,2] + R[2,1]) / S + else: + S = np.sqrt(1.0 - R[0,0] - R[1,1] + R[2,2]) * 2.0 + quat[0,0] = (R[1,0] - R[0,1]) / S + quat[0,1] = (R[0,2] + R[2,0]) / S + quat[0,2] = (R[1,2] + R[2,1]) / S + quat[0,3] = 0.25 * S + + return quat + +# Decompose a projection matrix into parts +# (Intrinsic projection, Rotation, Camera position) +def decomposeProjection(projectionMatrix): + + # Check input: + if projectionMatrix.shape != (3,4): + log.ODM_ERROR('Unable to decompose projection matrix, shape != (3,4)') + + RQ = rq(projectionMatrix[:,:3]) + + # Fix sign, since we know K is upper triangular and has a positive diagonal. + signMat = np.diag(np.diag(np.sign(RQ[0]))) + K = signMat*RQ[0] + R = signMat*RQ[1] + + # Calculate camera position from translation vector. + t = np.linalg.inv(-1.0*projectionMatrix[:,:3])*projectionMatrix[:,3] + + return K, R, t + +# Parses pvms contour file. +def parseContourFile(filePath): + + with open(filePath, 'r') as contourFile: + if (contourFile.readline().strip() != "CONTOUR"): + return np.array([]) + else: + pMatData = np.loadtxt(contourFile, float, '#', None, None, 0) + if pMatData.shape == (3,4): + return pMatData + return np.array([]) + + + +# Creates a .nvm camera file in the pmvs folder. 
+def run(pmvsFolder, outputFile): + + projectionFolder = pmvsFolder + "/txt" + imageFolder = pmvsFolder + "/visualize" + + pMatrices = [] + imageFileNames = [] + + # for all files in the visualize folder: + for imageFileName in os.listdir(imageFolder): + fileNameNoExt = os.path.splitext(imageFileName)[0] + + # look for corresponding projection matrix txt file + projectionFilePath = os.path.join(projectionFolder, fileNameNoExt) + projectionFilePath += ".txt" + if os.path.isfile(projectionFilePath): + pMatData = parseContourFile(projectionFilePath) + if pMatData.size == 0: + log.ODM_WARNING('Unable to parse contour file, skipping: %s' + % projectionFilePath) + else: + pMatrices.append(np.matrix(pMatData)) + imageFileNames.append(imageFileName) + + + # Decompose projection matrices + focals = [] + rotations = [] + translations = [] + for projection in pMatrices: + KRt = decomposeProjection(projection) + focals.append(KRt[0][0,0]) + rotations.append(rot2quat(KRt[1])) + translations.append(KRt[2]) + + # Create .nvm file + with open (outputFile, 'w') as nvmFile: + nvmFile.write("NVM_V3\n\n") + nvmFile.write('%d' % len(rotations) + "\n") + + for idx, imageFileName in enumerate(imageFileNames): + nvmFile.write(os.path.join("visualize", imageFileName)) + nvmFile.write(" " + '%f' % focals[idx]) + nvmFile.write(" " + '%f' % rotations[idx][0,0] + + " " + '%f' % rotations[idx][0,1] + + " " + '%f' % rotations[idx][0,2] + + " " + '%f' % rotations[idx][0,3]) + nvmFile.write(" " + '%f' % translations[idx][0] + + " " + '%f' % translations[idx][1] + + " " + '%f' % translations[idx][2]) + nvmFile.write(" 0 0\n") + nvmFile.write("0\n\n") + nvmFile.write("0\n\n") + nvmFile.write("0") diff --git a/settings.yaml b/settings.yaml new file mode 100644 index 000000000..6413e5902 --- /dev/null +++ b/settings.yaml @@ -0,0 +1,63 @@ +--- +# A list of global configuration variables +# Uncomment lines as needed to edit default settings. +# Note this only works for settings with default values. 
Some commands like --rerun +# or --force-ccd n will have to be set in the command line (if you need to) + +# This line is really important to set up properly +project_path: '' # Example: '/home/user/ODMProjects + +# The rest of the settings will default to the values set unless you uncomment and change them +#resize_to: 2048 +#start_with: 'resize' +#end_with: 'odm_orthophoto' +#rerun_all: False +#zip_results: False +#verbose: False +#time: False +#use_fixed_camera_params: False +#use_hybrid_bundle_adjustment: False +#opensfm_processes: 4 # by default this is set to $(nproc) +#min_num_features: 4000 +#matcher_threshold: 2.0 +#matcher_ratio: 0.6 +#matcher_neighbors: 8 +#matcher_distance: 0 +#use_pmvs: False # The cmvs/pmvs settings only matter if 'Enabled' is set to True +#cmvs_maximages: 500 +#pmvs_level: 1 +#pmvs_csize: 2 +#pmvs_threshold: 0.7 +#pmvs_wsize: 7 +#pmvs_min_images: 3 +#pmvs_num_cores: 4 # by default this is set to $(nproc) +#mesh_size: 100000 +#mesh_octree_depth: 9 +#mesh_samples: 1.0 +#mesh_solver_divide: 9 +#texturing_data_term: 'gmi' +#texturing_outlier_removal_type: 'gauss_clamping' +#texturing_skip_visibility_test: False +#texturing_skip_global_seam_leveling: False +#texturing_skip_local_seam_leveling: False +#texturing_skip_hole_filling: False +#texturing_keep_unseen_faces: False +#texturing_tone_mapping: 'none' +#gcp: !!null # YAML tag for None +#use_exif: False # Set to True if you have a GCP file (it auto-detects) and want to use EXIF +#dtm: False # Use this tag to build a DTM (Digital Terrain Model +#dsm: False # Use this tag to build a DSM (Digital Surface Model +#dem-gapfill-steps: 4 +#dem-resolution: 0.1 +#dem-maxangle:20 +#dem-maxsd: 2.5 +#dem-approximate: False +#dem-decimation: 1 +#dem-terrain-type: ComplexForest +#orthophoto_resolution: 20.0 # Pixels/meter +#orthophoto_target_srs: !!null # Currently does nothing +#orthophoto_no_tiled: False +#orthophoto_compression: DEFLATE # Options are [JPEG, LZW, PACKBITS, DEFLATE, LZMA, NONE] Don't 
change unless you know what you are doing +#orthophoto_bigtiff: IF_SAFER # Options are [YES, NO, IF_NEEDED, IF_SAFER] +#build_overviews: FALSE + diff --git a/tests/test_data/gcp_List_GCPposition.jpg b/tests/test_data/gcp_List_GCPposition.jpg new file mode 100644 index 000000000..6e9718a12 Binary files /dev/null and b/tests/test_data/gcp_List_GCPposition.jpg differ diff --git a/tests/test_data/gcp_list.txt b/tests/test_data/gcp_list.txt new file mode 100644 index 000000000..1abb09315 --- /dev/null +++ b/tests/test_data/gcp_list.txt @@ -0,0 +1,20 @@ +WGS84 UTM 32N +274914.738,4603349.014,400,2121.02804,-405.91779,DJI_0068.JPG +274914.738,4603349.014,400,2113.45101,-52.37843,DJI_0104.JPG +274914.738,4603349.014,400,2251.05140,-2510.08324,1JI_0076.JPG +274915.887,4603307.715,400,2189.08075,-1266.93925,DJI_0068.JPG +274915.887,4603307.715,400,1249.52906,-136.41574,DJI_0104.JPG +274915.887,4603307.715,400561.05432,-1057.69568,DJI_0083.JPG +274915.887,4603307.715,400,2239.44947,-1593.32652,1JI_0076.JPG +274985.284,4603319.756,400,3619.20999,-978.85879,DJI_0068.JPG +274985.284,4603319.756,400,1506.89252,-1535.16355,DJI_0104.JPG +274985.284,4603319.756,400,783.53534,-1793.26811,1JI_0076.JPG +274920.710,4603258.802,400,2342.84463,-2285.96671,DJI_0068.JPG +274920.710,4603258.802,400,305.37748,-303.36595,DJI_0104.JPG +274920.710,4603258.802,400,1531.52745,-896.39311,DJI_0083.JPG +274920.710,4603258.802,400,3671.08645,-2198.67114,DJI_0063.JPG +274920.710,4603258.802,400.1007.75409,-1286.05432,DJI_0074.JPG +274879.571,4603204.279,400,2693.25350,-1682.97313,DJI_0083.JPG +274879.571,4603204.279,400,2513.14252,-1413.82886,DJI_0063.JPG +274879.571,4603204.279,400,2150.56951,-2093.38493,DJI_0074.JPG + diff --git a/tests/test_data/gcp_list_README.txt b/tests/test_data/gcp_list_README.txt new file mode 100644 index 000000000..8169a9814 --- /dev/null +++ b/tests/test_data/gcp_list_README.txt @@ -0,0 +1,66 @@ +Do not exspect more acurate results with these coordinates!! 
+ + +These GCPs are reverse geotagged from an OpenDroneMap created Geotif +with WGS84 coordinates in the EXIF tags. + +I also do not have a height value for the points! +(Set to 400m for testing purposes...) + +----------- +CRS: +---- +WGS 84 / UTM zone 17N +EPSG: 32617 + +----------- +GCP point values: +---- +GCP_PointNr,X_Coordinate,Y_Coordinate +1,274914.738,4603349.014,400 +2,274915.887,4603307.715,400 +3,274985.284,4603319.756,400 +4,274920.710,4603258.802,400 +5,274879.571,4603204.279,400 +----------- + + +----------- +Better human readable gcp_list.txt +---- +WGS84 UTM 32N +## x1 y1 z1 pixelx1 pixely1 imagename1 +## 0/0 of Pixel Coordinate = Upper left hand corner + + + +# GCP 1 +274914.738,4603349.014,400,2121.02804,-405.91779,DJI_0068.JPG +274914.738,4603349.014,400,2113.45101,-52.37843,DJI_0104.JPG +274914.738,4603349.014,400,2251.05140,-2510.08324,1JI_0076.JPG + + +# GCP 2 +274915.887,4603307.715,400,2189.08075,-1266.93925,DJI_0068.JPG +274915.887,4603307.715,400,1249.52906,-136.41574,DJI_0104.JPG +274915.887,4603307.715,400561.05432,-1057.69568,DJI_0083.JPG +274915.887,4603307.715,400,2239.44947,-1593.32652,1JI_0076.JPG + +# GCP 3 +274985.284,4603319.756,400,3619.20999,-978.85879,DJI_0068.JPG +274985.284,4603319.756,400,1506.89252,-1535.16355,DJI_0104.JPG +274985.284,4603319.756,400,783.53534,-1793.26811,1JI_0076.JPG + + +# GCP 4 +274920.710,4603258.802,400,2342.84463,-2285.96671,DJI_0068.JPG +274920.710,4603258.802,400,305.37748,-303.36595,DJI_0104.JPG +274920.710,4603258.802,400,1531.52745,-896.39311,DJI_0083.JPG +274920.710,4603258.802,400,3671.08645,-2198.67114,DJI_0063.JPG +274920.710,4603258.802,400.1007.75409,-1286.05432,DJI_0074.JPG + +# GCP 5 +274879.571,4603204.279,400,2693.25350,-1682.97313,DJI_0083.JPG +274879.571,4603204.279,400,2513.14252,-1413.82886,DJI_0063.JPG +274879.571,4603204.279,400,2150.56951,-2093.38493,DJI_0074.JPG + diff --git a/tests/test_data/images/1JI_0064.JPG b/tests/test_data/images/1JI_0064.JPG new file mode 100644 index 
000000000..c46a1e3a7 Binary files /dev/null and b/tests/test_data/images/1JI_0064.JPG differ diff --git a/tests/test_data/images/1JI_0065.JPG b/tests/test_data/images/1JI_0065.JPG new file mode 100644 index 000000000..433a51fca Binary files /dev/null and b/tests/test_data/images/1JI_0065.JPG differ diff --git a/tests/test_data/images/1JI_0066.JPG b/tests/test_data/images/1JI_0066.JPG new file mode 100644 index 000000000..35f630baf Binary files /dev/null and b/tests/test_data/images/1JI_0066.JPG differ diff --git a/tests/test_data/images/1JI_0067.JPG b/tests/test_data/images/1JI_0067.JPG new file mode 100644 index 000000000..5af5613cf Binary files /dev/null and b/tests/test_data/images/1JI_0067.JPG differ diff --git a/tests/test_data/images/1JI_0068.JPG b/tests/test_data/images/1JI_0068.JPG new file mode 100644 index 000000000..93aae6583 Binary files /dev/null and b/tests/test_data/images/1JI_0068.JPG differ diff --git a/tests/test_data/images/1JI_0076.JPG b/tests/test_data/images/1JI_0076.JPG new file mode 100644 index 000000000..f410a8ef9 Binary files /dev/null and b/tests/test_data/images/1JI_0076.JPG differ diff --git a/tests/test_data/images/1JI_0077.JPG b/tests/test_data/images/1JI_0077.JPG new file mode 100644 index 000000000..188f3a590 Binary files /dev/null and b/tests/test_data/images/1JI_0077.JPG differ diff --git a/tests/test_data/images/1JI_0078.JPG b/tests/test_data/images/1JI_0078.JPG new file mode 100644 index 000000000..4f1a66fed Binary files /dev/null and b/tests/test_data/images/1JI_0078.JPG differ diff --git a/tests/test_data/images/1JI_0079.JPG b/tests/test_data/images/1JI_0079.JPG new file mode 100644 index 000000000..409e9b1d8 Binary files /dev/null and b/tests/test_data/images/1JI_0079.JPG differ diff --git a/tests/test_data/images/1JI_0080.JPG b/tests/test_data/images/1JI_0080.JPG new file mode 100644 index 000000000..16065e90b Binary files /dev/null and b/tests/test_data/images/1JI_0080.JPG differ diff --git 
a/tests/test_data/images/DJI_0061.JPG b/tests/test_data/images/DJI_0061.JPG new file mode 100644 index 000000000..1917e099a Binary files /dev/null and b/tests/test_data/images/DJI_0061.JPG differ diff --git a/tests/test_data/images/DJI_0062.JPG b/tests/test_data/images/DJI_0062.JPG new file mode 100644 index 000000000..b8706d4e9 Binary files /dev/null and b/tests/test_data/images/DJI_0062.JPG differ diff --git a/tests/test_data/images/DJI_0063.JPG b/tests/test_data/images/DJI_0063.JPG new file mode 100644 index 000000000..dce4dda3f Binary files /dev/null and b/tests/test_data/images/DJI_0063.JPG differ diff --git a/tests/test_data/images/DJI_0064.JPG b/tests/test_data/images/DJI_0064.JPG new file mode 100644 index 000000000..7067afed6 Binary files /dev/null and b/tests/test_data/images/DJI_0064.JPG differ diff --git a/tests/test_data/images/DJI_0065.JPG b/tests/test_data/images/DJI_0065.JPG new file mode 100644 index 000000000..4aea8e7c5 Binary files /dev/null and b/tests/test_data/images/DJI_0065.JPG differ diff --git a/tests/test_data/images/DJI_0066.JPG b/tests/test_data/images/DJI_0066.JPG new file mode 100644 index 000000000..8246e2153 Binary files /dev/null and b/tests/test_data/images/DJI_0066.JPG differ diff --git a/tests/test_data/images/DJI_0067.JPG b/tests/test_data/images/DJI_0067.JPG new file mode 100644 index 000000000..2f85bde95 Binary files /dev/null and b/tests/test_data/images/DJI_0067.JPG differ diff --git a/tests/test_data/images/DJI_0068.JPG b/tests/test_data/images/DJI_0068.JPG new file mode 100644 index 000000000..db4f33b32 Binary files /dev/null and b/tests/test_data/images/DJI_0068.JPG differ diff --git a/tests/test_data/images/DJI_0069.JPG b/tests/test_data/images/DJI_0069.JPG new file mode 100644 index 000000000..2efd6b3d9 Binary files /dev/null and b/tests/test_data/images/DJI_0069.JPG differ diff --git a/tests/test_data/images/DJI_0070.JPG b/tests/test_data/images/DJI_0070.JPG new file mode 100644 index 000000000..6de155290 Binary files 
/dev/null and b/tests/test_data/images/DJI_0070.JPG differ diff --git a/tests/test_data/images/DJI_0071.JPG b/tests/test_data/images/DJI_0071.JPG new file mode 100644 index 000000000..1bf5aecc1 Binary files /dev/null and b/tests/test_data/images/DJI_0071.JPG differ diff --git a/tests/test_data/images/DJI_0072.JPG b/tests/test_data/images/DJI_0072.JPG new file mode 100644 index 000000000..29effdc93 Binary files /dev/null and b/tests/test_data/images/DJI_0072.JPG differ diff --git a/tests/test_data/images/DJI_0073.JPG b/tests/test_data/images/DJI_0073.JPG new file mode 100644 index 000000000..952c2bf08 Binary files /dev/null and b/tests/test_data/images/DJI_0073.JPG differ diff --git a/tests/test_data/images/DJI_0074.JPG b/tests/test_data/images/DJI_0074.JPG new file mode 100644 index 000000000..757beccea Binary files /dev/null and b/tests/test_data/images/DJI_0074.JPG differ diff --git a/tests/test_data/images/DJI_0075.JPG b/tests/test_data/images/DJI_0075.JPG new file mode 100644 index 000000000..a238ee569 Binary files /dev/null and b/tests/test_data/images/DJI_0075.JPG differ diff --git a/tests/test_data/images/DJI_0076.JPG b/tests/test_data/images/DJI_0076.JPG new file mode 100644 index 000000000..a5c4a9a48 Binary files /dev/null and b/tests/test_data/images/DJI_0076.JPG differ diff --git a/tests/test_data/images/DJI_0077.JPG b/tests/test_data/images/DJI_0077.JPG new file mode 100644 index 000000000..9dc06158d Binary files /dev/null and b/tests/test_data/images/DJI_0077.JPG differ diff --git a/tests/test_data/images/DJI_0078.JPG b/tests/test_data/images/DJI_0078.JPG new file mode 100644 index 000000000..c88a2f12d Binary files /dev/null and b/tests/test_data/images/DJI_0078.JPG differ diff --git a/tests/test_data/images/DJI_0079.JPG b/tests/test_data/images/DJI_0079.JPG new file mode 100644 index 000000000..5e7f8618b Binary files /dev/null and b/tests/test_data/images/DJI_0079.JPG differ diff --git a/tests/test_data/images/DJI_0080.JPG 
b/tests/test_data/images/DJI_0080.JPG new file mode 100644 index 000000000..1f5562dd1 Binary files /dev/null and b/tests/test_data/images/DJI_0080.JPG differ diff --git a/tests/test_data/images/DJI_0081.JPG b/tests/test_data/images/DJI_0081.JPG new file mode 100644 index 000000000..e82b03927 Binary files /dev/null and b/tests/test_data/images/DJI_0081.JPG differ diff --git a/tests/test_data/images/DJI_0082.JPG b/tests/test_data/images/DJI_0082.JPG new file mode 100644 index 000000000..eaed567e0 Binary files /dev/null and b/tests/test_data/images/DJI_0082.JPG differ diff --git a/tests/test_data/images/DJI_0083.JPG b/tests/test_data/images/DJI_0083.JPG new file mode 100644 index 000000000..27f5ffca8 Binary files /dev/null and b/tests/test_data/images/DJI_0083.JPG differ diff --git a/tests/test_data/images/DJI_0084.JPG b/tests/test_data/images/DJI_0084.JPG new file mode 100644 index 000000000..a60b91fb9 Binary files /dev/null and b/tests/test_data/images/DJI_0084.JPG differ diff --git a/tests/test_data/images/DJI_0085.JPG b/tests/test_data/images/DJI_0085.JPG new file mode 100644 index 000000000..4e52fd0fe Binary files /dev/null and b/tests/test_data/images/DJI_0085.JPG differ diff --git a/tests/test_data/images/DJI_0086.JPG b/tests/test_data/images/DJI_0086.JPG new file mode 100644 index 000000000..2c561456c Binary files /dev/null and b/tests/test_data/images/DJI_0086.JPG differ diff --git a/tests/test_data/images/DJI_0087.JPG b/tests/test_data/images/DJI_0087.JPG new file mode 100644 index 000000000..301103cda Binary files /dev/null and b/tests/test_data/images/DJI_0087.JPG differ diff --git a/tests/test_data/images/DJI_0088.JPG b/tests/test_data/images/DJI_0088.JPG new file mode 100644 index 000000000..22fb70047 Binary files /dev/null and b/tests/test_data/images/DJI_0088.JPG differ diff --git a/tests/test_data/images/DJI_0089.JPG b/tests/test_data/images/DJI_0089.JPG new file mode 100644 index 000000000..e232f2ffb Binary files /dev/null and 
b/tests/test_data/images/DJI_0089.JPG differ diff --git a/tests/test_data/images/DJI_0090.JPG b/tests/test_data/images/DJI_0090.JPG new file mode 100644 index 000000000..0a8b2f270 Binary files /dev/null and b/tests/test_data/images/DJI_0090.JPG differ diff --git a/tests/test_data/images/DJI_0091.JPG b/tests/test_data/images/DJI_0091.JPG new file mode 100644 index 000000000..059065287 Binary files /dev/null and b/tests/test_data/images/DJI_0091.JPG differ diff --git a/tests/test_data/images/DJI_0092.JPG b/tests/test_data/images/DJI_0092.JPG new file mode 100644 index 000000000..c007bc6af Binary files /dev/null and b/tests/test_data/images/DJI_0092.JPG differ diff --git a/tests/test_data/images/DJI_0093.JPG b/tests/test_data/images/DJI_0093.JPG new file mode 100644 index 000000000..532168686 Binary files /dev/null and b/tests/test_data/images/DJI_0093.JPG differ diff --git a/tests/test_data/images/DJI_0094.JPG b/tests/test_data/images/DJI_0094.JPG new file mode 100644 index 000000000..d72b03f20 Binary files /dev/null and b/tests/test_data/images/DJI_0094.JPG differ diff --git a/tests/test_data/images/DJI_0095.JPG b/tests/test_data/images/DJI_0095.JPG new file mode 100644 index 000000000..abc9f9e31 Binary files /dev/null and b/tests/test_data/images/DJI_0095.JPG differ diff --git a/tests/test_data/images/DJI_0096.JPG b/tests/test_data/images/DJI_0096.JPG new file mode 100644 index 000000000..1266e82d0 Binary files /dev/null and b/tests/test_data/images/DJI_0096.JPG differ diff --git a/tests/test_data/images/DJI_0097.JPG b/tests/test_data/images/DJI_0097.JPG new file mode 100644 index 000000000..37242dbfb Binary files /dev/null and b/tests/test_data/images/DJI_0097.JPG differ diff --git a/tests/test_data/images/DJI_0098.JPG b/tests/test_data/images/DJI_0098.JPG new file mode 100644 index 000000000..93fa8cc5c Binary files /dev/null and b/tests/test_data/images/DJI_0098.JPG differ diff --git a/tests/test_data/images/DJI_0099.JPG b/tests/test_data/images/DJI_0099.JPG new 
file mode 100644 index 000000000..91d866ec9 Binary files /dev/null and b/tests/test_data/images/DJI_0099.JPG differ diff --git a/tests/test_data/images/DJI_0100.JPG b/tests/test_data/images/DJI_0100.JPG new file mode 100644 index 000000000..49be66934 Binary files /dev/null and b/tests/test_data/images/DJI_0100.JPG differ diff --git a/tests/test_data/images/DJI_0101.JPG b/tests/test_data/images/DJI_0101.JPG new file mode 100644 index 000000000..911346e6a Binary files /dev/null and b/tests/test_data/images/DJI_0101.JPG differ diff --git a/tests/test_data/images/DJI_0102.JPG b/tests/test_data/images/DJI_0102.JPG new file mode 100644 index 000000000..a70e736bf Binary files /dev/null and b/tests/test_data/images/DJI_0102.JPG differ diff --git a/tests/test_data/images/DJI_0103.JPG b/tests/test_data/images/DJI_0103.JPG new file mode 100644 index 000000000..b6f182f04 Binary files /dev/null and b/tests/test_data/images/DJI_0103.JPG differ diff --git a/tests/test_data/images/DJI_0104.JPG b/tests/test_data/images/DJI_0104.JPG new file mode 100644 index 000000000..06a353265 Binary files /dev/null and b/tests/test_data/images/DJI_0104.JPG differ diff --git a/tests/test_data/images/DJI_0105.JPG b/tests/test_data/images/DJI_0105.JPG new file mode 100644 index 000000000..adc7d7549 Binary files /dev/null and b/tests/test_data/images/DJI_0105.JPG differ diff --git a/tests/test_data/images/DJI_0106.JPG b/tests/test_data/images/DJI_0106.JPG new file mode 100644 index 000000000..76d8c1e5e Binary files /dev/null and b/tests/test_data/images/DJI_0106.JPG differ diff --git a/tests/test_data/images/DJI_0107.JPG b/tests/test_data/images/DJI_0107.JPG new file mode 100644 index 000000000..91edf5595 Binary files /dev/null and b/tests/test_data/images/DJI_0107.JPG differ diff --git a/tests/test_odm.py b/tests/test_odm.py new file mode 100644 index 000000000..648a657b3 --- /dev/null +++ b/tests/test_odm.py @@ -0,0 +1,154 @@ +import unittest +import os +import shutil + +import ecto +from opendm 
import config +from opendm import context +from scripts.odm_app import ODMApp +from ecto.opts import scheduler_options, run_plasm + +parser = config.parser +scheduler_options(parser) +options = config.config() + + +def appSetup(options): + app = ODMApp(args=options) + plasm = ecto.Plasm() + plasm.insert(app) + return app, plasm + + +def setup_module(): + # Run tests + print '%s' % options + options.project_path = context.tests_data_path + # options.rerun_all = True + app, plasm = appSetup(options) + print 'Run Setup: Initial Run' + run_plasm(options, plasm) + # options.rerun_all = False + + +def teardown_module(): + # Delete generated test directories + dirnames = ['images_resize', 'opensfm', 'pmvs', 'odm_meshing', + 'odm_texturing', 'odm_georeferencing', 'odm_orthophoto'] + for n in dirnames: + rmpath = os.path.join(context.tests_data_path, n) + if os.path.exists(rmpath): + shutil.rmtree(rmpath) + + +class TestResize(unittest.TestCase): + """ + Tests the resize function + """ + + def setUp(self): + # rerun resize cell and set params + options.rerun = 'resize' + options.resize_to = 1600 + # rebuild app + self.app, self.plasm = appSetup(options) + run_plasm(options, self.plasm) + + + def test_resize(self): + # assert each image is sized to the option.resize_to + self.assertEquals(max(self.app.resize.outputs.photos[0].height, self.app.resize.outputs.photos[0].width), + options.resize_to) + + def test_all_resized(self): + # assert the number of images in images == number of images in resize + self.assertEquals(len(self.app.resize.outputs.photos), len(self.app.dataset.outputs.photos)) + + +class TestOpenSfM(unittest.TestCase): + """ + Tests the OpenSfM module + """ + def setUp(self): + options.rerun = 'opensfm' + self.app, self.plasm = appSetup(options) + run_plasm(options, self.plasm) + + def test_opensfm(self): + # Test configuration + self.assertTrue(os.path.isfile(self.app.opensfm.inputs.tree.opensfm_reconstruction)) + + +class TestCMVS(unittest.TestCase): + + def 
setUp(self): + options.rerun = 'cmvs' + self.app, self.plasm = appSetup(options) + run_plasm(options, self.plasm) + + def test_cmvs(self): + self.assertTrue(os.path.isfile(self.app.cmvs.inputs.tree.pmvs_bundle)) + + +class TestPMVS(unittest.TestCase): + + def setUp(self): + options.rerun = 'pmvs' + self.app, self.plasm = appSetup(options) + run_plasm(options, self.plasm) + + def test_pmvs(self): + self.assertTrue(os.path.isfile(self.app.pmvs.inputs.tree.pmvs_model)) + + +class TestMeshing(unittest.TestCase): + + def setUp(self): + options.rerun = 'odm_meshing' + self.app, self.plasm = appSetup(options) + run_plasm(options, self.plasm) + + def test_meshing(self): + self.assertTrue(os.path.isfile(self.app.meshing.inputs.tree.odm_mesh)) + + +class TestTexturing(unittest.TestCase): + + def setUp(self): + options.rerun = 'odm_texturing' + self.app, self.plasm = appSetup(options) + run_plasm(options, self.plasm) + + def test_texturing(self): + self.assertTrue(os.path.isfile(self.app.texturing.inputs.tree.odm_textured_model_obj)) + + +class TestGeoreferencing(unittest.TestCase): + + def setUp(self): + options.rerun = 'odm_georeferencing' + self.app, self.plasm = appSetup(options) + run_plasm(options, self.plasm) + + def test_georef(self): + self.assertTrue(os.path.isfile(self.app.georeferencing.inputs.tree.odm_georeferencing_coords) & + os.path.isfile(self.app.georeferencing.inputs.tree.odm_georeferencing_model_obj_geo)) + + def test_las_out(self): + self.assertTrue(os.path.isfile(os.path.join(self.app.georeferencing.inputs.tree.odm_georeferencing, + "odm_georeferenced_model.ply.las"))) + + +class TestOrthophoto(unittest.TestCase): + + def setUp(self): + options.rerun = 'odm_orthophoto' + self.app, self.plasm = appSetup(options) + run_plasm(options, self.plasm) + + def test_orthophoto(self): + self.assertTrue(os.path.isfile(self.app.orthophoto.inputs.tree.odm_orthophoto_file)) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git 
a/tests/testing.md b/tests/testing.md new file mode 100644 index 000000000..d85fb12e8 --- /dev/null +++ b/tests/testing.md @@ -0,0 +1,66 @@ + +## opendm/ +### config.py +* test each parameter (min, max, out of bounds, etc.) + +### context.py +* Each path must be a valid, existing path + +### io.py +* check each function with a known string + +### system.py +* (Get_ccd_widths is depreciated) +* parse_coordinate_system: + * This is also old code - new one is in types + +### types.py +* ODM_Photos + * updated focal equal to known focal length + * updated ccd equal to known ccd +* ODM_Georef + * calculate_epsg needs to be updated, see master + * convert_to_las: assert existence of las file output + * utm_to_latlon: + * + * + +## scripts/ +### dataset.py +* test that supported_extensions works with a variety of file names, even bogus ones +* check outputs are all the photos + +### resize.py +* resulting images are the right size +* metadata has been properly updated + +### OpenSfM.py +* config file is contains info same as params +* when matcher_distance > 0 it is written to the config +* at least one reconstruction file is generated +* check that bundler file is exported (possible to check if valid?) 
+ +### CMVS.py +* validate params +* system.run() command is equal to some known string + +### PMVS.py +* validate params +* system.run() command is equal to some known string + +### odm_meshing.py +* validate params +* system.run() command is equal to some known string + +### odm_texturing.py +* validate params +* system.run() command is equal to some known string + +### odm_georeferencing.py +* validate params +* system.run() command is equal to some known string when using EXIF coords +* system.run() when using GCP + +### odm_orthophoto.py +* validate params +* system.run() command is equal to some known string diff --git a/toledo_dataset_example_mesh.jpg b/toledo_dataset_example_mesh.jpg new file mode 100644 index 000000000..90129cab4 Binary files /dev/null and b/toledo_dataset_example_mesh.jpg differ