diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 190afad..a29907b 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -5,7 +5,8 @@ name: CI
on:
# Triggers the workflow on push or pull request events but only for the master branch
push:
- branches: [ master ]
+ branches:
+ - master
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
@@ -50,7 +51,7 @@ jobs:
- name: Build Artifact
shell: cmd
run: call .\ArtifactBuild.cmd
-
+
- name: Create Release
id: create_release
uses: actions/create-release@v1
@@ -61,8 +62,14 @@ jobs:
release_name: Release ${{github.run_number}}
body: |
Automated Release by GitHub Action CI
+
+ ${{ join(github.event.commits.*.message, '\n') }}
draft: false
- prerelease: false
+ prerelease: false
+
+ - name: List directory contents for debugging
+ run: Get-ChildItem -Recurse
+ shell: pwsh
- name: Upload Release Asset (x64)
id: upload-release-asset-x64
diff --git a/App.config b/App.config
index d1c27cd..2b5d840 100644
--- a/App.config
+++ b/App.config
@@ -145,6 +145,27 @@
4
+
+ True
+
+
+
+
+
+ min
+
+
+ False
+
+
+ False
+
+
+ 0.5
+
+
+ False
+
diff --git a/ArtifactBuild.cmd b/ArtifactBuild.cmd
index 613c2f9..5c6db4e 100644
--- a/ArtifactBuild.cmd
+++ b/ArtifactBuild.cmd
@@ -1,6 +1,6 @@
@echo off
pushd "%~dp0"
-powershell Compress-7Zip "Bin\Release" -ArchiveFileName "PointCloudConverterX64.zip" -Format Zip
+powershell Compress-7Zip "D:\a\PointCloudConverter\PointCloudConverter\bin\Release\net8.0-windows10.0.22621.0" -ArchiveFileName "PointCloudConverterX64.zip" -Format Zip
:exit
popd
@echo on
diff --git a/Build.cmd b/Build.cmd
index 88aae09..eb971f9 100644
--- a/Build.cmd
+++ b/Build.cmd
@@ -4,7 +4,8 @@ if exist Debug rd /s /q Debug
if exist Release rd /s /q Release
if exist x64 rd /s /q x64
-"%programfiles(x86)%\Microsoft Visual Studio\2019\Enterprise\MSBuild\Current\Bin\msbuild.exe" /p:Configuration=Release
+"%programfiles%\Microsoft Visual Studio\2022\Enterprise\MSBuild\Current\Bin\msbuild.exe" /p:Configuration=Release
+REM "%programfiles%\Microsoft Visual Studio\2022\Community\MSBuild\Current\Bin\msbuild.exe" /p:Configuration=Release
:exit
popd
diff --git a/Icons/app.ico b/Icons/app.ico
index e7ead2e..23d50df 100644
Binary files a/Icons/app.ico and b/Icons/app.ico differ
diff --git a/Interfaces/ILogger.cs b/Interfaces/ILogger.cs
new file mode 100644
index 0000000..86ebaa0
--- /dev/null
+++ b/Interfaces/ILogger.cs
@@ -0,0 +1,75 @@
+using System.Diagnostics;
+
+namespace PointCloudConverter.Logger
+{
+ public enum LogEvent
+ {
+ Start,
+ Settings,
+ File,
+ End,
+ Error,
+ Warning,
+ Info,
+ Progress,
+ Debug
+ }
+
+ public enum LogStatus
+ {
+ Processing,
+ Complete
+ }
+
+ public interface ILogger
+ {
+ void Write(string msg);
+ void Write(string msg, LogEvent eventType);
+ }
+
+ // Handles non-JSON (text-based) logging
+ public class LogText : ILogger
+ {
+ public void Write(string msg)
+ {
+ Console.WriteLine(msg);
+ Trace.WriteLine(msg);
+ }
+
+ public void Write(string msg, LogEvent eventType)
+ {
+ // Could be expanded to handle different events in the future
+ //Console.WriteLine($"{eventType}: {msg}");
+ }
+ }
+
+ // Handles JSON-based logging
+ public class LogJSON : ILogger
+ {
+ public void Write(string msg)
+ {
+ //Console.WriteLine(msg);
+ }
+
+ public void Write(string msg, LogEvent eventType)
+ {
+ Console.WriteLine(msg);
+ }
+ }
+
+ public static class LoggerFactory
+ {
+ public static ILogger CreateLogger(bool isJSON)
+ {
+ //Trace.WriteLine($"Creating logger with JSON: {isJSON}");
+ if (isJSON)
+ {
+ return new LogJSON();
+ }
+ else
+ {
+ return new LogText();
+ }
+ }
+ }
+}
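
Reviewer note: the factory above is easier to follow with a usage sketch next to it. The snippet below is illustrative only; the `Demo` method and the sample messages are assumptions, not part of the diff, and it calls only members that appear in Interfaces/ILogger.cs.

```csharp
using PointCloudConverter.Logger;

static class LoggerUsageSketch
{
    // Hypothetical consumer of LoggerFactory/ILogger from the new Interfaces/ILogger.cs.
    public static void Demo(bool useJson)
    {
        // Pick the JSON or plain-text implementation at runtime.
        ILogger logger = LoggerFactory.CreateLogger(useJson);

        // Plain message: LogText writes it to Console and Trace, LogJSON ignores it.
        logger.Write("conversion started");

        // Event-tagged message: LogJSON writes it, LogText's overload is currently a no-op.
        logger.Write("{\"event\":\"progress\",\"value\":50}", LogEvent.Progress);
    }
}
```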
diff --git a/Writers/IWriter.cs b/Interfaces/IWriter.cs
similarity index 70%
rename from Writers/IWriter.cs
rename to Interfaces/IWriter.cs
index a84a294..1e766ee 100644
--- a/Writers/IWriter.cs
+++ b/Interfaces/IWriter.cs
@@ -1,11 +1,12 @@
-using PointCloudConverter.Structs;
+using PointCloudConverter.Logger;
namespace PointCloudConverter.Writers
{
public interface IWriter
{
// create output filestream, called before looping through points
- bool InitWriter(ImportSettings importSettings, int pointCount);
+ //bool InitWriter(TSettings importSettings, int pointCount);
+ bool InitWriter(dynamic importSettings, int pointCount, ILogger logger);
// optional: if need to create special file header
void CreateHeader(int pointCount);
// output point X,Y,Z values to file
@@ -13,7 +14,7 @@ public interface IWriter
// output R,G,B values (float 0-1) to file
void WriteRGB(float r, float g, float b);
// optional: if you need to collect points for later processing
- void AddPoint(int index, float x, float y, float z, float r, float g, float b, bool hasIntensity, float i, bool hasTime, double time);
+ void AddPoint(int index, float x, float y, float z, float r, float g, float b, ushort intensity, double time, byte classification);
// optional: randomizes points (to use dynamic resolution/tile LOD in Unity)
void Randomize();
// called after all points have been looped through
@@ -22,6 +23,9 @@ public interface IWriter
void Cleanup(int fileIndex);
// close filestream
void Close();
+ void Dispose();
+ // used for intensity detection
+ void SetIntensityRange(bool isCustomRange);
}
}
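
Reviewer note: a minimal caller-side sketch of the revised IWriter surface, using only the members visible in these hunks (the X,Y,Z write call and the save step fall outside the hunks and are omitted). The driver method, its arguments, and the sample point values are assumptions.

```csharp
using PointCloudConverter.Logger;
using PointCloudConverter.Writers;

static class WriterUsageSketch
{
    // Hypothetical driver loop; 'writer' and 'importSettings' are assumed to be
    // created elsewhere in the application.
    public static void Run(IWriter writer, dynamic importSettings, ILogger logger)
    {
        const int pointCount = 2;
        bool ok = writer.InitWriter(importSettings, pointCount, logger);
        if (!ok) return;

        writer.SetIntensityRange(false); // assumption: false = default intensity range

        // AddPoint now takes a raw ushort intensity, a GPS time and a classification byte.
        writer.AddPoint(0, 1.0f, 2.0f, 3.0f, 0.5f, 0.5f, 0.5f, intensity: 255, time: 0.0, classification: 2);
        writer.AddPoint(1, 4.0f, 5.0f, 6.0f, 1.0f, 0.0f, 0.0f, intensity: 128, time: 0.1, classification: 6);

        writer.Close();
        writer.Dispose();
    }
}
```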
diff --git a/Interfaces/Log.cs b/Interfaces/Log.cs
new file mode 100644
index 0000000..ae5edc4
--- /dev/null
+++ b/Interfaces/Log.cs
@@ -0,0 +1,40 @@
+using System.Diagnostics;
+
+namespace PointCloudConverter.Logger
+{
+ public static class Log
+ {
+ private static ILogger logger;
+ public static string version = null;
+ public static bool isJSON = false;
+
+ public static bool json()
+ {
+ return isJSON;
+ }
+
+ //// Create a logger based on whether JSON output is needed
+ //public static void CreateLogger(bool isJSON, string version)
+ //{
+ // Log.version = version;
+ // logger = LoggerFactory.CreateLogger(isJSON);
+ //}
+
+ public static void SetSettings(bool _isJSON)
+ {
+ Console.WriteLine($"Setting JSON to {_isJSON}");
+ Trace.WriteLine($"Setting JSON to {_isJSON}");
+ isJSON = _isJSON;
+ }
+
+ public static void WriteLine(string message)
+ {
+ logger.Write(message);
+ }
+
+ public static void WriteLine(string message, LogEvent logEvent)
+ {
+ logger.Write(message, logEvent);
+ }
+ }
+}
\ No newline at end of file
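
Reviewer note: in Interfaces/Log.cs the private `logger` field is only assigned by the commented-out CreateLogger helper, so `Log.WriteLine` would throw a NullReferenceException unless something else initializes it. The sketch below exercises only the members that are live in this diff; the `Demo` method itself is an assumption.

```csharp
using System;
using PointCloudConverter.Logger;

static class LogFacadeSketch
{
    // Hypothetical start-up wiring for the static Log facade.
    public static void Demo()
    {
        Log.version = "1.0";   // assumption: the host app sets this; the diff leaves it null
        Log.SetSettings(true); // switch the facade into JSON mode

        if (Log.json())
        {
            Console.WriteLine("facade is in JSON mode");
            // Do not call Log.WriteLine here: the backing ILogger has not been
            // created, because LoggerFactory is never invoked in Log.cs as committed.
        }
    }
}
```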
diff --git a/Interfaces/Shared.csproj b/Interfaces/Shared.csproj
new file mode 100644
index 0000000..41d202f
--- /dev/null
+++ b/Interfaces/Shared.csproj
@@ -0,0 +1,13 @@
+
+
+
+ net8.0
+ enable
+ enable
+ AnyCPU;x64
+
+
+
+ false
+
+
diff --git a/LICENSE b/LICENSE
index 8000a6f..24d19d6 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,3 +1,7 @@
+MULTIPLE LICENSES FOR LIBRARIES USED
+
+ ### shintadono/laszip.net
+
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
@@ -502,3 +506,1091 @@ necessary. Here is a sample; alter the names:
Ty Coon, President of Vice
That's all there is to it!
+
+
+### Aardvark.Base
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+### Aardvark.Data.E57
+
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
+
+
+### Newtonsoft.Json
+
+The MIT License (MIT)
+
+Copyright (c) 2007 James Newton-King
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+### ply.net
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2018-2023 Stefan Maierhofer
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/MainWindow.xaml b/MainWindow.xaml
index 78456d2..c90d4bc 100644
--- a/MainWindow.xaml
+++ b/MainWindow.xaml
@@ -5,24 +5,28 @@
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:local="clr-namespace:PointCloudConverter"
mc:Ignorable="d"
- Title="PointCloudConverter" Height="720" Width="907" Background="#FF252222" Closing="Window_Closing" Loaded="Window_Loaded">
+ Title="PointCloudConverter" Height="737" Width="940" Background="#FF252222" Closing="Window_Closing" Loaded="Window_Loaded" MinWidth="940">
diff --git a/MainWindow.xaml.cs b/MainWindow.xaml.cs
index 1f040a5..4ef21cb 100644
--- a/MainWindow.xaml.cs
+++ b/MainWindow.xaml.cs
@@ -19,12 +19,18 @@
using Brushes = System.Windows.Media.Brushes;
using System.Threading.Tasks;
using PointCloudConverter.Readers;
+using System.Collections.Concurrent;
+using PointCloudConverter.Writers;
+using System.Reflection;
+using System.Globalization;
+using System.Windows.Media;
+using PointCloudConverter.Structs.Metadata;
namespace PointCloudConverter
{
public partial class MainWindow : Window
{
- static readonly string version = "18.08.2024";
+ static readonly string version = "18.05.2025";
static readonly string appname = "PointCloud Converter - " + version;
static readonly string rootFolder = AppDomain.CurrentDomain.BaseDirectory;
@@ -37,13 +43,19 @@ public partial class MainWindow : Window
[DllImport("kernel32.dll", SetLastError = true)]
static extern bool FreeConsole();
- Thread workerThread;
- static bool abort = false;
+ const uint WM_CHAR = 0x0102;
+ const int VK_ENTER = 0x0D;
+
+ [DllImport("kernel32.dll")]
+ static extern IntPtr GetConsoleWindow();
+
+ [DllImport("user32.dll")]
+ static extern int SendMessage(IntPtr hWnd, uint msg, IntPtr wParam, IntPtr lParam);
+
public static MainWindow mainWindowStatic;
bool isInitialiazing = true;
- static List lasHeaders = new List();
- private readonly ILogger logger;
+ static JobMetadata jobMetadata = new JobMetadata();
// progress bar data
static int progressPoint = 0;
@@ -53,6 +65,14 @@ public partial class MainWindow : Window
static DispatcherTimer progressTimerThread;
public static string lastStatusMessage = "";
public static int errorCounter = 0; // how many errors when importing or reading files (single file could have multiple errors)
+ private CancellationTokenSource _cancellationTokenSource = new CancellationTokenSource();
+
+ // filter by distance
+ private readonly float cellSize = 0.5f;
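+ // occupiedCells acts as a thread-safe set: one entry per voxel cell that already holds a kept point (used by the distance filter)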
+ private static ConcurrentDictionary<(int, int, int), byte> occupiedCells = new();
+
+ // plugins
+ string externalFileFormats = "";
public MainWindow()
{
@@ -61,20 +81,89 @@ public MainWindow()
Main();
}
+ public static Dictionary<string, Type> externalWriters = new Dictionary<string, Type>();
+
+ static ILogger Log;
+
private async void Main()
{
// check cmdline args
string[] args = Environment.GetCommandLineArgs();
-
Tools.FixDLLFoldersAndConfig(rootFolder);
Tools.ForceDotCultureSeparator();
// default logger
- Log.CreateLogger(isJSON: false, version: version);
-
+ //Log.CreateLogger(isJSON: false, version: version);
+ Log = LoggerFactory.CreateLogger(isJSON: false);
+ //Log.CreateLogger(isJSON: false, version: "1.0");
// default code
Environment.ExitCode = (int)ExitCode.Success;
+ // load all plugins from plugins folder
+ //var testwriter = PointCloudConverter.Plugins.PluginLoader.LoadWriter("plugins/GLTFWriter.dll");
+ ////testwriter.Close();
+ //externalWriters = AppDomain.CurrentDomain.GetAssemblies().SelectMany(assembly => assembly.GetTypes()).Where(type => typeof(IWriter).IsAssignableFrom(type) && !type.IsInterface);
+
+ // Get the directory of the running executable
+ var exeDir = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
+
+ // Build absolute path to plugins folder
+ var pluginsDirectory = Path.Combine(exeDir, "plugins");
+
+ if (Directory.Exists(pluginsDirectory))
+ {
+ //Log.Write("Plugins directory not found.");
+
+ // Get all DLL files in the plugins directory
+ var pluginFiles = Directory.GetFiles(pluginsDirectory, "*.dll");
+
+ foreach (var pluginDLL in pluginFiles)
+ {
+ try
+ {
+ // Load the DLL file as an assembly
+ var assembly = Assembly.LoadFrom(pluginDLL);
+
+ // Find all types in the assembly that implement IWriter
+ var writerTypes = assembly.GetTypes().Where(type => typeof(IWriter).IsAssignableFrom(type) && !type.IsInterface && !type.IsAbstract);
+
+ foreach (var writerType in writerTypes)
+ {
+ // Derive a unique key for the writer (e.g., from its name or class name)
+ string writerName = writerType.Name;//.Replace("Writer", ""); // Customize the key generation logic
+ if (!externalWriters.ContainsKey(writerName))
+ {
+ // Add the writer type to the dictionary for later use
+ externalWriters.Add(writerName, writerType);
+ //Log.Write($"Found writer: {writerType.FullName} in {pluginDLL}");
+
+ // TODO take extensions from plugin? has 2: .glb and .gltf
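+ // appended to the output file dialog filter later, so plugin formats can be picked as export targets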
+ externalFileFormats += "|" + writerName + " (" + writerType.FullName + ")|*." + writerName.ToLower();
+ }
+ }
+ }
+ catch (ReflectionTypeLoadException rex)
+ {
+ Log.Write($"Error loading plugin {pluginDLL}: {rex.Message}");
+
+ foreach (var loaderException in rex.LoaderExceptions)
+ {
+ Log.Write(" - " + loaderException?.Message);
+ }
+ }
+ catch (Exception ex)
+ {
+ Log.Write($"General error loading plugin {pluginDLL}: {ex.Message}");
+ }
+ }
+ } // if plugins folder exists
+
+ //return;
+
+ // for debug: print config file location in appdata local here directly
+ // string configFilePath = System.Configuration.ConfigurationManager.OpenExeConfiguration(System.Configuration.ConfigurationUserLevel.PerUserRoamingAndLocal).FilePath;
+ // Log.Write("Config file: " + configFilePath);
+
// using from commandline
if (args.Length > 1)
{
@@ -88,23 +177,26 @@ private async void Main()
{
if (arg.ToLower().Contains("-json=true"))
{
- Log.CreateLogger(isJSON: true, version: version);
+ //Log.CreateLogger(isJSON: true, version: version);
+ Log = LoggerFactory.CreateLogger(isJSON: true);
}
}
Console.ForegroundColor = ConsoleColor.Cyan;
- Log.WriteLine("\n::: " + appname + " :::\n");
+ Log.Write("\n::: " + appname + " :::\n");
//Console.WriteLine("\n::: " + appname + " :::\n");
Console.ForegroundColor = ConsoleColor.White;
+ IntPtr cw = GetConsoleWindow();
// check args, null here because we get the args later
- var importSettings = ArgParser.Parse(null, rootFolder);
+ var importSettings = ArgParser.Parse(null, rootFolder, Log);
- if (importSettings.useJSONLog)
- {
- importSettings.version = version;
- Log.SetSettings(importSettings);
- }
+ // NOTE was not used?
+ //if (importSettings.useJSONLog)
+ //{
+ // importSettings.version = version;
+ // Log.SetSettings(importSettings);
+ //}
//if (importSettings.useJSONLog) log.Init(importSettings, version);
@@ -121,6 +213,8 @@ private async void Main()
CancellationToken = _cancellationTokenSource.Token
};
+ InitProgressBars(importSettings);
+
await Task.Run(() => ProcessAllFiles(workerParams));
}
@@ -130,13 +224,15 @@ private async void Main()
string elapsedString = elapsed.ToString(@"hh\h\ mm\m\ ss\s\ ms\m\s");
// end output
- Log.WriteLine("Exited.\nElapsed: " + elapsedString);
+ Log.Write("Exited.\nElapsed: " + elapsedString);
if (importSettings.useJSONLog)
{
- Log.WriteLine("{\"event\": \"" + LogEvent.End + "\", \"elapsed\": \"" + elapsedString + "\",\"version\":\"" + version + ",\"errors\":" + errorCounter + "}", LogEvent.End);
+ Log.Write("{\"event\": \"" + LogEvent.End + "\", \"elapsed\": \"" + elapsedString + "\",\"version\":\"" + version + ",\"errors\":" + errorCounter + "}", LogEvent.End);
}
- // hack for console exit https://stackoverflow.com/a/67940480/5452781
- SendKeys.SendWait("{ENTER}");
+
+ // https://stackoverflow.com/a/45620138/5452781
+ SendMessage(cw, WM_CHAR, (IntPtr)VK_ENTER, IntPtr.Zero);
+
FreeConsole();
Environment.Exit(Environment.ExitCode);
}
@@ -150,15 +246,12 @@ private async void Main()
LoadSettings();
}
-
// main processing loop
-
private static async Task ProcessAllFiles(object workerParamsObject)
{
var workerParams = (WorkerParams)workerParamsObject;
var importSettings = workerParams.ImportSettings;
var cancellationToken = workerParams.CancellationToken;
-
// Use cancellationToken to check for cancellation
if (cancellationToken.IsCancellationRequested)
{
@@ -184,19 +277,23 @@ private static async Task ProcessAllFiles(object workerParamsObject)
List boundsListTemp = new List();
+ // clear filter by distance
+ occupiedCells.Clear();
+
// get all file bounds, if in batch mode and RGB+INT+PACK
// TODO: check what happens if its too high? over 128/256?
//if (importSettings.useAutoOffset == true && importSettings.importIntensity == true && importSettings.importRGB == true && importSettings.packColors == true && importSettings.importMetadataOnly == false)
-
- //Log.WriteLine(importSettings.useAutoOffset + " && " + importSettings.importMetadataOnly + " || (" + importSettings.importIntensity + " && " + importSettings.importRGB + " && " + importSettings.packColors + " && " + importSettings.importMetadataOnly + ")");
+ //Log.Write(importSettings.useAutoOffset + " && " + importSettings.importMetadataOnly + " || (" + importSettings.importIntensity + " && " + importSettings.importRGB + " && " + importSettings.packColors + " && " + importSettings.importMetadataOnly + ")");
//bool istrue1 = (importSettings.useAutoOffset == true && importSettings.importMetadataOnly == false);
//bool istrue2 = (importSettings.importIntensity == true && importSettings.importRGB == true && importSettings.packColors == true && importSettings.importMetadataOnly == false);
- //Log.WriteLine(istrue1 ? "1" : "0");
- //Log.WriteLine(istrue2 ? "1" : "0");
+ //Log.Write(istrue1 ? "1" : "0");
+ //Log.Write(istrue2 ? "1" : "0");
- if ((importSettings.useAutoOffset == true && importSettings.importMetadataOnly == false) || (importSettings.importIntensity == true && importSettings.importRGB == true && importSettings.packColors == true && importSettings.importMetadataOnly == false))
+ if ((importSettings.useAutoOffset == true && importSettings.importMetadataOnly == false) || ((importSettings.importIntensity == true || importSettings.importClassification == true) && importSettings.importRGB == true && importSettings.packColors == true && importSettings.importMetadataOnly == false))
{
- for (int i = 0, len = importSettings.maxFiles; i < len; i++)
+ int iterations = importSettings.offsetMode == "min" ? importSettings.maxFiles : 1; // 1 for legacy mode
+
+ for (int i = 0, len = iterations; i < len; i++)
{
if (cancellationToken.IsCancellationRequested)
{
@@ -204,7 +301,7 @@ private static async Task ProcessAllFiles(object workerParamsObject)
}
progressFile = i;
- Log.WriteLine("\nReading bounds from file (" + (i + 1) + "/" + len + ") : " + importSettings.inputFiles[i] + " (" + Tools.HumanReadableFileSize(new FileInfo(importSettings.inputFiles[i]).Length) + ")");
+ Log.Write("\nReading bounds from file (" + (i + 1) + "/" + len + ") : " + importSettings.inputFiles[i] + " (" + Tools.HumanReadableFileSize(new FileInfo(importSettings.inputFiles[i]).Length) + ")");
var res = GetBounds(importSettings, i);
if (res.Item1 == true)
@@ -216,15 +313,16 @@ private static async Task ProcessAllFiles(object workerParamsObject)
errorCounter++;
if (importSettings.useJSONLog)
{
- Log.WriteLine("{\"event\": \"" + LogEvent.File + "\", \"path\": " + System.Text.Json.JsonSerializer.Serialize(importSettings.inputFiles[i]) + ", \"status\": \"" + LogStatus.Processing + "\"}", LogEvent.Error);
+ Log.Write("{\"event\": \"" + LogEvent.File + "\", \"path\": " + System.Text.Json.JsonSerializer.Serialize(importSettings.inputFiles[i]) + ", \"status\": \"" + LogStatus.Processing + "\"}", LogEvent.Error);
}
else
{
- Log.WriteLine("Error> Failed to get bounds from file: " + importSettings.inputFiles[i], LogEvent.Error);
+ Log.Write("Error> Failed to get bounds from file: " + importSettings.inputFiles[i], LogEvent.Error);
}
}
}
+ // NOTE this fails with some files? returns 0,0,0 for some reason
// find lowest bounds from boundsListTemp
float lowestX = float.MaxValue;
float lowestY = float.MaxValue;
@@ -236,14 +334,22 @@ private static async Task ProcessAllFiles(object workerParamsObject)
if (boundsListTemp[iii].z < lowestZ) lowestZ = (float)boundsListTemp[iii].z;
}
- //Console.WriteLine("Lowest bounds: " + lowestX + " " + lowestY + " " + lowestZ);
+ //Log.Write("Lowest bounds: " + lowestX + " " + lowestY + " " + lowestZ);
// TODO could take center for XZ, and lowest for Y?
importSettings.offsetX = lowestX;
importSettings.offsetY = lowestY;
importSettings.offsetZ = lowestZ;
} // if useAutoOffset
- lasHeaders.Clear();
+
+ //lasHeaders.Clear();
+ jobMetadata.Job = new Job
+ {
+ ConverterVersion = version,
+ ImportSettings = importSettings,
+ StartTime = DateTime.Now
+ };
+ jobMetadata.lasHeaders.Clear();
progressFile = 0;
//for (int i = 0, len = importSettings.maxFiles; i < len; i++)
@@ -254,7 +360,7 @@ private static async Task ProcessAllFiles(object workerParamsObject)
// }
// progressFile = i;
- // Log.WriteLine("\nReading file (" + (i + 1) + "/" + len + ") : " + importSettings.inputFiles[i] + " (" + Tools.HumanReadableFileSize(new FileInfo(importSettings.inputFiles[i]).Length) + ")");
+ // Log.Write("\nReading file (" + (i + 1) + "/" + len + ") : " + importSettings.inputFiles[i] + " (" + Tools.HumanReadableFileSize(new FileInfo(importSettings.inputFiles[i]).Length) + ")");
// //Debug.WriteLine("\nReading file (" + (i + 1) + "/" + len + ") : " + importSettings.inputFiles[i] + " (" + Tools.HumanReadableFileSize(new FileInfo(importSettings.inputFiles[i]).Length) + ")");
// //if (abort==true)
// // do actual point cloud parsing for this file
@@ -264,28 +370,31 @@ private static async Task ProcessAllFiles(object workerParamsObject)
// errorCounter++;
// if (importSettings.useJSONLog)
// {
- // Log.WriteLine("{\"event\": \"" + LogEvent.File + "\", \"path\": " + System.Text.Json.JsonSerializer.Serialize(importSettings.inputFiles[i]) + ", \"status\": \"" + LogStatus.Processing + "\"}", LogEvent.Error);
+ // Log.Write("{\"event\": \"" + LogEvent.File + "\", \"path\": " + System.Text.Json.JsonSerializer.Serialize(importSettings.inputFiles[i]) + ", \"status\": \"" + LogStatus.Processing + "\"}", LogEvent.Error);
// }
// else
// {
- // Log.WriteLine("Error> Failed to parse file: " + importSettings.inputFiles[i], LogEvent.Error);
+ // Log.Write("Error> Failed to parse file: " + importSettings.inputFiles[i], LogEvent.Error);
// }
// }
//}
//// hack to fix progress bar not updating on last file
//progressFile++;
- // clamp to max of inputfiles (otherwise errors in threading)
- int maxThreads = Math.Min(importSettings.maxThreads, importSettings.maxFiles - 1); // FIXME: -1 because otherwise keynotfindexception in last file or after it?
+ // clamp to maxfiles
+ int maxThreads = Math.Min(importSettings.maxThreads, importSettings.maxFiles);
// clamp to min 1
- maxThreads = Math.Max(importSettings.maxThreads, 1);
- Log.WriteLine("Using MaxThreads: " + maxThreads);
+ maxThreads = Math.Max(maxThreads, 1);
+ Log.Write("Using MaxThreads: " + maxThreads);
+ // init pool
+ importSettings.InitWriterPool(maxThreads, importSettings.exportFormat);
- var semaphore = new SemaphoreSlim(importSettings.maxThreads);
+ var semaphore = new SemaphoreSlim(maxThreads);
var tasks = new List<Task>();
+
for (int i = 0, len = importSettings.maxFiles; i < len; i++)
{
if (cancellationToken.IsCancellationRequested)
@@ -293,50 +402,91 @@ private static async Task ProcessAllFiles(object workerParamsObject)
return;
}
- await semaphore.WaitAsync(cancellationToken);
+ //await semaphore.WaitAsync(cancellationToken);
+ try
+ {
+ await semaphore.WaitAsync(cancellationToken);
+ }
+ catch (OperationCanceledException)
+ {
+ // Handle the cancellation scenario here
+ Log.Write("Wait was canceled.");
+ }
+ finally
+ {
+ // Ensure the semaphore is released safely
+ if (semaphore.CurrentCount == 0) // Make sure we don't release more times than we acquire
+ {
+ try
+ {
+ semaphore.Release();
+ }
+ catch (SemaphoreFullException ex)
+ {
+ //Log.Write($"Semaphore was already fully released. Exception: {ex.Message}");
+ }
+ }
+ }
//int? taskId = Task.CurrentId; // Get the current task ID
- progressFile = i;
+ //progressFile = i;
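+ // progressFile is shown by the UI progress timer and updated from several worker tasks, so increment it atomically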
+ Interlocked.Increment(ref progressFile);
//bool isLastTask = (i == len - 1); // Check if this is the last task
- int index = i; // Capture the current index in the loop
+ int index = i; // Capture the current file index in the loop
int len2 = len;
tasks.Add(Task.Run(async () =>
{
int? taskId = Task.CurrentId; // Get the current task ID
- //Log.WriteLine("task started: " + taskId + " fileindex: " + index);
- Log.WriteLine("task:" + taskId + ", reading file (" + (index + 1) + "/" + len2 + ") : " + importSettings.inputFiles[index] + " (" + Tools.HumanReadableFileSize(new FileInfo(importSettings.inputFiles[index]).Length) + ")\n");
+ //Log.Write("task started: " + taskId + " fileindex: " + index);
+ Log.Write("task:" + taskId + ", reading file (" + (index + 1) + "/" + len2 + ") : " + importSettings.inputFiles[index] + " (" + Tools.HumanReadableFileSize(new FileInfo(importSettings.inputFiles[index]).Length) + ")\n");
try
{
// Do actual point cloud parsing for this file and pass taskId
- var res = ParseFile(importSettings, index, taskId);
+ var res = ParseFile(importSettings, index, taskId, cancellationToken);
if (!res)
{
Interlocked.Increment(ref errorCounter); // thread-safe error counter increment
if (importSettings.useJSONLog)
{
- Log.WriteLine("{\"event\": \"" + LogEvent.File + "\", \"path\": " + System.Text.Json.JsonSerializer.Serialize(importSettings.inputFiles[i]) + ", \"status\": \"" + LogStatus.Processing + "\"}", LogEvent.Error);
+ // if canceled, we don't want to log this (causes a NullReferenceException)
+ if (cancellationToken.IsCancellationRequested == false)
+ {
+ Log.Write("{\"event\": \"" + LogEvent.File + "\", \"path\": " + System.Text.Json.JsonSerializer.Serialize(importSettings.inputFiles[i]) + ", \"status\": \"" + LogStatus.Processing + "\"}", LogEvent.Error);
+ }
}
else
{
- Log.WriteLine("Error> Failed to parse file: " + importSettings.inputFiles[i], LogEvent.Error);
+ if (cancellationToken.IsCancellationRequested)
+ {
+ Log.Write("Task was canceled.");
+ }
+ else
+ {
+ Log.Write("files" + importSettings.inputFiles.Count + " i:" + i);
+ Log.Write("Error> Failed to parse file: " + importSettings.inputFiles[i], LogEvent.Error);
+ }
}
}
}
catch (TaskCanceledException ex)
{
- Log.WriteLine("Task was canceled: " + ex.Message, LogEvent.Error);
+ Log.Write("Task was canceled: " + ex.Message, LogEvent.Error);
}
catch (TimeoutException ex)
{
- Log.WriteLine("Timeout occurred: " + ex.Message, LogEvent.Error);
+ Log.Write("Timeout occurred: " + ex.Message, LogEvent.Error);
+ }
+ catch (OperationCanceledException)
+ {
+ MessageBox.Show("Operation was canceled.");
}
catch (Exception ex)
{
- Log.WriteLine("Exception> " + ex.Message, LogEvent.Error);
- throw; // Rethrow to ensure Task.WhenAll sees the exception
+ Log.Write("Exception> " + ex.Message, LogEvent.Error);
+ //throw; // Rethrow to ensure Task.WhenAll sees the exception
}
finally
{
@@ -350,7 +500,11 @@ private static async Task ProcessAllFiles(object workerParamsObject)
//Trace.WriteLine(" ---------------------- all finished -------------------- ");
// now write header for for pcroot (using main writer)
- importSettings.writer.Close();
+ if (importSettings.exportFormat != ExportFormat.UCPC)
+ {
+ importSettings.writer.Close();
+ // UCPC calls close in Save() itself
+ }
// if this was last file
//if (fileIndex == (importSettings.maxFiles - 1))
@@ -360,21 +514,32 @@ private static async Task ProcessAllFiles(object workerParamsObject)
StringEscapeHandling = StringEscapeHandling.Default // This prevents escaping of characters and write the WKT string properly
};
- string jsonMeta = JsonConvert.SerializeObject(lasHeaders, settings);
+ // add job date
+ jobMetadata.Job.EndTime = DateTime.Now;
+ jobMetadata.Job.Elapsed = jobMetadata.Job.EndTime - jobMetadata.Job.StartTime;
+
+ string jsonMeta = JsonConvert.SerializeObject(jobMetadata, settings);
// var jsonMeta = JsonSerializer.Serialize(lasHeaders, new JsonSerializerOptions() { WriteIndented = true });
- //Log.WriteLine("MetaData: " + jsonMeta);
+ //Log.Write("MetaData: " + jsonMeta);
// write metadata to file
if (importSettings.importMetadata == true)
{
- var jsonFile = Path.Combine(Path.GetDirectoryName(importSettings.outputFile), Path.GetFileNameWithoutExtension(importSettings.outputFile) + ".json");
- Log.WriteLine("Writing metadata to file: " + jsonFile);
+ string filename = Path.GetFileNameWithoutExtension(importSettings.outputFile);
+ // for gltf, there is no output filename
+ if (string.IsNullOrEmpty(filename))
+ {
+ // get last folder name
+ filename = Path.GetFileName(Path.GetDirectoryName(importSettings.inputFiles[0]));
+ }
+ var jsonFile = Path.Combine(Path.GetDirectoryName(importSettings.outputFile), filename + ".json");
+ Log.Write("Writing metadata to file: " + jsonFile);
File.WriteAllText(jsonFile, jsonMeta);
}
lastStatusMessage = "Done!";
Console.ForegroundColor = ConsoleColor.Green;
- Log.WriteLine("Finished!");
+ Log.Write("Finished!");
Console.ForegroundColor = ConsoleColor.White;
mainWindowStatic.Dispatcher.Invoke(() =>
{
@@ -396,7 +561,7 @@ private static async Task ProcessAllFiles(object workerParamsObject)
// } // if last file
stopwatch.Stop();
- Log.WriteLine("Elapsed: " + (TimeSpan.FromMilliseconds(stopwatch.ElapsedMilliseconds)).ToString(@"hh\h\ mm\m\ ss\s\ ms\m\s"));
+ Log.Write("Elapsed: " + (TimeSpan.FromMilliseconds(stopwatch.ElapsedMilliseconds)).ToString(@"hh\h\ mm\m\ ss\s\ ms\m\s"));
stopwatch.Reset();
Application.Current.Dispatcher.Invoke(new Action(() =>
@@ -407,7 +572,7 @@ private static async Task ProcessAllFiles(object workerParamsObject)
// clear timer
progressTimerThread.Stop();
mainWindowStatic.progressBarFiles.Foreground = Brushes.Green;
- mainWindowStatic.progressBarPoints.Foreground = Brushes.Green;
+ //mainWindowStatic.progressBarPoints.Foreground = Brushes.Green;
}));
} // ProcessAllFiles
@@ -419,6 +584,7 @@ void HideProcessingPanel()
static void StartProgressTimer()
{
+ //Log.Write("Starting progress timer..*-*************************");
progressTimerThread = new DispatcherTimer(DispatcherPriority.Background, Application.Current.Dispatcher);
progressTimerThread.Tick += ProgressTick;
progressTimerThread.Interval = TimeSpan.FromSeconds(1);
@@ -427,35 +593,145 @@ static void StartProgressTimer()
Application.Current.Dispatcher.Invoke(new Action(() =>
{
mainWindowStatic.progressBarFiles.Foreground = Brushes.Red;
- mainWindowStatic.progressBarPoints.Foreground = Brushes.Red;
+ //mainWindowStatic.progressBarPoints.Foreground = Brushes.Red;
mainWindowStatic.lblStatus.Content = "";
}));
}
+ private static List<ProgressInfo> progressInfos = new List<ProgressInfo>();
+ private static object lockObject = new object();
+
+ public class ProgressInfo
+ {
+ public int Index { get; internal set; } // Index of the ProgressBar in the UI
+ public int CurrentValue { get; internal set; } // Current progress value
+ public int MaxValue { get; internal set; } // Maximum value for the progress
+ public string FilePath { get; internal set; }
+ public bool UseJsonLog { get; internal set; }
+ }
+
+ static void InitProgressBars(ImportSettings importSettings)
+ {
+ ClearProgressBars();
+
+ int threadCount = importSettings.maxThreads;
+ // clamp to max files
+ threadCount = Math.Min(threadCount, importSettings.inputFiles.Count);
+ threadCount = Math.Max(threadCount, 1);
+
+ //Log.WriteLine("Creating progress bars: " + threadCount);
+ bool useJsonLog = importSettings.useJSONLog;
+ progressInfos.Clear();
+
+ for (int i = 0; i < threadCount; i++)
+ {
+ ProgressBar newProgressBar = new ProgressBar
+ {
+ Height = 10,
+ Width = 490 / threadCount,
+ Value = 0,
+ Maximum = 100, // TODO set value in parsefile?
+ HorizontalAlignment = HorizontalAlignment.Left,
+ Margin = new Thickness(1, 0, 1, 0),
+ Foreground = Brushes.Red,
+ Background = null,
+ //BorderBrush = Brushes.Red,
+ //ToolTip = $"Thread {i}"
+ };
+
+ // Initialize ProgressInfo for each ProgressBar
+ var progressInfo = new ProgressInfo
+ {
+ Index = i, // Index in the StackPanel
+ CurrentValue = 0, // Initial value
+ MaxValue = 100,
+ UseJsonLog = useJsonLog
+ };
+
+ progressInfos.Add(progressInfo);
+
+ mainWindowStatic.ProgressBarsContainer.Children.Add(newProgressBar);
+ }
+ }
+
+ static void ClearProgressBars()
+ {
+ mainWindowStatic.ProgressBarsContainer.Children.Clear();
+ }
+
static void ProgressTick(object sender, EventArgs e)
{
- if (progressTotalPoints > 0)
+ Application.Current.Dispatcher.Invoke(() =>
{
- //mainWindowStatic.progressBarFiles.Value = ((float)((progressFile+1) / (float)(progressTotalFiles+1)));
+ //if (progressTotalPoints > 0)
+ //{
mainWindowStatic.progressBarFiles.Value = progressFile;
mainWindowStatic.progressBarFiles.Maximum = progressTotalFiles + 1;
- mainWindowStatic.progressBarPoints.Value = progressPoint / (float)progressTotalPoints;
mainWindowStatic.lblStatus.Content = lastStatusMessage;
- }
- else
- {
- mainWindowStatic.progressBarFiles.Value = 0;
- mainWindowStatic.progressBarPoints.Value = 0;
- mainWindowStatic.lblStatus.Content = "";
- }
- }
+
+ // Update all progress bars based on the current values in the List
+ lock (lockObject) // Lock to safely read progressInfos
+ {
+ foreach (var progressInfo in progressInfos)
+ {
+ int index = progressInfo.Index;
+ int currentValue = progressInfo.CurrentValue;
+ int maxValue = progressInfo.MaxValue;
+
+ // Access ProgressBar directly from the StackPanel.Children using its index
+ if (index >= 0 && index < mainWindowStatic.ProgressBarsContainer.Children.Count)
+ {
+ if (mainWindowStatic.ProgressBarsContainer.Children[index] is ProgressBar progressBar)
+ {
+ progressBar.Maximum = maxValue;
+ progressBar.Value = currentValue;
+ progressBar.Foreground = ((currentValue + 1 >= maxValue) ? Brushes.Lime : Brushes.Red); //+1 hack fix
+ //progressBar.ToolTip = $"Thread {index} - {currentValue} / {maxValue}"; // not visible, because modal dialog
+ //Log.Write("ProgressTick: " + index + " " + currentValue + " / " + maxValue);
+
+ // print json progress
+ if (progressInfo.UseJsonLog) // TODO now same bool value is for each progressinfo..
+ {
+ string jsonString = "{" +
+ "\"event\": \"" + LogEvent.Progress + "\"," +
+ "\"thread\": " + index + "," +
+ "\"currentPoint\": " + currentValue + "," +
+ "\"totalPoints\": " + maxValue + "," +
+ "\"percentage\": " + (int)((currentValue / (float)maxValue) * 100.0) + "," +
+ "\"file\": " + System.Text.Json.JsonSerializer.Serialize(progressInfo.FilePath) +
+ "}";
+ Log.Write(jsonString, LogEvent.Progress);
+ }
+ }
+ }
+ } // foreach progressinfo
+ } // lock
+ //}
+ //else // finished ?
+ //{
+ // Log.Write("*************** ProgressTick: progressTotalPoints is 0, finishing..");
+ // mainWindowStatic.progressBarFiles.Value = 0;
+ // mainWindowStatic.lblStatus.Content = "";
+
+ // foreach (UIElement element in mainWindowStatic.ProgressBarsContainer.Children)
+ // {
+ // if (element is ProgressBar progressBar)
+ // {
+ // progressBar.Value = 0;
+ // progressBar.Foreground = Brushes.Lime;
+ // }
+ // }
+ //}
+ });
+ } // ProgressTick()
+
static (bool, float, float, float) GetBounds(ImportSettings importSettings, int fileIndex)
{
var res = importSettings.reader.InitReader(importSettings, fileIndex);
if (res == false)
{
- Log.WriteLine("Unknown error while initializing reader: " + importSettings.inputFiles[fileIndex]);
+ Log.Write("Unknown error while initializing reader: " + importSettings.inputFiles[fileIndex]);
Environment.ExitCode = (int)ExitCode.Error;
return (false, 0, 0, 0);
}
@@ -468,9 +744,11 @@ static void ProgressTick(object sender, EventArgs e)
}
// process single file
- static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId)
+ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId, CancellationToken cancellationToken)
{
- //Log.WriteLine("parsefile, taskid: " + taskId + " fileindex: " + fileIndex);
+ progressTotalPoints = 0;
+
+ Log.Write("Started processing file: " + importSettings.inputFiles[fileIndex]);
// each thread needs its own reader
bool res;
@@ -478,6 +756,13 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId)
//importSettings.reader = new LAZ(taskId);
IReader taskReader = importSettings.GetOrCreateReader(taskId);
+ ProgressInfo progressInfo = null;
+ //lock (lockObject)
+ {
+ //Log.Write(progressInfos.Count + " : " + fileIndex, LogEvent.Info);
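+ // map this file onto one of the per-thread progress bars (round-robin by file index)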
+ progressInfo = progressInfos[fileIndex % progressInfos.Count];
+ }
+
try
{
res = taskReader.InitReader(importSettings, fileIndex);
@@ -487,21 +772,15 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId)
throw new Exception("Error> Failed to initialize reader: " + importSettings.inputFiles[fileIndex]);
}
- //Log.WriteLine("taskid: " + taskId + " reader initialized");
+ //Log.Write("taskid: " + taskId + " reader initialized");
if (res == false)
{
- Log.WriteLine("Unknown error while initializing reader: " + importSettings.inputFiles[fileIndex]);
+ Log.Write("Unknown error while initializing reader: " + importSettings.inputFiles[fileIndex]);
Environment.ExitCode = (int)ExitCode.Error;
return false;
}
- if (importSettings.importMetadata == true)
- {
- var metaData = taskReader.GetMetaData(importSettings, fileIndex);
- lasHeaders.Add(metaData);
- }
-
if (importSettings.importMetadataOnly == false)
{
int fullPointCount = taskReader.GetPointCount();
@@ -511,31 +790,32 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId)
if (importSettings.skipPoints == true)
{
var afterSkip = (int)Math.Floor(pointCount - (pointCount / (float)importSettings.skipEveryN));
- Log.WriteLine("Skip every X points is enabled, original points: " + fullPointCount + ", After skipping:" + afterSkip);
+ Log.Write("Skip every X points is enabled, original points: " + fullPointCount + ", After skipping:" + afterSkip);
pointCount = afterSkip;
}
if (importSettings.keepPoints == true)
{
- Log.WriteLine("Keep every x points is enabled, original points: " + fullPointCount + ", After keeping:" + (pointCount / importSettings.keepEveryN));
+ Log.Write("Keep every x points is enabled, original points: " + fullPointCount + ", After keeping:" + (pointCount / importSettings.keepEveryN));
pointCount = pointCount / importSettings.keepEveryN;
}
if (importSettings.useLimit == true)
{
- Log.WriteLine("Original points: " + pointCount + " Limited points: " + importSettings.limit);
+ Log.Write("Original points: " + pointCount + " Limited points: " + importSettings.limit);
pointCount = importSettings.limit > pointCount ? pointCount : importSettings.limit;
}
else
{
- Log.WriteLine("Points: " + pointCount);
+ Log.Write("Points: " + pointCount + " (" + importSettings.inputFiles[fileIndex] + ")");
}
// NOTE only works with formats that have bounds defined in header, otherwise need to loop whole file to get bounds?
// dont use these bounds, in this case
- if (importSettings.useAutoOffset == true || (importSettings.importIntensity == true && importSettings.importRGB == true && importSettings.packColors == true))
+ if (importSettings.useAutoOffset == true || ((importSettings.importIntensity == true || importSettings.importClassification == true) && importSettings.importRGB == true && importSettings.packColors == true))
{
+ // TODO add manual offset here still?
// we use global bounds or Y offset to fix negative Y
}
else if (importSettings.useManualOffset == true)
@@ -544,25 +824,43 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId)
importSettings.offsetY = importSettings.manualOffsetY;
importSettings.offsetZ = importSettings.manualOffsetZ;
}
- else // neither
+ else // no autooffset either
{
- importSettings.offsetX = 0;
- importSettings.offsetY = 0;
- importSettings.offsetZ = 0;
+ if (importSettings.useAutoOffset == false)
+ {
+ importSettings.offsetX = 0;
+ importSettings.offsetY = 0;
+ importSettings.offsetZ = 0;
+ }
}
+ //Log.Write("************** Offsets: " + importSettings.offsetX + " " + importSettings.offsetY + " " + importSettings.offsetZ);
+
var taskWriter = importSettings.GetOrCreateWriter(taskId);
- //var writerRes = importSettings.writer.InitWriter(importSettings, pointCount);
- var writerRes = taskWriter.InitWriter(importSettings, pointCount);
+ // for saving pcroot header, we need this writer
+ if (importSettings.exportFormat != ExportFormat.UCPC)
+ {
+ var mainWriterRes = importSettings.writer.InitWriter(importSettings, pointCount, Log);
+ if (mainWriterRes == false)
+ {
+ Log.Write("Error> Failed to initialize main Writer, fileindex: " + fileIndex + " taskid:" + taskId);
+ return false;
+ }
+ }
+
+ // init writer for this file
+ var writerRes = taskWriter.InitWriter(importSettings, pointCount, Log);
if (writerRes == false)
{
- Log.WriteLine("Error> Failed to initialize Writer, fileindex: " + fileIndex + " taskid:" + taskId);
+ Log.Write("Error> Failed to initialize Writer, fileindex: " + fileIndex + " taskid:" + taskId);
return false;
}
- progressPoint = 0;
- progressTotalPoints = importSettings.useLimit ? pointCount : fullPointCount;
+ //progressPoint = 0;
+ progressInfo.CurrentValue = 0;
+ progressInfo.MaxValue = importSettings.useLimit ? pointCount : fullPointCount;
+ progressInfo.FilePath = importSettings.inputFiles[fileIndex];
lastStatusMessage = "Processing points..";
@@ -574,18 +872,45 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId)
"\"status\": \"" + LogStatus.Processing + "\"" +
"}";
- Log.WriteLine(jsonString, LogEvent.File);
+ Log.Write(jsonString, LogEvent.File);
+
+ int checkCancelEvery = Math.Max(1, fullPointCount / 128); // avoid modulo-by-zero for very small files
+
+ // detect is 0-255 or 0-65535 range
+ bool isCustomIntensityRange = false;
// Loop all points
- for (int i = 0; i < fullPointCount; i++)
- //for (int i = 0; i < 1000; i++)
+ // FIXME: would be nicer to use a different step value for skip, keep and limit (to collect points from all over the file, not just the start)
+ int maxPointIterations = importSettings.useLimit ? pointCount : fullPointCount;
+ for (int i = 0; i < maxPointIterations; i++)
{
- // stop at limit count
- if (importSettings.useLimit == true && i > pointCount) break;
+ // check for cancel every 1% of points
+ if (i % checkCancelEvery == 0)
+ {
+ if (cancellationToken.IsCancellationRequested)
+ {
+ //Log.Write("Parse task (" + taskId + ") was canceled for: " + importSettings.inputFiles[fileIndex]);
+ return false;
+ }
+ }
// get point XYZ
Float3 point = taskReader.GetXYZ();
- if (point.hasError == true) break;
+ if (point.hasError == true) break; // TODO display errors
+
+ // get point color
+ Color rgb = (default);
+
+ if (importSettings.importRGB == true)
+ {
+ rgb = taskReader.GetRGB();
+ }
+
+ // skip points
+ if (importSettings.skipPoints == true && (i % importSettings.skipEveryN == 0)) continue;
+
+ // keep points
+ if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue;
// add offsets (its 0 if not used)
point.x -= importSettings.offsetX;
@@ -593,9 +918,6 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId)
point.z -= importSettings.offsetZ;
// scale if enabled
- //point.x = importSettings.useScale ? point.x * importSettings.scale : point.x;
- //point.y = importSettings.useScale ? point.y * importSettings.scale : point.y;
- //point.z = importSettings.useScale ? point.z * importSettings.scale : point.z;
if (importSettings.useScale == true)
{
point.x *= importSettings.scale;
@@ -623,28 +945,74 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId)
point.x = -point.x;
}
- // get point color
- Color rgb = (default);
- Color intensity = (default);
- double time = 0;
-
- if (importSettings.importRGB == true)
+ // filtering is done after scaling and offsets
+ if (importSettings.useFilter)
{
- rgb = taskReader.GetRGB();
+ var cell = ((int)Math.Floor(point.x / importSettings.filterDistance), (int)Math.Floor(point.y / importSettings.filterDistance), (int)Math.Floor(point.z / importSettings.filterDistance));
+
+ if (!occupiedCells.TryAdd(cell, 0))
+ {
+ continue; // cell already taken, skip this point
+ }
}
- // TODO get intensity as separate value, TODO is this float or rgb?
+ ushort intensity = 0;
+ byte classification = 0;
+ double time = 0;
+
+ // TODO get intensity as separate value
if (importSettings.importIntensity == true)
{
+ //intensity = 0;
intensity = taskReader.GetIntensity();
- //if (i < 100) Console.WriteLine(intensity.r);
+
+ //if (i < 20000) Log.Write("int: " + intensity);
+
+ if (importSettings.detectIntensityRange && isCustomIntensityRange == false)
+ {
+ // check if intensity is 0-255 or 0-65535
+ isCustomIntensityRange = intensity > 255;
+ //Log.Write("Detecting intensity range " + intensity + " " + (isCustomIntensityRange ? "************" : "")+" "+ importSettings.inputFiles[fileIndex]);
+ }
+
+ // if no RGB, replace RGB with intensity. NOTE: this doesn't work correctly when detect intensity range is used (the raw value is a ushort, so it can be 0-65535)
+ if (importSettings.importRGB == false)
+ {
+ rgb.r = intensity / 255f; // normalize 0-255 intensity to a 0..1 float
+ rgb.g = rgb.r;
+ rgb.b = rgb.r;
+ }
+ }
+
+ // FIXME cannot have both classification and intensity, because both save into RGB here
+
+ if (importSettings.importClassification == true)
+ {
+ classification = taskReader.GetClassification();
+
+ //classification = (byte)255;
+
+ //if (classification<0 || classification>1) Log.Write("****: " + classification.ToString());
+
+ //if (i < 10000) Log.Write("class: " + classification.ToString() + " minClass: " + minClass + " maxClass: " + maxClass);
+ //classification = 0;
+ //if (intensity.r < minInt)
+ //{
+ // minInt = intensity.r;
+ // Log.Write("Min: " + minInt + " Max: " + maxInt);
+ //}
+ //if (intensity.r > maxInt)
+ //{
+ // maxInt = intensity.r;
+ // Log.Write("Min: " + minInt + " Max: " + maxInt);
+ //}
// if no rgb, then replace RGB with intensity
if (importSettings.importRGB == false)
{
- rgb.r = intensity.r;
- rgb.g = intensity.r;
- rgb.b = intensity.r;
+ rgb.r = classification / 255f;
+ rgb.g = rgb.r;
+ rgb.b = rgb.r;
}
}
@@ -657,24 +1025,60 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId)
// collect this point XYZ and RGB into node, optionally intensity also
//importSettings.writer.AddPoint(i, (float)point.x, (float)point.y, (float)point.z, rgb.r, rgb.g, rgb.b, importSettings.importIntensity, intensity.r, importSettings.averageTimestamp, time);
- taskWriter.AddPoint(i, (float)point.x, (float)point.y, (float)point.z, rgb.r, rgb.g, rgb.b, importSettings.importIntensity, intensity.r, importSettings.averageTimestamp, time);
- progressPoint = i;
+ // TODO can remove importsettings, its already passed on init
+ taskWriter.AddPoint(index: i, x: (float)point.x, y: (float)point.y, z: (float)point.z, r: rgb.r, g: rgb.g, b: rgb.b, intensity: intensity, time: time, classification: classification);
+ //progressPoint = i;
+ progressInfo.CurrentValue = i;
} // for all points
+ // hack for missing 100% progress
+ progressInfo.CurrentValue = maxPointIterations;
+
+ if (importSettings.detectIntensityRange == true)
+ {
+ taskWriter.SetIntensityRange(isCustomIntensityRange);
+ }
+
lastStatusMessage = "Saving files..";
//importSettings.writer.Save(fileIndex);
taskWriter.Save(fileIndex);
lastStatusMessage = "Finished saving..";
//taskReader.Close();
- //Log.WriteLine("------------ release reader and writer ------------");
+ //Log.Write("------------ release reader and writer ------------");
importSettings.ReleaseReader(taskId);
//taskReader.Dispose();
importSettings.ReleaseWriter(taskId);
- //Log.WriteLine("------------ reader and writer released ------------");
- } // if importMetadataOnly == false
+ //Log.Write("------------ reader and writer released ------------");
+
+ // TODO add event for finished writing this file, and return list of output files
+ //jsonString = "{" +
+ // "\"event\": \"" + LogEvent.File + "\"," +
+ // "\"path\": " + System.Text.Json.JsonSerializer.Serialize(importSettings.inputFiles[fileIndex]) + "," +
+ // //"\"size\": " + new FileInfo(importSettings.inputFiles[fileIndex]).Length + "," +
+ // //"\"points\": " + pointCount + "," +
+ // "\"status\": \"" + LogStatus.Complete + "\"" +
+ // "}";
+
+ //Log.Write(jsonString, LogEvent.File);
- //Log.WriteLine("taskid: " + taskId + " done");
+ if (importSettings.importMetadata == true)
+ {
+ var metaData = taskReader.GetMetaData(importSettings, fileIndex);
+ jobMetadata.lasHeaders.Add(metaData);
+ }
+
+ } // if importMetadataOnly == false ^
+ else // only metadata:
+ {
+ if (importSettings.importMetadata == true)
+ {
+ var metaData = taskReader.GetMetaData(importSettings, fileIndex);
+ jobMetadata.lasHeaders.Add(metaData);
+ }
+ }
+
+ //Log.Write("taskid: " + taskId + " done");
return true;
} // ParseFile
@@ -683,6 +1087,10 @@ private void btnConvert_Click(object sender, RoutedEventArgs e)
// reset progress
progressTotalFiles = 0;
progressTotalPoints = 0;
+
+ // reset cancel token
+ _cancellationTokenSource = new CancellationTokenSource();
+
if (ValidateSettings() == true)
{
ProgressTick(null, null);
@@ -692,7 +1100,7 @@ private void btnConvert_Click(object sender, RoutedEventArgs e)
}
else
{
- Log.WriteLine("Error> Invalid settings, aborting..");
+ Log.Write("Error> Invalid settings, aborting..");
}
}
@@ -735,23 +1143,49 @@ void StartProcess(bool doProcess = true)
var args = new List<string>();
// add enabled args to list, TODO use binding later?
- args.Add("-input=" + txtInputFile.Text);
+ string inputFile = txtInputFile.Text;
+ string outputFile = txtOutput.Text;
+
+ // wrap the paths in quotes if they contain spaces
+ if (inputFile.Contains(" ")) inputFile = "\"" + inputFile + "\"";
+ if (outputFile.Contains(" ")) outputFile = "\"" + outputFile + "\"";
if (cmbImportFormat.SelectedItem != null)
{
args.Add("-importformat=" + cmbImportFormat.SelectedItem.ToString());
}
+
+ args.Add("-input=" + inputFile);
+
if (cmbExportFormat.SelectedItem != null)
{
args.Add("-exportformat=" + cmbExportFormat.SelectedItem.ToString());
}
- args.Add("-output=" + txtOutput.Text);
- args.Add("-offset=" + (bool)chkAutoOffset.IsChecked);
+ args.Add("-output=" + outputFile);
+
+ // check if using autooffset
+ //if ((bool)chkAutoOffset.IsChecked && !(bool)chkManualOffset.IsChecked)
+ if (!(bool)chkManualOffset.IsChecked)
+ {
+ args.Add("-offset=" + (bool)chkAutoOffset.IsChecked);
+ }
+
+ args.Add("-offsetmode=" + txtOffsetMode.Text.ToLower());
+
+ // or manual offset, TODO later should allow using both (first autooffset, then add manual)
+ if ((bool)chkManualOffset.IsChecked) args.Add("-offset=" + txtOffsetX.Text + "," + txtOffsetY.Text + "," + txtOffsetZ.Text);
+
args.Add("-rgb=" + (bool)chkImportRGB.IsChecked);
args.Add("-intensity=" + (bool)chkImportIntensity.IsChecked);
+ args.Add("-classification=" + (bool)chkImportClassification.IsChecked);
+
- if (cmbExportFormat.SelectedItem.ToString().ToUpper().Contains("PCROOT")) args.Add("-gridsize=" + txtGridSize.Text);
+ bool isPCROOT = (cmbExportFormat.SelectedItem.ToString() == "PCROOT");
+ bool isGLTF = (cmbExportFormat.SelectedItem.ToString().ToUpper() == "GLTF" || cmbExportFormat.SelectedItem.ToString().ToUpper() == "GLB");
+ // cmbExportFormat.SelectedItem?.ToString()?.ToUpper()?.Contains("PCROOT")
+
+ if (isPCROOT == true) args.Add("-gridsize=" + txtGridSize.Text);
if ((bool)chkUseMinPointCount.IsChecked) args.Add("-minpoints=" + txtMinPointCount.Text);
if ((bool)chkUseScale.IsChecked) args.Add("-scale=" + txtScale.Text);
@@ -764,7 +1198,6 @@ void StartProcess(bool doProcess = true)
if ((bool)chkUseSkip.IsChecked) args.Add("-skip=" + txtSkipEvery.Text);
if ((bool)chkUseKeep.IsChecked) args.Add("-keep=" + txtKeepEvery.Text);
if ((bool)chkUseMaxFileCount.IsChecked) args.Add("-maxfiles=" + txtMaxFileCount.Text);
- if ((bool)chkManualOffset.IsChecked) args.Add("-offset=" + txtOffsetX.Text + "," + txtOffsetY.Text + "," + txtOffsetZ.Text);
args.Add("-randomize=" + (bool)chkRandomize.IsChecked);
if ((bool)chkSetRandomSeed.IsChecked) args.Add("-seed=" + txtRandomSeed.Text);
if ((bool)chkUseJSONLog.IsChecked) args.Add("-json=true");
@@ -774,10 +1207,17 @@ void StartProcess(bool doProcess = true)
if ((bool)chkCalculateOverlappingTiles.IsChecked) args.Add("-checkoverlap=true");
args.Add("-maxthreads=" + txtMaxThreads.Text);
+ if ((bool)chkUseFilter.IsChecked) args.Add("-filter=" + txtFilterDistance.Text);
+
+ if (isGLTF == true) args.Add(("-usegrid=" + (bool)chkUseGrid.IsChecked).ToLower());
+
if (((bool)chkImportIntensity.IsChecked) && ((bool)chkCustomIntensityRange.IsChecked)) args.Add("-customintensityrange=True");
+ if ((bool)chkDetectIntensityRange.IsChecked) args.Add("-detectintensityrange=True");
// check input files
- var importSettings = ArgParser.Parse(args.ToArray(), rootFolder);
+ //Trace.WriteLine("loggeris:" + Log.GetType().ToString());
+
+ var importSettings = ArgParser.Parse(args.ToArray(), rootFolder, Log);
// if have files, process them
if (importSettings.errors.Count == 0)
@@ -815,6 +1255,8 @@ void StartProcess(bool doProcess = true)
CancellationToken = _cancellationTokenSource.Token
};
+ InitProgressBars(importSettings);
+
Task.Run(() => ProcessAllFiles(workerParams));
}
}
@@ -826,30 +1268,198 @@ void StartProcess(bool doProcess = true)
}
}
- private CancellationTokenSource _cancellationTokenSource = new CancellationTokenSource();
- private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
+ // set gui from commandline args
+ void ImportArgs(string rawArgs)
{
- SaveSettings();
+ Log.Write(rawArgs);
+ string[] args = ArgParser.SplitArgs(rawArgs);
+ bool isFirstArgExe = args[0].EndsWith(".exe", StringComparison.OrdinalIgnoreCase);
+ int startIndex = isFirstArgExe ? 1 : 0;
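+ // if a full command line was pasted (starting with the exe path), skip the exe path itself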
- // Signal the cancellation to the worker thread
- _cancellationTokenSource.Cancel();
+ // reset all checkboxes to false
+ UncheckAllCheckboxes(this);
+
+ for (int i = startIndex; i < args.Length; i++)
+ {
+ string arg = args[i];
+ arg = arg.TrimStart('-');
+
+ if (i + 1 < args.Length)
+ {
+ string[] parts = args[i].Split('=');
- if (workerThread != null)
+ if (parts.Length < 2)
+ {
+ Log.Write($"Missing value for argument: {arg}");
+ continue;
+ }
+
+ string key = parts[0].ToLower().TrimStart('-');
+ string value = parts[1];
+
+ // FIXME, if value is not saved, need to use default value
+
+ // Apply the key-value pairs to the GUI elements
+ switch (key)
+ {
+ case "input":
+ txtInputFile.Text = value;
+ break;
+ case "importformat":
+ cmbImportFormat.SelectedItem = value;
+ break;
+ case "exportformat":
+ cmbExportFormat.SelectedItem = value;
+ break;
+ case "output":
+ txtOutput.Text = value;
+ break;
+ case "offset":
+ chkAutoOffset.IsChecked = value.ToLower() == "true";
+ break;
+ case "rgb":
+ chkImportRGB.IsChecked = value.ToLower() == "true";
+ break;
+ case "intensity":
+ chkImportIntensity.IsChecked = value.ToLower() == "true";
+ break;
+ case "classification":
+ chkImportClassification.IsChecked = value.ToLower() == "true";
+ break;
+ case "gridsize":
+ txtGridSize.Text = value;
+ break;
+ case "minpoints":
+ chkUseMinPointCount.IsChecked = true;
+ txtMinPointCount.Text = value;
+ break;
+ case "scale":
+ chkUseScale.IsChecked = true;
+ txtScale.Text = value;
+ break;
+ case "swap":
+ chkSwapYZ.IsChecked = value.ToLower() == "true";
+ break;
+ case "invertx":
+ chkInvertX.IsChecked = value.ToLower() == "true";
+ break;
+ case "invertz":
+ chkInvertZ.IsChecked = value.ToLower() == "true";
+ break;
+ case "pack":
+ chkPackColors.IsChecked = value.ToLower() == "true";
+ break;
+ case "packmagic":
+ chkUsePackMagic.IsChecked = true;
+ txtPackMagic.Text = value;
+ break;
+ case "limit":
+ chkUseMaxImportPointCount.IsChecked = true;
+ txtMaxImportPointCount.Text = value;
+ break;
+ case "keep":
+ chkUseKeep.IsChecked = true;
+ txtKeepEvery.Text = value;
+ break;
+ case "maxfiles":
+ chkUseMaxFileCount.IsChecked = true;
+ txtMaxFileCount.Text = value;
+ break;
+ case "randomize":
+ chkRandomize.IsChecked = value.ToLower() == "true";
+ break;
+ case "seed":
+ chkSetRandomSeed.IsChecked = true;
+ txtRandomSeed.Text = value;
+ break;
+ case "json":
+ chkUseJSONLog.IsChecked = value.ToLower() == "true";
+ break;
+ case "metadata":
+ chkReadMetaData.IsChecked = value.ToLower() == "true";
+ break;
+ case "metadataonly":
+ chkMetaDataOnly.IsChecked = value.ToLower() == "true";
+ break;
+ case "averagetimestamp":
+ chkGetAvgTileTimestamp.IsChecked = value.ToLower() == "true";
+ break;
+ case "checkoverlap":
+ chkCalculateOverlappingTiles.IsChecked = value.ToLower() == "true";
+ break;
+ case "maxthreads":
+ txtMaxThreads.Text = value;
+ break;
+ case "customintensityrange":
+ chkCustomIntensityRange.IsChecked = value.ToLower() == "true";
+ break;
+ default:
+ Console.WriteLine($"Unknown argument: {key}");
+ break;
+ }
+ }
+ else
+ {
+ Console.WriteLine($"Missing value for argument: {arg}");
+ }
+ } // for all args
+ } // ImportArgs()
+
+ private void UncheckAllCheckboxes(DependencyObject parent)
+ {
+ // Loop through all the child elements
+ for (int i = 0; i < VisualTreeHelper.GetChildrenCount(parent); i++)
{
- // Wait for the worker thread to finish
- workerThread.Join();
+ var child = VisualTreeHelper.GetChild(parent, i);
+
+ // If the child is a CheckBox, set it to unchecked
+ if (child is CheckBox checkBox)
+ {
+ checkBox.IsChecked = false;
+ }
- // Optionally exit the application
- Environment.Exit((int)ExitCode.Cancelled);
+ // If the child is a container, recursively call the function to check its children
+ if (VisualTreeHelper.GetChildrenCount(child) > 0)
+ {
+ UncheckAllCheckboxes(child);
+ }
}
}
+ private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
+ {
+ SaveSettings();
+
+ // Signal the cancellation to the worker thread
+ _cancellationTokenSource.Cancel();
+ }
+
private void btnBrowseInput_Click(object sender, RoutedEventArgs e)
{
// select single file
var dialog = new OpenFileDialog();
dialog.Title = "Select file to import";
- dialog.Filter = "LAS|*.las;*.laz";
+
+ if (cmbImportFormat.SelectedItem != null)
+ {
+ var format = cmbImportFormat.SelectedItem.ToString();
+ if (format == "LAS" || format == "LAZ")
+ {
+ dialog.Filter = "LAS Files|*.las;*.laz|All Files|*.*";
+ }
+ else if (format == "PLY")
+ {
+ dialog.Filter = "PLY Files|*.ply|All Files|*.*";
+ }
+ else
+ {
+ dialog.Filter = "All Files|*.*";
+ }
+ }
+ else
+ {
+ dialog.Filter = "Point Cloud Files|*.las;*.laz;*.ply|LAS Files|*.las;*.laz|PLY Files|*.ply|All Files|*.*";
+ }
// take folder from field
if (string.IsNullOrEmpty(txtInputFile.Text) == false)
@@ -897,7 +1507,7 @@ private void btnBrowseOutput_Click(object sender, RoutedEventArgs e)
// select single output filename
var dialog = new SaveFileDialog();
dialog.Title = "Set output folder and filename";
- dialog.Filter = "UCPC (V2)|*.ucpc|PCROOT (V3)|*.pcroot";
+ dialog.Filter = "UCPC (V2)|*.ucpc|PCROOT (V3)|*.pcroot" + externalFileFormats;
dialog.FilterIndex = cmbExportFormat.SelectedIndex + 1;
@@ -941,16 +1551,29 @@ private void btnBrowseOutput_Click(object sender, RoutedEventArgs e)
private void LoadSettings()
{
+ // add importer formats
foreach (var item in Enum.GetValues(typeof(ImportFormat)))
{
if ((ImportFormat)item == ImportFormat.Unknown) continue;
cmbImportFormat.Items.Add(item);
}
+ // add builtin exporter formats
foreach (var item in Enum.GetValues(typeof(ExportFormat)))
{
if ((ExportFormat)item == ExportFormat.Unknown) continue;
- cmbExportFormat.Items.Add(item);
+ if ((ExportFormat)item == ExportFormat.External) continue;
+ cmbExportFormat.Items.Add(item.ToString());
+ }
+
+ // Add dynamic export formats discovered from plugins
+ foreach (var externalPlugin in externalWriters)
+ {
+ // Avoid adding duplicates if they already exist in the enum
+ if (!cmbExportFormat.Items.Contains(externalPlugin.Key))
+ {
+ cmbExportFormat.Items.Add(externalPlugin.Key);
+ }
}
// TODO check if format is available in list..
@@ -962,6 +1585,7 @@ private void LoadSettings()
chkImportRGB.IsChecked = Properties.Settings.Default.importRGB;
chkImportIntensity.IsChecked = Properties.Settings.Default.importIntensity;
+ chkImportClassification.IsChecked = Properties.Settings.Default.importClassification;
chkAutoOffset.IsChecked = Properties.Settings.Default.useAutoOffset;
txtGridSize.Text = Properties.Settings.Default.gridSize.ToString();
@@ -985,6 +1609,7 @@ private void LoadSettings()
txtMaxFileCount.Text = Properties.Settings.Default.maxFileCount.ToString();
chkRandomize.IsChecked = Properties.Settings.Default.randomize;
chkCustomIntensityRange.IsChecked = Properties.Settings.Default.customintensityrange;
+ chkDetectIntensityRange.IsChecked = Properties.Settings.Default.detectIntensityRange;
chkOpenOutputFolder.IsChecked = Properties.Settings.Default.openOutputFolder;
chkManualOffset.IsChecked = Properties.Settings.Default.useManualOffset;
txtOffsetX.Text = Properties.Settings.Default.manualOffsetX.ToString();
@@ -998,6 +1623,10 @@ private void LoadSettings()
chkGetAvgTileTimestamp.IsChecked = Properties.Settings.Default.getAvgTileTimestamp;
chkCalculateOverlappingTiles.IsChecked = Properties.Settings.Default.calculateOverlappingTiles;
txtMaxThreads.Text = Properties.Settings.Default.maxThreads;
+ chkUseGrid.IsChecked = Properties.Settings.Default.useGrid;
+ txtOffsetMode.Text = Properties.Settings.Default.offsetMode;
+ chkUseFilter.IsChecked = Properties.Settings.Default.useFilter;
+ txtFilterDistance.Text = Properties.Settings.Default.filterDistance.ToString();
isInitialiazing = false;
}
@@ -1029,15 +1658,16 @@ void SaveSettings()
Properties.Settings.Default.maxFileCount = Tools.ParseInt(txtMaxFileCount.Text);
Properties.Settings.Default.randomize = (bool)chkRandomize.IsChecked;
Properties.Settings.Default.customintensityrange = (bool)chkCustomIntensityRange.IsChecked;
+ Properties.Settings.Default.detectIntensityRange = (bool)chkDetectIntensityRange.IsChecked;
Properties.Settings.Default.openOutputFolder = (bool)chkOpenOutputFolder.IsChecked;
Properties.Settings.Default.useManualOffset = (bool)chkManualOffset.IsChecked;
- float.TryParse(txtOffsetX.Text, out float offsetX);
+ float.TryParse(txtOffsetX.Text.Replace(",", "."), NumberStyles.Float, CultureInfo.InvariantCulture, out float offsetX);
Properties.Settings.Default.manualOffsetX = offsetX;
- float.TryParse(txtOffsetY.Text, out float offsetY);
+ float.TryParse(txtOffsetY.Text.Replace(",", "."), NumberStyles.Float, CultureInfo.InvariantCulture, out float offsetY);
Properties.Settings.Default.manualOffsetY = offsetY;
- float.TryParse(txtOffsetZ.Text, out float offsetZ);
+ float.TryParse(txtOffsetZ.Text.Replace(",", "."), NumberStyles.Float, CultureInfo.InvariantCulture, out float offsetZ);
int tempSeed = 42;
- int.TryParse(txtRandomSeed.Text, out tempSeed);
+ int.TryParse(txtRandomSeed.Text, NumberStyles.Integer, CultureInfo.InvariantCulture, out tempSeed);
Properties.Settings.Default.seed = tempSeed;
Properties.Settings.Default.useJSON = (bool)chkUseJSONLog.IsChecked;
Properties.Settings.Default.importMetadata = (bool)chkReadMetaData.IsChecked;
@@ -1047,6 +1677,10 @@ void SaveSettings()
Properties.Settings.Default.getAvgTileTimestamp = (bool)chkGetAvgTileTimestamp.IsChecked;
Properties.Settings.Default.calculateOverlappingTiles = (bool)chkCalculateOverlappingTiles.IsChecked;
Properties.Settings.Default.maxThreads = txtMaxThreads.Text;
+ Properties.Settings.Default.useGrid = (bool)chkUseGrid.IsChecked;
+ Properties.Settings.Default.offsetMode = txtOffsetMode.Text;
+ Properties.Settings.Default.useFilter = (bool)chkUseFilter.IsChecked;
+ Properties.Settings.Default.filterDistance = Tools.ParseFloat(txtFilterDistance.Text);
Properties.Settings.Default.Save();
}
@@ -1061,20 +1695,52 @@ private void Window_Loaded(object sender, RoutedEventArgs e)
private void BtnCancel_Click(object sender, RoutedEventArgs e)
{
- abort = true;
+ Log.Write("Aborting - Please wait..");
_cancellationTokenSource.Cancel();
-
- if (workerThread != null)
- {
- workerThread.Join();
- Environment.Exit((int)ExitCode.Cancelled);
- }
}
private void cmbExportFormat_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
- // updatae file extension, if set
- txtOutput.Text = Path.ChangeExtension(txtOutput.Text, "." + cmbExportFormat.SelectedValue.ToString().ToLower());
+ if (isInitialiazing == true) return;
+
+ // get current output path
+ var currentOutput = txtOutput.Text;
+
+ // check if output is file or directory
+ if (Directory.Exists(currentOutput) == true) // its directory
+ {
+ // PCROOT requires a filename, so build a default one (input name or "output") with the .pcroot extension
+ if (cmbExportFormat.SelectedValue.ToString().ToUpper().Contains("PCROOT"))
+ {
+ string sourceName = Path.GetFileNameWithoutExtension(txtInputFile.Text);
+ if (string.IsNullOrEmpty(sourceName)) sourceName = "output";
+
+ txtOutput.Text = Path.Combine(currentOutput, sourceName + ".pcroot");
+ }
+ return;
+ }
+
+ // check if file has extension already
+ if (string.IsNullOrEmpty(Path.GetExtension(currentOutput)) == false)
+ {
+ // add extension based on selected format
+ txtOutput.Text = Path.ChangeExtension(currentOutput, "." + cmbExportFormat.SelectedValue.ToString().ToLower());
+ }
+ else // no extension, set default filename
+ {
+ // check if a filename is present
+ if (string.IsNullOrEmpty(Path.GetFileName(currentOutput)) == false)
+ {
+ // add extension based on selected format
+ txtOutput.Text = Path.Combine(Path.GetDirectoryName(currentOutput), Path.GetFileName(currentOutput) + "." + cmbExportFormat.SelectedValue.ToString().ToLower());
+ }
+ else // no filename, set default
+ {
+ //Log.Write("cmbExportFormat.SelectedValue> " + cmbExportFormat.SelectedValue.ToString());
+ if (string.IsNullOrEmpty(currentOutput)) return;
+ txtOutput.Text = Path.Combine(Path.GetDirectoryName(currentOutput), "output." + cmbExportFormat.SelectedValue.ToString().ToLower());
+ }
+ }
}
private void chkImportRGB_Checked(object sender, RoutedEventArgs e)
@@ -1087,6 +1753,16 @@ private void chkImportRGB_Checked(object sender, RoutedEventArgs e)
Properties.Settings.Default.Save();
}
+ private void chkImportRGB_Unchecked(object sender, RoutedEventArgs e)
+ {
+ if (isInitialiazing == true) return;
+ Properties.Settings.Default.importRGB = false;
+
+ //chkImportIntensity.IsChecked = true;
+ //Properties.Settings.Default.importIntensity = true;
+ Properties.Settings.Default.Save();
+ }
+
private void chkImportIntensity_Checked(object sender, RoutedEventArgs e)
{
if (isInitialiazing == true) return;
@@ -1101,21 +1777,30 @@ private void chkImportIntensity_Unchecked(object sender, RoutedEventArgs e)
if (isInitialiazing == true) return;
Properties.Settings.Default.importIntensity = false;
- chkImportRGB.IsChecked = true;
- Properties.Settings.Default.importRGB = true;
+ //chkImportRGB.IsChecked = true;
+ //Properties.Settings.Default.importRGB = true;
+ Properties.Settings.Default.importIntensity = false;
Properties.Settings.Default.Save();
}
- private void chkImportRGB_Unchecked(object sender, RoutedEventArgs e)
+ private void chkImportClassification_Checked(object sender, RoutedEventArgs e)
{
if (isInitialiazing == true) return;
- Properties.Settings.Default.importRGB = false;
- chkImportIntensity.IsChecked = true;
- Properties.Settings.Default.importIntensity = true;
+ // TODO for now only can import classification as RGB color
+ Properties.Settings.Default.importClassification = true;
+ Properties.Settings.Default.Save();
+ }
+
+ private void chkImportClassification_Unchecked(object sender, RoutedEventArgs e)
+ {
+ if (isInitialiazing == true) return;
+ // TODO for now only can import classification as RGB color
+ Properties.Settings.Default.importClassification = false;
Properties.Settings.Default.Save();
}
+
private void txtInputFile_DragEnter(object sender, DragEventArgs e)
{
if (e.Data.GetDataPresent(DataFormats.FileDrop))
@@ -1143,7 +1828,19 @@ private void txtInputFile_Drop(object sender, DragEventArgs e)
private void btnHelp_Click(object sender, RoutedEventArgs e)
{
- Process.Start("https://github.com/unitycoder/PointCloudConverter/wiki");
+ try
+ {
+ var processStartInfo = new ProcessStartInfo
+ {
+ FileName = "https://github.com/unitycoder/PointCloudConverter/wiki",
+ UseShellExecute = true
+ };
+ Process.Start(processStartInfo);
+ }
+ catch (Exception ex)
+ {
+ MessageBox.Show($"Unable to open the link. Error: {ex.Message}");
+ }
}
private void chkAutoOffset_Checked(object sender, RoutedEventArgs e)
@@ -1168,13 +1865,72 @@ private void chkManualOffset_Checked(object sender, RoutedEventArgs e)
private void btnCopyToClipboard_Click(object sender, RoutedEventArgs e)
{
- // copy console to clipboard
- System.Windows.Clipboard.SetText(txtConsole.Text);
- // focus
+ Clipboard.SetText(txtConsole.Text);
txtConsole.Focus();
- // select all text
txtConsole.SelectAll();
e.Handled = true;
}
+
+ private void btnImportSettings_Click(object sender, RoutedEventArgs e)
+ {
+ var dialog = new OpenFileDialog();
+ dialog.Title = "Import settings file";
+ dialog.Filter = "Text files (*.txt)|*.txt|All files (*.*)|*.*";
+
+ // use the previously used config folder if set, otherwise the local configs folder if it exists, otherwise the dialog default
+ if (string.IsNullOrEmpty(Properties.Settings.Default.lastUsedConfigFolder) == false)
+ {
+ dialog.InitialDirectory = Properties.Settings.Default.lastUsedConfigFolder;
+ }
+ else if (Directory.Exists(Path.Combine(Directory.GetCurrentDirectory(), "configs")))
+ {
+ dialog.InitialDirectory = Path.Combine(Directory.GetCurrentDirectory(), "configs");
+ }
+
+ if (dialog.ShowDialog() == true)
+ {
+ if (File.Exists(dialog.FileName))
+ {
+ var contents = File.ReadAllText(dialog.FileName);
+ ImportArgs(contents);
+ Properties.Settings.Default.lastUsedConfigFolder = Path.GetDirectoryName(dialog.FileName);
+ }
+ }
+ }
+
+ private void btnExportSettings_Click(object sender, RoutedEventArgs e)
+ {
+ var dialog = new SaveFileDialog();
+ dialog.Title = "Save settings file";
+ dialog.Filter = "Text files (*.txt)|*.txt|All files (*.*)|*.*";
+
+ if (string.IsNullOrEmpty(Properties.Settings.Default.lastUsedConfigFolder) == false)
+ {
+ dialog.InitialDirectory = Properties.Settings.Default.lastUsedConfigFolder;
+ }
+ else if (Directory.Exists(Path.Combine(Directory.GetCurrentDirectory(), "configs")))
+ {
+ dialog.InitialDirectory = Path.Combine(Directory.GetCurrentDirectory(), "configs");
+ }
+
+ if (dialog.ShowDialog() == true)
+ {
+ StartProcess(false);
+ File.WriteAllText(dialog.FileName, txtConsole.Text);
+ Properties.Settings.Default.lastUsedConfigFolder = Path.GetDirectoryName(dialog.FileName);
+ }
+ }
+
+ private void btnPlugins_Click(object sender, RoutedEventArgs e)
+ {
+ // open plugins folder in explorer at location of this exe
+ var pluginsFolder = Path.Combine(Directory.GetCurrentDirectory(), "plugins");
+ if (Directory.Exists(pluginsFolder) == false)
+ {
+ Directory.CreateDirectory(pluginsFolder);
+ }
+ Process.Start(new ProcessStartInfo("explorer.exe", pluginsFolder));
+ }
+
} // class
} // namespace
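
The ImportArgs handler above walks a saved settings string, splits each "-key=value" token and routes the value to the matching GUI control. A minimal, standalone sketch of that key/value parsing step (a hypothetical helper, not part of this patch):

    using System;
    using System.Collections.Generic;

    static class ArgSketch
    {
        // Splits tokens like "-scale=0.5" into lowercase key / raw value pairs.
        // Tokens without a value are reported and skipped, mirroring the GUI importer.
        public static Dictionary<string, string> ParseKeyValueArgs(IEnumerable<string> tokens)
        {
            var result = new Dictionary<string, string>();
            foreach (var token in tokens)
            {
                var parts = token.Split(new[] { '=' }, 2);
                if (parts.Length != 2 || parts[1].Length == 0)
                {
                    Console.WriteLine($"Missing value for argument: {token}");
                    continue;
                }
                var key = parts[0].ToLower().TrimStart('-');
                result[key] = parts[1];
            }
            return result;
        }

        static void Main()
        {
            var parsed = ParseKeyValueArgs(new[] { "-input=cloud.las", "-scale=0.5", "-rgb=true", "-broken" });
            foreach (var kv in parsed) Console.WriteLine($"{kv.Key} = {kv.Value}");
        }
    }
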
diff --git a/PointCloudConverter.csproj b/PointCloudConverter.csproj
index 670281c..34c5a9d 100644
--- a/PointCloudConverter.csproj
+++ b/PointCloudConverter.csproj
@@ -1,8 +1,8 @@
WinExe
- net8.0-windows10.0.22621.0
- true
+ net8.0-windows10.0.22621.0
+ trueenableenableIcons\app.ico
@@ -10,46 +10,51 @@
PointCloudConvertertrue10.0.17763.0
- True
+ FalsePointCloudConverterPointCloudConverter.AppFalseFalseFalse
- AnyCPU;x64
-
-
- 4
- full
+ x644full
-
- 4
- full
- 4full
+
+
+
+
+
+
+
+
+
-
+
+
+
+
+
diff --git a/PointCloudConverter.sln b/PointCloudConverter.sln
index 51091f9..a1166b6 100644
--- a/PointCloudConverter.sln
+++ b/PointCloudConverter.sln
@@ -5,16 +5,32 @@ VisualStudioVersion = 17.10.35122.118
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PointCloudConverter", "PointCloudConverter.csproj", "{B348688B-00FB-45A8-8BBD-D2D64FD7E8D8}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Shared", "Interfaces\Shared.csproj", "{692B05A5-DEB5-4B3B-8171-1C126783003B}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
Debug|x64 = Debug|x64
+ Release|Any CPU = Release|Any CPU
Release|x64 = Release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {B348688B-00FB-45A8-8BBD-D2D64FD7E8D8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B348688B-00FB-45A8-8BBD-D2D64FD7E8D8}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B348688B-00FB-45A8-8BBD-D2D64FD7E8D8}.Debug|x64.ActiveCfg = Debug|Any CPU
{B348688B-00FB-45A8-8BBD-D2D64FD7E8D8}.Debug|x64.Build.0 = Debug|Any CPU
+ {B348688B-00FB-45A8-8BBD-D2D64FD7E8D8}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B348688B-00FB-45A8-8BBD-D2D64FD7E8D8}.Release|Any CPU.Build.0 = Release|Any CPU
{B348688B-00FB-45A8-8BBD-D2D64FD7E8D8}.Release|x64.ActiveCfg = Release|x64
{B348688B-00FB-45A8-8BBD-D2D64FD7E8D8}.Release|x64.Build.0 = Release|x64
+ {692B05A5-DEB5-4B3B-8171-1C126783003B}.Debug|Any CPU.ActiveCfg = Debug|x64
+ {692B05A5-DEB5-4B3B-8171-1C126783003B}.Debug|Any CPU.Build.0 = Debug|x64
+ {692B05A5-DEB5-4B3B-8171-1C126783003B}.Debug|x64.ActiveCfg = Debug|x64
+ {692B05A5-DEB5-4B3B-8171-1C126783003B}.Debug|x64.Build.0 = Debug|x64
+ {692B05A5-DEB5-4B3B-8171-1C126783003B}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {692B05A5-DEB5-4B3B-8171-1C126783003B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {692B05A5-DEB5-4B3B-8171-1C126783003B}.Release|x64.ActiveCfg = Release|x64
+ {692B05A5-DEB5-4B3B-8171-1C126783003B}.Release|x64.Build.0 = Release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/Properties/Settings.Designer.cs b/Properties/Settings.Designer.cs
index 9894163..1175cd1 100644
--- a/Properties/Settings.Designer.cs
+++ b/Properties/Settings.Designer.cs
@@ -12,7 +12,7 @@ namespace PointCloudConverter.Properties {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
- [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "17.10.0.0")]
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "17.13.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
@@ -562,5 +562,89 @@ public string maxThreads {
this["maxThreads"] = value;
}
}
+
+ [global::System.Configuration.UserScopedSettingAttribute()]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Configuration.DefaultSettingValueAttribute("True")]
+ public bool useGrid {
+ get {
+ return ((bool)(this["useGrid"]));
+ }
+ set {
+ this["useGrid"] = value;
+ }
+ }
+
+ [global::System.Configuration.UserScopedSettingAttribute()]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Configuration.DefaultSettingValueAttribute("")]
+ public string lastUsedConfigFolder {
+ get {
+ return ((string)(this["lastUsedConfigFolder"]));
+ }
+ set {
+ this["lastUsedConfigFolder"] = value;
+ }
+ }
+
+ [global::System.Configuration.UserScopedSettingAttribute()]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Configuration.DefaultSettingValueAttribute("min")]
+ public string offsetMode {
+ get {
+ return ((string)(this["offsetMode"]));
+ }
+ set {
+ this["offsetMode"] = value;
+ }
+ }
+
+ [global::System.Configuration.UserScopedSettingAttribute()]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Configuration.DefaultSettingValueAttribute("False")]
+ public bool importClassification {
+ get {
+ return ((bool)(this["importClassification"]));
+ }
+ set {
+ this["importClassification"] = value;
+ }
+ }
+
+ [global::System.Configuration.UserScopedSettingAttribute()]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Configuration.DefaultSettingValueAttribute("False")]
+ public bool useFilter {
+ get {
+ return ((bool)(this["useFilter"]));
+ }
+ set {
+ this["useFilter"] = value;
+ }
+ }
+
+ [global::System.Configuration.UserScopedSettingAttribute()]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Configuration.DefaultSettingValueAttribute("0.5")]
+ public float filterDistance {
+ get {
+ return ((float)(this["filterDistance"]));
+ }
+ set {
+ this["filterDistance"] = value;
+ }
+ }
+
+ [global::System.Configuration.UserScopedSettingAttribute()]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Configuration.DefaultSettingValueAttribute("False")]
+ public bool detectIntensityRange {
+ get {
+ return ((bool)(this["detectIntensityRange"]));
+ }
+ set {
+ this["detectIntensityRange"] = value;
+ }
+ }
}
}
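
The SaveSettings changes earlier in this patch parse float textboxes with CultureInfo.InvariantCulture after normalizing commas, so values like "0,5" and "0.5" both round-trip into settings such as filterDistance. A small illustrative sketch of that parsing approach:

    using System;
    using System.Globalization;

    static class CultureParseSketch
    {
        // Accepts both "0.5" and "0,5" regardless of the machine's regional settings.
        public static float ParseFloatInvariant(string text, float fallback = 0f)
        {
            if (float.TryParse(text?.Replace(",", "."), NumberStyles.Float, CultureInfo.InvariantCulture, out var value))
                return value;
            return fallback;
        }

        static void Main()
        {
            Console.WriteLine(ParseFloatInvariant("0.5"));      // 0.5
            Console.WriteLine(ParseFloatInvariant("0,5"));      // 0.5
            Console.WriteLine(ParseFloatInvariant("oops", 1f)); // 1 (fallback)
        }
    }
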
diff --git a/Properties/Settings.settings b/Properties/Settings.settings
index 0d562f0..2086cd2 100644
--- a/Properties/Settings.settings
+++ b/Properties/Settings.settings
@@ -137,5 +137,26 @@
4
+
+ True
+
+
+
+
+
+ min
+
+
+ False
+
+
+ False
+
+
+ 0.5
+
+
+ False
+
\ No newline at end of file
diff --git a/README.md b/README.md
index dd53f67..e3a297e 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,8 @@
# PointCloudConverter
-PointCloud converter (commandline and GUI) for [Point Cloud Viewer & Tools (Unity plugin)](https://assetstore.unity.com/packages/tools/utilities/point-cloud-viewer-and-tools-16019?aid=1101lGti)
+Point cloud converter (commandline and GUI) for [Point Cloud Viewer & Tools 3 (Unity plugin)](https://assetstore.unity.com/packages/tools/utilities/point-cloud-viewer-and-tools-3-310385?aid=1101lGti)
+
+### Documentation
+- Check [Wiki](https://github.com/unitycoder/PointCloudConverter/wiki)
### Download prebuild exe
- From Releases https://github.com/unitycoder/PointCloudConverter/releases
@@ -7,25 +10,21 @@ PointCloud converter (commandline and GUI) for [Point Cloud Viewer & Tools (
### Arguments
- https://github.com/unitycoder/PointCloudConverter/wiki/Commandline-Arguments
-### Building
-- Open project in VS2019 or later
-- Press F5 to build
-- Executable is created in the /bin/ folder (you can launch it from command prompt, or from Explorer to use GUI)
-
-### Notes
-- See Project/PointCloudConverter Properties.. > Build Events / Post build: Small robocopy script is used to move output files into lib/ folder (so that executable is alone in the root folder)
-
### Import Formats
- LAZ/LAS
+- PLY (ascii/binary) *Initial version
+- E57 *Experimental version
- (more to be added)
### Export Formats
- UCPC (V2) for https://github.com/unitycoder/UnityPointCloudViewer
- PCROOT (V3) for https://github.com/unitycoder/UnityPointCloudViewer
+- GLTF (GLB) output https://las2gltf.kelobyte.fi/ *Paid plugin
+- (more to be added)
### Requirements
-- Windows 10
-- Visual Studio 2017 or later
+- Windows 10 or later
+- Visual Studio 2022 or later
- To view converted Point Clouds inside Unity, this viewer is required: from Unity Asset Store: https://github.com/unitycoder/UnityPointCloudViewer
### Pull Request
@@ -33,7 +32,13 @@ This standalone converter is open-source, so you can create your own Forks and v
Pull requests to improve this converter are welcome! (please create Issue first, so i/users can comment on it)
### Images
-
+
+### Building
+- Open project in VS2022 or later
+- Press F5 to build
+- Executable is created in the /bin/ folder (you can launch it from command prompt, or from Explorer to use GUI)
+### Notes
+- See Project/PointCloudConverter Properties.. > Build Events / Post build: Small robocopy script is used to move output files into lib/ folder (so that executable is alone in the root folder)
diff --git a/Readers/E57.cs b/Readers/E57.cs
new file mode 100644
index 0000000..1ad8419
--- /dev/null
+++ b/Readers/E57.cs
@@ -0,0 +1,193 @@
+using System;
+using System.IO;
+using System.Collections.Generic;
+using Aardvark.Base;
+using Aardvark.Data.Points.Import;
+using PointCloudConverter.Structs;
+using static Aardvark.Data.Points.Import.E57;
+using Aardvark.Data.Points;
+using System.Text.Json;
+using Aardvark.Data.E57;
+
+namespace PointCloudConverter.Readers
+{
+ public class E57 : IReader, IDisposable
+ {
+ private IEnumerator<E57Chunk> chunkEnumerator;
+ private E57Chunk currentChunk;
+ private int currentPointIndex = 0;
+
+ private ASTM_E57.E57FileHeader header;
+ private E57MetaData metaData;
+
+ private Float3 lastXYZ;
+
+ public struct E57MetaData
+ {
+ public string Name { get; set; }
+ public double X { get; set; }
+ public double Y { get; set; }
+ public double Z { get; set; }
+ public double RX { get; set; }
+ public double RY { get; set; }
+ public double RZ { get; set; }
+ public double RW { get; set; }
+ }
+
+ public bool InitReader(ImportSettings importSettings, int fileIndex)
+ {
+ try
+ {
+ var filePath = importSettings.inputFiles[fileIndex];
+
+ // Read header metadata
+ using var stream = File.OpenRead(filePath);
+ header = ASTM_E57.E57FileHeader.Parse(stream, new FileInfo(filePath).Length, false);
+ stream.Close();
+
+ var pose = header.E57Root.Data3D[0].Pose;
+
+ metaData = new E57MetaData
+ {
+ Name = header.E57Root.Data3D[0].Name,
+ X = pose.Translation.X,
+ Y = importSettings.swapYZ ? pose.Translation.Z : pose.Translation.Y,
+ Z = importSettings.swapYZ ? pose.Translation.Y : pose.Translation.Z,
+ RX = pose.Rotation.X,
+ RY = importSettings.swapYZ ? pose.Rotation.Z : pose.Rotation.Y,
+ RZ = importSettings.swapYZ ? pose.Rotation.Y : pose.Rotation.Z,
+ RW = pose.Rotation.W
+ };
+
+ var chunks = ChunksFull(filePath, ParseConfig.Default);
+ chunkEnumerator = chunks.GetEnumerator();
+
+ if (!chunkEnumerator.MoveNext())
+ return false;
+
+ currentChunk = chunkEnumerator.Current;
+ currentPointIndex = 0;
+
+ return true;
+ }
+ catch (Exception ex)
+ {
+ Console.WriteLine("E57 InitReader error: " + ex.Message);
+ return false;
+ }
+ }
+
+ public LasHeader GetMetaData(ImportSettings importSettings, int fileIndex)
+ {
+ return new LasHeader
+ {
+ FileName = importSettings.inputFiles[fileIndex],
+ NumberOfPointRecords = (uint)(header?.E57Root?.Data3D?[0]?.Points?.RecordCount ?? 0)
+ };
+ }
+
+ public Bounds GetBounds()
+ {
+ var bounds = header.E57Root.Data3D[0].CartesianBounds.Bounds;
+
+ return new Bounds
+ {
+ minX = (float)bounds.X.Min,
+ maxX = (float)bounds.X.Max,
+ minY = (float)bounds.Y.Min,
+ maxY = (float)bounds.Y.Max,
+ minZ = (float)bounds.Z.Min,
+ maxZ = (float)bounds.Z.Max
+ };
+ }
+
+ public int GetPointCount()
+ {
+ return (int)(header?.E57Root?.Data3D?[0]?.Points?.RecordCount ?? 0);
+ }
+
+ public Float3 GetXYZ()
+ {
+ if (currentChunk == null || currentPointIndex >= currentChunk.Count)
+ {
+ if (!chunkEnumerator.MoveNext())
+ return new Float3 { hasError = true };
+
+ currentChunk = chunkEnumerator.Current;
+ currentPointIndex = 0;
+
+ // clear cachedColors when chunk changes
+ cachedColors = null;
+ }
+
+ var p = currentChunk.Positions[currentPointIndex];
+ lastXYZ.x = p.X;
+ lastXYZ.y = p.Y;
+ lastXYZ.z = p.Z;
+ lastXYZ.hasError = false;
+
+ currentPointIndex++;
+ return lastXYZ;
+ }
+
+ private C3b[] cachedColors = null;
+
+ public Color GetRGB()
+ {
+ if (cachedColors == null && currentChunk?.Colors != null)
+ {
+ cachedColors = currentChunk.Colors;
+ }
+
+ int i = currentPointIndex - 1;
+ if (cachedColors != null && i >= 0 && i < cachedColors.Length)
+ {
+ var c = cachedColors[i];
+ return new Color
+ {
+ r = c.R / 255f,
+ g = c.G / 255f,
+ b = c.B / 255f
+ };
+ }
+
+ return default;
+ }
+
+ public ushort GetIntensity()
+ {
+ var i = currentPointIndex - 1;
+ if (currentChunk?.Intensities != null && i >= 0 && i < currentChunk.Intensities.Length)
+ {
+ return (ushort)currentChunk.Intensities[i];
+ }
+ return 0;
+ }
+
+ public byte GetClassification() => 0;
+
+ public double GetTime()
+ {
+ // Not implemented for now
+ return 0;
+ }
+
+ public void Close() { }
+
+ public void Dispose()
+ {
+ Close();
+ GC.SuppressFinalize(this);
+ }
+
+ ~E57()
+ {
+ Dispose();
+ }
+
+ public string GetMetaDataJSON()
+ {
+ return JsonSerializer.Serialize(metaData);
+ }
+ }
+}
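
The E57 reader above streams points chunk by chunk: GetXYZ serves positions from the current chunk and advances the enumerator once the chunk is exhausted. The same pattern reduced to a self-contained sketch, with flat float arrays standing in for Aardvark chunks (names here are illustrative, not the Aardvark API):

    using System;
    using System.Collections.Generic;

    class ChunkedPointStream
    {
        private readonly IEnumerator<float[]> chunks; // each chunk is a flat x,y,z,x,y,z... array
        private float[] current;
        private int index;

        public ChunkedPointStream(IEnumerable<float[]> source)
        {
            chunks = source.GetEnumerator();
        }

        // Returns false when every chunk has been consumed.
        public bool TryGetNext(out (float x, float y, float z) point)
        {
            point = default;
            while (current == null || index >= current.Length)
            {
                if (!chunks.MoveNext()) return false; // no more chunks
                current = chunks.Current;
                index = 0;
            }
            point = (current[index], current[index + 1], current[index + 2]);
            index += 3;
            return true;
        }

        static void Main()
        {
            var stream = new ChunkedPointStream(new[]
            {
                new float[] { 0, 0, 0, 1, 1, 1 },
                new float[] { 2, 2, 2 },
            });
            while (stream.TryGetNext(out var p)) Console.WriteLine(p);
        }
    }
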
diff --git a/Readers/IReader.cs b/Readers/IReader.cs
index e02a26f..ce4045a 100644
--- a/Readers/IReader.cs
+++ b/Readers/IReader.cs
@@ -18,10 +18,11 @@ public interface IReader
// retrieve single point scan time
double GetTime();
- // close filestream
void Close();
- Color GetIntensity();
+ ushort GetIntensity();
+ byte GetClassification();
LasHeader GetMetaData(ImportSettings importSettings, int fileIndex);
+
void Dispose();
}
}
diff --git a/Readers/LAZ.cs b/Readers/LAZ.cs
index 6ecbe82..f3bbdf8 100644
--- a/Readers/LAZ.cs
+++ b/Readers/LAZ.cs
@@ -30,6 +30,11 @@ public class LAZ : IReader, IDisposable
//bool importIntensity = false;
bool customIntensityRange = false;
+ byte minClassification = 255;
+ byte maxClassification = 0;
+ ushort minIntensity = 65535;
+ ushort maxIntensity = 0;
+
int? taskID;
// add constructor
@@ -44,25 +49,33 @@ bool IReader.InitReader(ImportSettings importSettings, int fileIndex)
int res = 1;
//try
//{
- //Log.WriteLine("--------------------- initreader: " + fileIndex + " taskID: " + taskID);
- // TODO check errors
- var file = importSettings.inputFiles[fileIndex];
- //importRGB = importSettings.importRGB;
- //importIntensity = importSettings.importIntensity;
- customIntensityRange = importSettings.useCustomIntensityRange;
- res = lazReader.open_reader(file, out compressedLAZ); // 0 = ok, 1 = error
+ //Log.WriteLine("--------------------- initreader: " + fileIndex + " taskID: " + taskID);
+ // TODO check errors
+ var file = importSettings.inputFiles[fileIndex];
+ //importRGB = importSettings.importRGB;
+ //importIntensity = importSettings.importIntensity;
+ customIntensityRange = importSettings.useCustomIntensityRange;
+
+ minClassification = 255;
+ maxClassification = 0;
+ minIntensity = 65535;
+ maxIntensity = 0;
+
+ res = lazReader.open_reader(file, out compressedLAZ); // 0 = ok, 1 = error
//}
//catch (Exception e)
//{
// Log.WriteLine("Error in LAZ.InitReader: " + e.Message);
// throw;
//}
+
return (res == 0);
}
LasHeader IReader.GetMetaData(ImportSettings importSettings, int fileIndex)
{
var h = new LasHeader();
+
h.FileName = importSettings.inputFiles[fileIndex];
h.FileSourceID = lazReader.header.file_source_ID;
h.GlobalEncoding = lazReader.header.global_encoding;
@@ -98,6 +111,18 @@ LasHeader IReader.GetMetaData(ImportSettings importSettings, int fileIndex)
h.MinZ = lazReader.header.min_z;
h.MaxZ = lazReader.header.max_z;
+ if (importSettings.importClassification && importSettings.importMetadataOnly == false)
+ {
+ h.MinClassification = minClassification;
+ h.MaxClassification = maxClassification;
+ }
+
+ if (importSettings.importIntensity && importSettings.importMetadataOnly == false)
+ {
+ h.MinIntensity = minIntensity;
+ h.MaxIntensity = maxIntensity;
+ }
+
if (h.NumberOfVariableLengthRecords > 0)
{
h.VariableLengthRecords = new System.Collections.Generic.List();
@@ -112,7 +137,7 @@ LasHeader IReader.GetMetaData(ImportSettings importSettings, int fileIndex)
vlr.Description = System.Text.Encoding.UTF8.GetString(lazReader.header.vlrs[i].description);
vlr.Description = vlr.Description.Replace("\0", string.Empty);
- //Get WKT (Well Known Text String)
+ // Get WKT (Well Known Text String)
if (vlr.RecordID == 2112)
{
string wkt = Encoding.ASCII.GetString(lazReader.header.vlrs[i].data);
@@ -152,7 +177,6 @@ LasHeader IReader.GetMetaData(ImportSettings importSettings, int fileIndex)
h.ProjectionID = newEntry.Value_Offset;
h.Projection = newEntry.Value_OffsetString;
}
-
gk.KeyEntries.Add(newEntry);
//gk.KeyEntries.Add(new sKeyEntry
@@ -178,9 +202,40 @@ LasHeader IReader.GetMetaData(ImportSettings importSettings, int fileIndex)
h.VariableLengthRecords.Add(vlr);
}
}
+
+ // additional data for LAS 1.3/1.4
+
+ // LAS 1.3 and higher: waveform data packet record pointer.
+ if (h.VersionMajor > 1 || (h.VersionMajor == 1 && h.VersionMinor >= 3))
+ {
+ h.StartOfWaveformDataPacketRecord = lazReader.header.start_of_waveform_data_packet_record;
+ }
+
+ // LAS 1.4 and higher: extended VLRs and extended point record counts.
+ if (h.VersionMajor > 1 || (h.VersionMajor == 1 && h.VersionMinor >= 4))
+ {
+ h.StartOfFirstExtendedVariableLengthRecord = lazReader.header.start_of_first_extended_variable_length_record;
+ h.NumberOfExtendedVariableLengthRecords = lazReader.header.number_of_extended_variable_length_records;
+ h.ExtendedNumberOfPointRecords = lazReader.header.extended_number_of_point_records;
+ h.ExtendedNumberOfPointsByReturn = lazReader.header.extended_number_of_points_by_return;
+ }
+
+ // optional user data in header
+ if (lazReader.header.user_data_in_header_size > 0)
+ {
+ h.UserDataInHeader = lazReader.header.user_data_in_header;
+ }
+
+ // optional user data after header
+ if (lazReader.header.user_data_after_header_size > 0)
+ {
+ h.UserDataAfterHeader = lazReader.header.user_data_after_header;
+ }
+
return h;
}
+
public GeoKeys ParseGeoKeysFromByteArray(byte[] byteArray)
{
GeoKeys geoKeys = new GeoKeys
@@ -310,8 +365,6 @@ Color IReader.GetRGB()
// get point reference
var p = lazReader.point;
- // TODO get timestamp
- //var pointTime = lazReader.point.gps_time;
if (p.rgb[0] > 255 || p.rgb[1] > 255 || p.rgb[2] > 255)
{
@@ -329,26 +382,40 @@ Color IReader.GetRGB()
return c;
}
- Color IReader.GetIntensity()
+ ushort IReader.GetIntensity()
{
- var c = new Color();
+ var p = lazReader.point;
- // get point reference
+ ushort i = p.intensity;
+ //if (customIntensityRange == true) // NOTE now only supports 65535 as custom range
+ //{
+ // i = (byte)(p.intensity / 257f);
+ //}
+ //else
+ //{
+ // i = (byte)(p.intensity);
+ //}
+ // get min and max
+ if (i < minIntensity) minIntensity = i;
+ if (i > maxIntensity) maxIntensity = i;
+
+ return i;
+ }
+
+ byte IReader.GetClassification()
+ {
var p = lazReader.point;
+ // read both fields, since we don't know which one the file actually uses
+ byte classification = p.classification;
+ byte extended = p.extended_classification;
+ // Choose extended if it's valid and not equal to default "unclassified"
+ byte finalClassification = (extended > 0 && extended != classification) ? extended : classification;
- float i = 0;
- if (customIntensityRange == true) // NOTE now only supports 65535 as custom range
- {
- i = Tools.LUT255[(byte)(p.intensity / 255f)];
- }
- else
- {
- i = Tools.LUT255[(byte)(p.intensity)];
- }
- c.r = i;
- c.g = i;
- c.b = i;
- return c;
+ // get min and max
+ if (finalClassification < minClassification) minClassification = finalClassification;
+ if (finalClassification > maxClassification) maxClassification = finalClassification;
+
+ return finalClassification;
}
Float3 IReader.GetXYZ()
@@ -387,6 +454,16 @@ double IReader.GetTime()
return lazReader.point.gps_time;
}
+ //(byte, byte) IReader.GetClassificationRange()
+ //{
+ // return (minClassification, maxClassification);
+ //}
+
+ //(byte, byte) IReader.GetIntensityRange()
+ //{
+ // return (minIntensity, maxIntensity);
+ //}
+
void IReader.Close()
{
lazReader.close_reader();
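
The LAZ reader now prefers the LAS 1.4 extended classification when it is set and differs from the legacy field, and tracks min/max intensity and classification so they can be reported in the metadata header. The selection and range tracking in isolation (simplified; the real reader pulls these fields from laszip's point struct):

    using System;

    class ClassificationRangeSketch
    {
        private byte minClassification = 255;
        private byte maxClassification = 0;
        private ushort minIntensity = 65535;
        private ushort maxIntensity = 0;

        // Prefer the extended (LAS 1.4) classification when it is set and differs
        // from the legacy field; otherwise keep the legacy value. Track ranges as we go.
        public byte Track(byte legacy, byte extended, ushort intensity)
        {
            byte final = (extended > 0 && extended != legacy) ? extended : legacy;

            if (final < minClassification) minClassification = final;
            if (final > maxClassification) maxClassification = final;
            if (intensity < minIntensity) minIntensity = intensity;
            if (intensity > maxIntensity) maxIntensity = intensity;

            return final;
        }

        static void Main()
        {
            var t = new ClassificationRangeSketch();
            Console.WriteLine(t.Track(legacy: 2, extended: 0, intensity: 120));    // 2 (legacy kept)
            Console.WriteLine(t.Track(legacy: 2, extended: 18, intensity: 40000)); // 18 (extended wins)
            Console.WriteLine($"{t.minClassification}-{t.maxClassification}, {t.minIntensity}-{t.maxIntensity}");
        }
    }
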
diff --git a/Readers/PLY.cs b/Readers/PLY.cs
new file mode 100644
index 0000000..5c84527
--- /dev/null
+++ b/Readers/PLY.cs
@@ -0,0 +1,210 @@
+using PointCloudConverter.Structs;
+using Ply.Net;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using Color = PointCloudConverter.Structs.Color;
+using System.Diagnostics;
+using static Ply.Net.PlyParser;
+using System.Collections.Immutable;
+
+namespace PointCloudConverter.Readers
+{
+ public class PLY : IReader, IDisposable
+ {
+ private PlyParser.Dataset dataset;
+ private int pointIndex;
+ private int pointCount;
+
+ private List vertexChunks;
+ private int currentChunkIndex;
+ private int currentPointInChunk;
+
+ private PropertyData px, py, pz;
+ private PropertyData pr, pg, pb;
+
+ //private PlyParser.PropertyData pintensity, pclass, ptime;
+
+ private Float3 currentPoint;
+ private Color currentColor;
+ // private double currentTime;
+ // private byte currentIntensity;
+ // private byte currentClassification;
+ private Bounds bounds;
+
+
+ //int? taskID;
+ //// add constructor
+ //public PLY(int? _taskID)
+ //{
+ // taskID = _taskID;
+ //}
+
+ public bool InitReader(ImportSettings importSettings, int fileIndex)
+ {
+ var file = importSettings.inputFiles[fileIndex];
+
+ using var stream = File.OpenRead(file);
+ dataset = PlyParser.Parse(stream, 4096);
+
+ vertexChunks = dataset.Data
+ .Where(d => d.Element.Type == ElementType.Vertex)
+ .ToList();
+
+ if (vertexChunks.Count == 0) return false;
+
+ pointCount = vertexChunks.Sum(chunk => ((Array)chunk.Data[0].Data).Length);
+ currentChunkIndex = 0;
+ currentPointInChunk = 0;
+
+ SetCurrentChunkProperties(); // helper method to cache px, py, pz, etc.
+
+ CalculateBounds();
+
+ return true;
+
+ }
+
+ public int GetPointCount() => pointCount;
+
+ public Bounds GetBounds() => bounds;
+
+ public Float3 GetXYZ()
+ {
+ if (currentChunkIndex >= vertexChunks.Count)
+ return new Float3 { hasError = true };
+
+ int chunkSize = ((Array)px.Data).Length;
+ if (currentPointInChunk >= chunkSize)
+ {
+ currentChunkIndex++;
+ if (currentChunkIndex >= vertexChunks.Count)
+ return new Float3 { hasError = true };
+
+ currentPointInChunk = 0;
+ SetCurrentChunkProperties();
+ }
+
+ currentPoint = new Float3
+ {
+ x = Convert.ToSingle(px.Data.GetValue(currentPointInChunk)),
+ y = Convert.ToSingle(py.Data.GetValue(currentPointInChunk)),
+ z = Convert.ToSingle(pz.Data.GetValue(currentPointInChunk)),
+ hasError = false
+ };
+
+ currentColor = new Color
+ {
+ r = Convert.ToSingle(Convert.ToByte(pr.Data.GetValue(currentPointInChunk))) / 255f,
+ g = Convert.ToSingle(Convert.ToByte(pg.Data.GetValue(currentPointInChunk))) / 255f,
+ b = Convert.ToSingle(Convert.ToByte(pb.Data.GetValue(currentPointInChunk))) / 255f
+ };
+
+ currentPointInChunk++;
+ return currentPoint;
+ }
+
+
+ public Color GetRGB()
+ {
+ //currentColor = new Color();
+ //currentColor.r = 255;
+ //currentColor.g = 0;
+ //currentColor.b = 0;
+ return currentColor;
+ }
+
+ public double GetTime()
+ {
+ return 0.0;
+ }
+
+ public byte GetIntensity()
+ {
+ return 0;
+ }
+
+ public byte GetClassification()
+ {
+ return 0;
+ }
+
+ // TODO return ply data
+ public LasHeader GetMetaData(ImportSettings importSettings, int fileIndex)
+ {
+ return new LasHeader
+ {
+ FileName = importSettings.inputFiles[fileIndex],
+ NumberOfPointRecords = (uint)pointCount,
+ MinX = bounds.minX,
+ MaxX = bounds.maxX,
+ MinY = bounds.minY,
+ MaxY = bounds.maxY,
+ MinZ = bounds.minZ,
+ MaxZ = bounds.maxZ
+ };
+ }
+
+ public void Close()
+ {
+ dataset = null;
+ }
+
+ public void Dispose() => Close();
+
+ private void CalculateBounds()
+ {
+ bounds = new Bounds
+ {
+ minX = float.MaxValue,
+ maxX = float.MinValue,
+ minY = float.MaxValue,
+ maxY = float.MinValue,
+ minZ = float.MaxValue,
+ maxZ = float.MinValue
+ };
+
+ foreach (var chunk in vertexChunks)
+ {
+ var cx = chunk["x"]!;
+ var cy = chunk["y"]!;
+ var cz = chunk["z"]!;
+ int count = ((Array)cx.Data).Length;
+
+ for (int i = 0; i < count; i++)
+ {
+ float x = Convert.ToSingle(cx.Data.GetValue(i));
+ float y = Convert.ToSingle(cy.Data.GetValue(i));
+ float z = Convert.ToSingle(cz.Data.GetValue(i));
+
+ bounds.minX = Math.Min(bounds.minX, x);
+ bounds.maxX = Math.Max(bounds.maxX, x);
+ bounds.minY = Math.Min(bounds.minY, y);
+ bounds.maxY = Math.Max(bounds.maxY, y);
+ bounds.minZ = Math.Min(bounds.minZ, z);
+ bounds.maxZ = Math.Max(bounds.maxZ, z);
+ }
+ }
+ }
+
+
+ ushort IReader.GetIntensity()
+ {
+ return GetIntensity();
+ }
+
+ private void SetCurrentChunkProperties()
+ {
+ var chunk = vertexChunks[currentChunkIndex];
+ px = chunk["x"] ?? throw new Exception("Missing 'x' property");
+ py = chunk["y"] ?? throw new Exception("Missing 'y' property");
+ pz = chunk["z"] ?? throw new Exception("Missing 'z' property");
+ pr = chunk["red"] ?? throw new Exception("Missing 'red' property");
+ pg = chunk["green"] ?? throw new Exception("Missing 'green' property");
+ pb = chunk["blue"] ?? throw new Exception("Missing 'blue' property");
+ }
+
+
+ }
+}
diff --git a/Structs/ExportFormat.cs b/Structs/ExportFormat.cs
index 07ae550..8dda12f 100644
--- a/Structs/ExportFormat.cs
+++ b/Structs/ExportFormat.cs
@@ -4,6 +4,7 @@ public enum ExportFormat
{
Unknown,
UCPC, // V2
- PCROOT // V3
+ PCROOT, // V3
+ External // dll plugin?
}
}
diff --git a/Structs/ImportFormat.cs b/Structs/ImportFormat.cs
index 3553e19..27d37a8 100644
--- a/Structs/ImportFormat.cs
+++ b/Structs/ImportFormat.cs
@@ -3,6 +3,8 @@
public enum ImportFormat
{
Unknown,
- LAS // and LAZ
+ LAS, // and LAZ
+ PLY,
+ E57
}
}
diff --git a/Structs/ImportSettings.cs b/Structs/ImportSettings.cs
index 978c142..f933701 100644
--- a/Structs/ImportSettings.cs
+++ b/Structs/ImportSettings.cs
@@ -1,11 +1,13 @@
// values from commandline arguments
using PointCloudConverter.Logger;
+using PointCloudConverter.Plugins;
using PointCloudConverter.Readers;
using PointCloudConverter.Structs;
using PointCloudConverter.Writers;
using System.Collections.Concurrent;
using System.Collections.Generic;
+using System.Diagnostics;
using System.Text.Json;
using System.Text.Json.Serialization;
@@ -13,108 +15,171 @@ namespace PointCloudConverter
{
public class ImportSettings
{
- // filled in by program (so that json serializer is easier)
- public string version { get; set; } = "0.0.0";
+ // filled in by program (so that json serializer is easier), not used
+ //public string version { get; set; } = "0.0.0";
[JsonConverter(typeof(JsonStringEnumConverter))]
public Logger.LogEvent @event { get; set; }
-
- public IReader reader = new LAZ(null); // single threaded reader
+
+ [JsonIgnore] // FIXME: doesn't ignore it
+ public IReader reader; // single threaded reader
//public Dictionary Readers { get; set; } = new Dictionary();
public ConcurrentDictionary<int?, IReader> Readers { get; set; } = new ConcurrentDictionary<int?, IReader>();
+ [JsonIgnore]
public IWriter writer = new UCPC();
+
+ public string ReaderType => reader?.GetType().Name;
+ public string WriterType => writer?.GetType().Name;
+
//public Dictionary Writers { get; set; } = new Dictionary();
- public ConcurrentDictionary Writers { get; set; } = new ConcurrentDictionary();
+ //public ConcurrentDictionary> Writers { get; set; } = new ConcurrentDictionary>();
+ private readonly ConcurrentBag<IWriter> _writerPool = new ConcurrentBag<IWriter>();
+ private readonly ConcurrentDictionary<int?, IWriter> _allocatedWriters = new ConcurrentDictionary<int?, IWriter>();
+ private int _maxWriters = 16;
+
+ static ILogger Log;
+ public void InitWriterPool(int maxThreads, ExportFormat export)
+ {
+ //exportFormat = export;
+ _maxWriters = maxThreads;
+ // Initialize the pool with the maximum number of writers
+ for (int i = 0; i < _maxWriters; i++)
+ {
+ _writerPool.Add(CreateNewWriter()); // Create and add writers to the pool
+ }
+ }
+
// Method to get or create a reader for a specific task ID
public IReader GetOrCreateReader(int? taskId)
{
+ //Log.Write(">>>>> Getting or creating reader for task ID: " + taskId+" format: "+importFormat);
+
if (!Readers.ContainsKey(taskId))
{
- Readers[taskId] = new LAZ(taskId);
+ IReader readerInstance;
+
+ switch (importFormat)
+ {
+ case ImportFormat.LAS:
+ readerInstance = new LAZ(taskId);
+ break;
+ case ImportFormat.PLY:
+ readerInstance = new PLY(); // no taskId needed here
+ break;
+ case ImportFormat.E57:
+ readerInstance = new E57();
+ break;
+ default:
+ Log.Write($"Unsupported import format: {importFormat}", LogEvent.Error);
+ throw new NotSupportedException($"Unsupported import format: {importFormat}");
+ }
+
+ Readers[taskId] = readerInstance;
}
- //Log.WriteLine(">>>>> Total Readers in dictionary: " + Readers.Count);
+ //Log.Write(">>>>> Total Readers in dictionary: " + Readers.Count);
return Readers[taskId];
}
- public IWriter GetOrCreateWriter(int? taskId)
+ private IWriter CreateNewWriter()
{
- if (!Writers.ContainsKey(taskId))
+ ///Log.Write(">>>>> Creating new writer: "+exportFormat);
+ switch (exportFormat)
{
- Writers[taskId] = new PCROOT(taskId);
- }
+ case ExportFormat.Unknown:
+ Log.Write("Writer format not specified", LogEvent.Error);
+ return null;
+ break;
+ case ExportFormat.UCPC:
+ return new UCPC();
+ break;
+ case ExportFormat.PCROOT:
+ return new PCROOT(null); // No taskId when creating the pool, it's assigned later
+ break;
+ case ExportFormat.External:
+ // get name from current writer type
+ string dynamicWriterName = writer.GetType().Name.ToUpper();
+ //Trace.WriteLine("Dynamic writer name: " + dynamicWriterName);
- //Log.WriteLine(">>>>> Total Writers in dictionary: " + Writers.Count);
+ var dynamicWriter = PluginLoader.LoadWriter(dynamicWriterName);
- return Writers[taskId];
- }
+ if (dynamicWriter != null)
+ {
+ return dynamicWriter;
+ }
+ else
+ {
+ Log.Write("Dynamic writer not found: " + dynamicWriterName, LogEvent.Error);
+ return null;
+ }
- //public void ReleaseReader(int? taskId)
- //{
- // //Log.WriteLine(">>>>> Releasing reader for task ID: " + taskId);
- // if (Readers.ContainsKey(taskId))
- // {
- // Readers[taskId]?.Close();
- // //Readers[taskId]?.Dispose(); // FIXME causes exceptions
- // Readers.Remove(taskId);
- // }
- //}
+ return writer; // FIXME this should be loaded from a plugin inside argparser -exportformat code
+ break;
+ default:
+ Log.Write("Writer format not supported: " + exportFormat, LogEvent.Error);
+ return null;
+ break;
+ }
+ }
- public void ReleaseReader(int? taskId)
+ public IWriter GetOrCreateWriter(int? taskId)
{
- // Log the release of the reader for the specified task ID
- // Log.WriteLine(">>>>> Releasing reader for task ID: " + taskId);
-
- if (taskId.HasValue)
+ if (!_allocatedWriters.TryGetValue(taskId, out var writer))
{
- if (Readers.TryRemove(taskId, out var reader))
+ // Try to get a writer from the pool
+ if (_writerPool.TryTake(out writer))
{
- reader?.Close();
- // reader?.Dispose();
+ // Assign the writer to the task
+ _allocatedWriters[taskId] = writer;
}
else
{
- Log.WriteLine($"Reader for task ID {taskId} could not be removed because it was not found.", LogEvent.Warning);
+ // If no writers are available, create a new one (this should rarely happen if the pool is well-sized)
+ writer = CreateNewWriter();
+ _allocatedWriters[taskId] = writer;
}
}
+
+ return writer;
}
public void ReleaseWriter(int? taskId)
+ {
+ if (taskId.HasValue && _allocatedWriters.TryRemove(taskId, out var writer))
+ {
+ // Log.Write("ReleaseWriter >>> Memory used: " + GC.GetTotalMemory(false));
+ // Clean up the writer if necessary
+ writer?.Cleanup(0);
+ //writer?.Dispose();
+ // Return the writer to the pool for reuse
+ _writerPool.Add(writer);
+ // Log.Write("ReleaseWriter >>> Memory used: " + GC.GetTotalMemory(false));
+
+ }
+ }
+
+ public void ReleaseReader(int? taskId)
{
// Log the release of the reader for the specified task ID
- // Log.WriteLine(">>>>> Releasing reader for task ID: " + taskId);
+ // Log.Write(">>>>> Releasing reader for task ID: " + taskId);
if (taskId.HasValue)
{
- if (Writers.TryRemove(taskId, out var writer))
+ if (Readers.TryRemove(taskId, out var reader))
{
- writer?.Cleanup(0);
+ reader?.Close();
// reader?.Dispose();
}
else
{
- Log.WriteLine($"Reader for task ID {taskId} could not be removed because it was not found.", LogEvent.Warning);
+ Log.Write($"Reader for task ID {taskId} could not be removed because it was not found.", LogEvent.Warning);
}
}
}
- //public void ReleaseWriter(int? taskId)
- //{
- // //Log.WriteLine(">>>>> Releasing writer for task ID: " + taskId);
- // if (Writers.ContainsKey(taskId))
- // {
- // Writers[taskId]?.Cleanup(0);
- // Writers.Remove(taskId);
- // }
- // else
- // {
- // //Log.WriteLine("----->>>>> Writer not found in dictionary for task ID: " + taskId);
- // }
- //}
-
public bool haveError { get; set; } = false; // if errors during parsing args
//public string[] errorMessages = null; // last error message(s)
@@ -122,9 +187,9 @@ public void ReleaseWriter(int? taskId)
public float scale { get; set; } = 1f;
[JsonConverter(typeof(JsonStringEnumConverter))]
- public ImportFormat importFormat { get; set; } = ImportFormat.LAS; //default to las for now
+ public ImportFormat importFormat { get; set; } = ImportFormat.Unknown; // no default; must be set via -importformat
[JsonConverter(typeof(JsonStringEnumConverter))]
- public ExportFormat exportFormat { get; set; } = ExportFormat.UCPC; // defaults to UCPC (v2)
+ public ExportFormat exportFormat { get; set; }
public List<string> inputFiles { get; set; } = new List<string>();
public string outputFile { get; set; } = null;
@@ -134,8 +199,9 @@ public void ReleaseWriter(int? taskId)
// FIXME default values will be used unless otherwise specified.. randomize = true
// TODO these should be export settings..
- public bool importRGB { get; set; } = true; // this or intensity must be on
+ public bool importRGB { get; set; } = true;
public bool importIntensity { get; set; } = false;
+ public bool importClassification { get; set; } = false;
public bool useAutoOffset { get; set; } = true;
public bool swapYZ { get; set; } = true;
public bool invertX { get; set; } = false;
@@ -161,14 +227,19 @@ public void ReleaseWriter(int? taskId)
public float manualOffsetY { get; set; } = 0;
public float manualOffsetZ { get; set; } = 0;
public bool useCustomIntensityRange { get; set; } = false; // if false, 0-255 range is used, if ture: 0-65535
+ public bool detectIntensityRange { get; set; } = false; // if true, reads some points from file to detect min/max intensity range 0-255 or 0-65535
public int seed { get; set; } = -1; // random seed for shuffling
public int maxThreads { get; set; }
- public bool useJSONLog = false;
- public bool importMetadata = false;
- public bool importMetadataOnly = false;
- public bool averageTimestamp = false; // calculate average timestamp for all points for this tile
- public bool checkoverlap = false; // check if tile overlaps with other tiles (save into pcroot)
+ public bool useJSONLog { get; set; } = false;
+ public bool importMetadata { get; set; } = false;
+ public bool importMetadataOnly { get; set; } = false;
+ public bool averageTimestamp { get; set; } = false; // calculate average timestamp for all points for this tile
+ public bool checkoverlap { get; set; } = false; // check if tile overlaps with other tiles (save into pcroot)
+ public bool useGrid { get; set; } = false; // required for PCROOT format (will be automatically enabled for v3)
+ public string offsetMode { get; set; } = "min"; // TODO use an enum; currently "min" or "legacy" ("legacy" offsets by the first file's bounds min only)
+ public bool useFilter { get; set; } = false; // filter by distance
+ public float filterDistance { get; set; } = 0.5f;
public override string ToString()
{
@@ -182,6 +253,7 @@ public override string ToString()
t += "\n invertZ=" + invertZ;
t += "\n readRGB=" + importRGB;
t += "\n readIntensity=" + importIntensity;
+ t += "\n readClassification=" + importClassification;
//t += "\n metaData=" + importIntensity;
t += "\n useAutoOffset=" + useAutoOffset;
t += "\n offsetX=" + offsetX;
@@ -211,6 +283,8 @@ public override string ToString()
t += "\n importMetadataOnly=" + importMetadataOnly;
t += "\n averageTimestamp=" + averageTimestamp;
t += "\n checkoverlap=" + checkoverlap;
+ t += "\n useGrid=" + useGrid;
+ t += "\n offsetMode=" + offsetMode;
return t;
}
@@ -218,5 +292,51 @@ internal string ToJSON()
{
return JsonSerializer.Serialize(this);
}
+
+ }
+
+ // TEST dynamic export formats
+ [JsonConverter(typeof(CustomExportFormatConverter))]
+ public class ExportFormatModel
+ {
+ public ExportFormat StaticExportFormat { get; set; } = ExportFormat.Unknown;
+
+ // This will store dynamic formats from plugins
+ public string DynamicExportFormat { get; set; }
+ }
+
+ public class CustomExportFormatConverter : JsonConverter<ExportFormatModel>
+ {
+ public override ExportFormatModel Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
+ {
+ string stringValue = reader.GetString();
+ var model = new ExportFormatModel();
+
+ // Try to parse it as a known static ExportFormat
+ if (Enum.TryParse(typeof(ExportFormat), stringValue, true, out var enumValue))
+ {
+ model.StaticExportFormat = (ExportFormat)enumValue;
+ }
+ else
+ {
+ // If it's not a known enum value, store it as a dynamic format
+ model.DynamicExportFormat = stringValue;
+ }
+
+ return model;
+ }
+
+ public override void Write(Utf8JsonWriter writer, ExportFormatModel value, JsonSerializerOptions options)
+ {
+ // Serialize based on whether it's a static enum or dynamic value
+ if (value.StaticExportFormat != ExportFormat.Unknown)
+ {
+ writer.WriteStringValue(value.StaticExportFormat.ToString());
+ }
+ else
+ {
+ writer.WriteStringValue(value.DynamicExportFormat);
+ }
+ }
}
}
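
InitWriterPool and GetOrCreateWriter above replace the per-task writer dictionary with a pre-sized pool: a writer is taken from a ConcurrentBag when a task starts and returned to it on release. The pooling pattern in isolation, using a stub writer type rather than the converter's IWriter (a sketch, not the actual implementation):

    using System;
    using System.Collections.Concurrent;

    interface IPooledWriter { void Cleanup(); }
    class StubWriter : IPooledWriter { public void Cleanup() { } }

    class WriterPool
    {
        private readonly ConcurrentBag<IPooledWriter> pool = new ConcurrentBag<IPooledWriter>();
        private readonly ConcurrentDictionary<int, IPooledWriter> allocated = new ConcurrentDictionary<int, IPooledWriter>();

        public WriterPool(int size)
        {
            for (int i = 0; i < size; i++) pool.Add(new StubWriter()); // pre-size the pool
        }

        // Reuse a pooled writer for this task, or create a new one if the pool ran dry.
        public IPooledWriter Acquire(int taskId)
        {
            return allocated.GetOrAdd(taskId, _ => pool.TryTake(out var w) ? w : new StubWriter());
        }

        // Clean up and hand the writer back so another task can reuse it.
        public void Release(int taskId)
        {
            if (allocated.TryRemove(taskId, out var w))
            {
                w.Cleanup();
                pool.Add(w);
            }
        }

        static void Main()
        {
            var p = new WriterPool(2);
            var first = p.Acquire(1);
            p.Release(1);
            var second = p.Acquire(2);
            Console.WriteLine(ReferenceEquals(first, second) ? "writer reused" : "different pooled writer");
        }
    }
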
diff --git a/Structs/Metadata/GeoTiffKeys.cs b/Structs/Metadata/GeoTiffKeys.cs
index 8055b6a..5653729 100644
--- a/Structs/Metadata/GeoTiffKeys.cs
+++ b/Structs/Metadata/GeoTiffKeys.cs
@@ -16,7 +16,7 @@
using System;
using System.Collections.Generic;
-using static System.Windows.Forms.VisualStyles.VisualStyleElement.TextBox;
+//using static System.Windows.Forms.VisualStyles.VisualStyleElement.TextBox;
namespace Free.Ports.LibGeoTiff
{
diff --git a/Structs/Metadata/Job.cs b/Structs/Metadata/Job.cs
new file mode 100644
index 0000000..9e3234f
--- /dev/null
+++ b/Structs/Metadata/Job.cs
@@ -0,0 +1,20 @@
+using System.Text.Json.Serialization;
+
+namespace PointCloudConverter.Structs.Metadata
+{
+ public class Job
+ {
+ public string ConverterVersion { get; set; }
+ public ImportSettings ImportSettings { get; set; }
+ public DateTime StartTime { get; set; }
+ public DateTime EndTime { get; set; }
+ public TimeSpan Elapsed { get; internal set; }
+ }
+
+ public class JobMetadata
+ {
+ [JsonPropertyOrder(0)]
+ public Job Job { get; set; }
+ public List<LasHeader> lasHeaders { get; set; } = new List<LasHeader>();
+ }
+}
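
Job and JobMetadata above collect the run's settings, timing and per-file LAS headers for the JSON log. A quick sketch of serializing such a record with System.Text.Json (property names mirror the patch; the values are made up):

    using System;
    using System.Text.Json;

    class JobSketch
    {
        public string ConverterVersion { get; set; }
        public DateTime StartTime { get; set; }
        public DateTime EndTime { get; set; }
        public TimeSpan Elapsed { get; set; }

        static void Main()
        {
            var job = new JobSketch
            {
                ConverterVersion = "0.0.0", // placeholder value
                StartTime = DateTime.UtcNow,
                EndTime = DateTime.UtcNow.AddSeconds(12),
            };
            job.Elapsed = job.EndTime - job.StartTime;

            var options = new JsonSerializerOptions { WriteIndented = true };
            Console.WriteLine(JsonSerializer.Serialize(job, options));
        }
    }
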
diff --git a/Structs/Metadata/LasHeader.cs b/Structs/Metadata/LasHeader.cs
index 40051b3..6094196 100644
--- a/Structs/Metadata/LasHeader.cs
+++ b/Structs/Metadata/LasHeader.cs
@@ -43,6 +43,19 @@ public class LasHeader
public double MinY { get; set; }
public double MaxZ { get; set; }
public double MinZ { get; set; }
+
+ public byte MinClassification { get; set; }
+ public byte MaxClassification { get; set; }
+ public ushort MinIntensity { get; set; }
+ public ushort MaxIntensity { get; set; }
+
public List VariableLengthRecords { get; set; }
+ public ulong StartOfWaveformDataPacketRecord { get; internal set; }
+ public ulong StartOfFirstExtendedVariableLengthRecord { get; internal set; }
+ public uint NumberOfExtendedVariableLengthRecords { get; internal set; }
+ public ulong ExtendedNumberOfPointRecords { get; internal set; }
+ public ulong[] ExtendedNumberOfPointsByReturn { get; internal set; }
+ public byte[] UserDataInHeader { get; internal set; }
+ public byte[] UserDataAfterHeader { get; internal set; }
}
}
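
The Tools/ArgParser.cs changes below let a single -config=settings.txt argument stand in for the full argument list by reading the file and re-splitting its contents. A simplified sketch of that expansion, using a plain whitespace split instead of the Win32 CommandLineToArgvW helper the parser actually uses:

    using System;
    using System.IO;

    static class ConfigArgSketch
    {
        // If the only argument is -config=<file>, replace it with the file's contents.
        public static string[] ExpandConfig(string[] args)
        {
            if (args.Length == 1 && args[0].StartsWith("-config=", StringComparison.OrdinalIgnoreCase))
            {
                var path = args[0].Substring("-config=".Length);
                if (!File.Exists(path))
                {
                    Console.WriteLine("Config file not found: " + path);
                    return Array.Empty<string>();
                }
                // NOTE: naive split; quoted paths with spaces need a real command-line splitter
                return File.ReadAllText(path).Split((char[])null, StringSplitOptions.RemoveEmptyEntries);
            }
            return args;
        }

        static void Main()
        {
            File.WriteAllText("demo.txt", "-input=cloud.las -exportformat=UCPC -scale=0.5");
            foreach (var a in ExpandConfig(new[] { "-config=demo.txt" }))
                Console.WriteLine(a);
        }
    }
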
diff --git a/Tools/ArgParser.cs b/Tools/ArgParser.cs
index d849242..2de6d7e 100644
--- a/Tools/ArgParser.cs
+++ b/Tools/ArgParser.cs
@@ -1,9 +1,12 @@
-using PointCloudConverter.Readers;
+using PointCloudConverter.Logger;
+using PointCloudConverter.Plugins;
+using PointCloudConverter.Readers;
using PointCloudConverter.Structs;
using PointCloudConverter.Writers;
using System;
using System.ComponentModel;
using System.Diagnostics;
+using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
@@ -22,7 +25,7 @@ public static class ArgParser
[DllImport("kernel32.dll")]
static extern IntPtr LocalFree(IntPtr hMem);
- static string[] SplitArgs(string unsplitArgumentLine)
+ public static string[] SplitArgs(string unsplitArgumentLine)
{
int numberOfArgs;
IntPtr ptrToSplitArgs;
@@ -84,15 +87,35 @@ static string GetEscapedCommandLine()
return Reverse(sb.ToString());
}
- public static ImportSettings Parse(string[] args, string rootFolder)
+ static ILogger Log;
+
+ public static ImportSettings Parse(string[] args, string rootFolder, ILogger logger)
{
ImportSettings importSettings = new ImportSettings();
+ Log = logger;
// if there are any errors, they are added to this list, then importing is aborted after parsing arguments
//List errors = new List();
- // handle manual args (null is default args, not used)
- if (args == null) args = SplitArgs(GetEscapedCommandLine()).Skip(1).ToArray();
+ // handle commandline args (null is default args, not used)
+ if (args == null)
+ {
+ args = SplitArgs(GetEscapedCommandLine()).Skip(1).ToArray();
+
+ // if only single arg, -config=filename.txt, then read file and split args
+ if (args.Length == 1 && args[0].ToLower().Contains("-config="))
+ {
+ var configFile = args[0].Split(argValueSeparator)[1];
+ if (File.Exists(configFile) == true)
+ {
+ args = SplitArgs(File.ReadAllText(configFile).Trim()).Skip(1).ToArray();
+ }
+ else
+ {
+ importSettings.errors.Add("Config file not found: " + configFile);
+ }
+ }
+ }
// parse commandline arguments
if (args != null && args.Length > 0)
@@ -125,54 +148,97 @@ public static ImportSettings Parse(string[] args, string rootFolder)
switch (cmd)
{
case "-importformat":
- Log.WriteLine("importformat = " + param);
+ Log.Write("importformat = " + param);
string importFormatParsed = param.ToUpper();
- // TODO check what reader interfaces are available
- if (string.IsNullOrEmpty(importFormatParsed) == true || (importFormatParsed != "LAS" && importFormatParsed != "LAZ"))
+ if (string.IsNullOrEmpty(importFormatParsed) ||
+ (importFormatParsed != "LAS" && importFormatParsed != "LAZ" && importFormatParsed != "PLY" && importFormatParsed != "E57"))
{
importSettings.errors.Add("Unsupported import format: " + param);
importSettings.importFormat = ImportFormat.Unknown;
}
else
{
- importSettings.importFormat = ImportFormat.LAS;
- importSettings.reader = new LAZ(null);
+ switch (importFormatParsed)
+ {
+ case "LAS":
+ case "LAZ":
+ importSettings.importFormat = ImportFormat.LAS;
+ importSettings.reader = new LAZ(null);
+ break;
+ case "PLY":
+ importSettings.importFormat = ImportFormat.PLY;
+ importSettings.reader = new PLY();
+ break;
+ case "E57":
+ importSettings.importFormat = ImportFormat.E57;
+ importSettings.reader = new E57();
+ break;
+ }
}
break;
+
case "-exportformat":
- Log.WriteLine("exportformat = " + param);
+ Log.Write("exportformat = " + param);
string exportFormatParsed = param.ToUpper();
// TODO check what writer interfaces are available
- if (string.IsNullOrEmpty(exportFormatParsed) == true || (exportFormatParsed != "UCPC" && exportFormatParsed != "PCROOT"))
+ if (string.IsNullOrEmpty(exportFormatParsed) == true)
{
importSettings.errors.Add("Unsupported export format: " + param);
importSettings.exportFormat = ExportFormat.Unknown;
}
- else
+ else // have some value
{
- // TODO later needs more formats..
+ // check built-in formats first
switch (exportFormatParsed)
{
// TODO check enum names or interfaces
case "PCROOT":
importSettings.writer = new PCROOT(null);
importSettings.exportFormat = ExportFormat.PCROOT;
- importSettings.randomize = true; // required for V3
+ //importSettings.randomize = true; // required for V3, but left to the user so they can disable it if needed
break;
- default:
+ case "UCPC":
importSettings.writer = new UCPC();
importSettings.exportFormat = ExportFormat.UCPC;
+ break;
+ default:
+ //importSettings.errors.Add("Unknown export format: " + param);
+
+ // TODO do we need to load it, or just check if dll exists?
+ // check external plugin formats
+ // NOTE LoadWriter throws if the plugin DLL or type is missing, so catch and fall through to the error below
+ IWriter writer = null;
+ try { writer = PluginLoader.LoadWriter(exportFormatParsed); } catch (Exception) { writer = null; }
+ if (writer != null)
+ {
+ importSettings.writer = writer;
+ importSettings.exportFormat = ExportFormat.External; // For now, since its enum..
+ }
+ else
+ {
+ // Format is unknown, add to errors
+ importSettings.errors.Add("Unknown export format: " + param);
+ importSettings.exportFormat = ExportFormat.Unknown;
+ }
+
break;
}
}
break;
case "-input":
- Log.WriteLine("input = " + param);
+ Log.Write("input = " + param);
+
+ if (string.IsNullOrEmpty(param.Trim()))
+ {
+ importSettings.errors.Add("Input file not defined: " + param);
+ break;
+ }
+
+ // remove quotes (needed for paths with spaces)
+ param = param.Trim('"');
// if relative folder, FIXME this fails on -input="C:\asdf\etryj\folder\" -importformat=las because backslash in \", apparently this https://stackoverflow.com/a/9288040/5452781
if (Path.IsPathRooted(param) == false)
@@ -184,23 +250,34 @@ public static ImportSettings Parse(string[] args, string rootFolder)
if (Directory.Exists(param) == true)
{
Console.ForegroundColor = ConsoleColor.Gray;
- Log.WriteLine("Batch mode enabled (import whole folder)");
+ Log.Write("Batch mode enabled (import whole folder)");
Console.ForegroundColor = ConsoleColor.White;
// TODO get file extension from commandline param? but then need to set -format before input.. for now only LAS/LAZ
// TODO parse/sort args in required order, not in given order
- var filePaths = Directory.GetFiles(param).Where(file => Regex.IsMatch(file, @"^.+\.(las|laz)$", RegexOptions.IgnoreCase)).ToArray();
-
- for (int j = 0; j < filePaths.Length; j++)
+ if (importSettings.importFormat == ImportFormat.Unknown)
{
- Console.ForegroundColor = ConsoleColor.Gray;
- Log.WriteLine("Found file: " + filePaths[j]);
- Console.ForegroundColor = ConsoleColor.White;
- importSettings.inputFiles.Add(filePaths[j]);
+ importSettings.errors.Add("Import format not defined before -input folder for batch (use -importformat" + argValueSeparator + "LAS or PLY)");
}
+ else
+ {
+ string importExtensions = "";
+ if (importSettings.importFormat == ImportFormat.LAS) importExtensions = "las|laz";
+ if (importSettings.importFormat == ImportFormat.PLY) importExtensions = "ply";
+ var filePaths = Directory.GetFiles(param).Where(file => Regex.IsMatch(file, @"^.+\.(" + importExtensions + ")$", RegexOptions.IgnoreCase)).ToArray();
+
+ for (int j = 0; j < filePaths.Length; j++)
+ {
+ Console.ForegroundColor = ConsoleColor.Gray;
+ Log.Write("Found file: " + filePaths[j]);
+ Console.ForegroundColor = ConsoleColor.White;
+ importSettings.inputFiles.Add(filePaths[j]);
+ }
+ }
importSettings.batch = true;
+
}
else // single file
{
@@ -215,14 +292,22 @@ public static ImportSettings Parse(string[] args, string rootFolder)
// TODO find better way to check all readers
//if (ext == "las" ||ext == "laz")
- Log.WriteLine("added " + param);
+ Log.Write("added " + param);
importSettings.inputFiles.Add(param);
}
}
break;
case "-output":
- Log.WriteLine("output = " + param);
+ Log.Write("output = " + param);
+
+ if (string.IsNullOrEmpty(param.Trim()))
+ {
+ importSettings.errors.Add("Output not defined: " + param);
+ break;
+ }
+
+ param = param.Trim('"');
// check if relative or not
if (Path.IsPathRooted(param) == false)
@@ -236,7 +321,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
param += Path.DirectorySeparatorChar;
}
- // no filename, just output to folder with same name and new extension
+ // no filename with extension, just output to folder with same name and new extension
if (string.IsNullOrEmpty(Path.GetFileNameWithoutExtension(param)) == true)
{
string inputFileName = null;
@@ -261,10 +346,10 @@ public static ImportSettings Parse(string[] args, string rootFolder)
// leavy empty for batch
//errors.Add("-input not defined before -output or Input file doesnt exist, failed to create target filename");
}
- else // have filename, create output filename from it
+ else // have filename, create output filename from it by adding extension
{
- param = Path.Combine(param, inputFileName + ".ucpc");
-
+ // TODO use extension from selected export format, but we don't know it here yet?
+ param = Path.Combine(param, inputFileName);// + ".ucpc");
}
}
else // have output filename
@@ -304,9 +389,9 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-scale":
- Log.WriteLine("scale = " + param);
-
- bool parsedScale = float.TryParse(param, out tempFloat);
+ Log.Write("scale = " + param);
+ param = param.Replace(",", ".");
+ bool parsedScale = float.TryParse(param, NumberStyles.Float, CultureInfo.InvariantCulture, out tempFloat);
if (parsedScale == false)
{
importSettings.errors.Add("Invalid scale parameter: " + param);
@@ -326,7 +411,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-swap":
- Log.WriteLine("swap = " + param);
+ Log.Write("swap = " + param);
if (param != "true" && param != "false")
{
@@ -339,7 +424,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-customintensityrange":
- Log.WriteLine("customintensityrange = " + param);
+ Log.Write("customintensityrange = " + param);
if (param != "true" && param != "false")
{
@@ -351,8 +436,21 @@ public static ImportSettings Parse(string[] args, string rootFolder)
}
break;
+ case "-detectintensityrange":
+ Log.Write("detectintensityrange = " + param);
+
+ if (param != "true" && param != "false")
+ {
+ importSettings.errors.Add("Invalid detectintensityrange parameter: " + param);
+ }
+ else
+ {
+ importSettings.detectIntensityRange = param == "true";
+ }
+ break;
+
case "-invertx":
- Log.WriteLine("invertx = " + param);
+ Log.Write("invertx = " + param);
if (param != "true" && param != "false")
{
@@ -365,7 +463,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-invertz":
- Log.WriteLine("invertz = " + param);
+ Log.Write("invertz = " + param);
if (param != "true" && param != "false")
{
@@ -378,7 +476,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-pack":
- Log.WriteLine("pack = " + param);
+ Log.Write("pack = " + param);
if (param != "true" && param != "false")
{
@@ -391,8 +489,8 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-packmagic":
- Log.WriteLine("packmagic = " + param);
- bool packMagicParsed = int.TryParse(param, out tempInt);
+ Log.Write("packmagic = " + param);
+ bool packMagicParsed = int.TryParse(param, NumberStyles.Integer, CultureInfo.InvariantCulture, out tempInt);
if (packMagicParsed == false || tempInt < 1)
{
importSettings.errors.Add("Invalid packmagic parameter: " + param);
@@ -405,8 +503,8 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-skip":
- Log.WriteLine("skip = " + param);
- bool skipParsed = int.TryParse(param, out tempInt);
+ Log.Write("skip = " + param);
+ bool skipParsed = int.TryParse(param, NumberStyles.Integer, CultureInfo.InvariantCulture, out tempInt);
if (skipParsed == false || tempInt < 2)
{
importSettings.errors.Add("Invalid skip parameter: " + param);
@@ -419,8 +517,8 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-keep":
- Log.WriteLine("keep = " + param);
- bool keepParsed = int.TryParse(param, out tempInt);
+ Log.Write("keep = " + param);
+ bool keepParsed = int.TryParse(param, NumberStyles.Integer, CultureInfo.InvariantCulture, out tempInt);
if (keepParsed == false || tempInt < 2)
{
importSettings.errors.Add("Invalid keep parameter: " + param);
@@ -433,8 +531,8 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-maxfiles":
- Log.WriteLine("maxfiles = " + param);
- bool maxFilesParsed = int.TryParse(param, out tempInt);
+ Log.Write("maxfiles = " + param);
+ bool maxFilesParsed = int.TryParse(param, NumberStyles.Integer, CultureInfo.InvariantCulture, out tempInt);
if (maxFilesParsed == false)
{
importSettings.errors.Add("Invalid maxfiles parameter: " + param);
@@ -446,9 +544,9 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-maxthreads":
- Log.WriteLine("maxthreads = " + param);
+ Log.Write("maxthreads = " + param);
string cleanParam = param.Trim().TrimEnd('%');
- bool maxThreadsParsed = int.TryParse(cleanParam, out tempInt);
+ bool maxThreadsParsed = int.TryParse(cleanParam, NumberStyles.Integer, CultureInfo.InvariantCulture, out tempInt);
if (maxThreadsParsed == false)
{
importSettings.errors.Add("Invalid maxthreads parameter: " + param);
@@ -477,7 +575,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-metadata":
- Log.WriteLine("metadata = " + param);
+ Log.Write("metadata = " + param);
if (param != "true" && param != "false")
{
importSettings.errors.Add("Invalid metadata parameter: " + param);
@@ -489,7 +587,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-metadataonly":
- Log.WriteLine("metadataonly = " + param);
+ Log.Write("metadataonly = " + param);
if (param != "true" && param != "false")
{
importSettings.errors.Add("Invalid metadataonly parameter: " + param);
@@ -501,7 +599,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-averagetimestamp":
- Log.WriteLine("averagetimestamp = " + param);
+ Log.Write("averagetimestamp = " + param);
if (param != "true" && param != "false")
{
importSettings.errors.Add("Invalid averagetimestamp parameter: " + param);
@@ -513,7 +611,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-checkoverlap":
- Log.WriteLine("checkoverlap = " + param);
+ Log.Write("checkoverlap = " + param);
if (param != "true" && param != "false")
{
importSettings.errors.Add("Invalid checkoverlap parameter: " + param);
@@ -525,7 +623,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-json":
- Log.WriteLine("json = " + param);
+ Log.Write("json = " + param);
if (param != "true" && param != "false")
{
@@ -538,8 +636,8 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-seed":
- Log.WriteLine("seed = " + param);
- bool seedParsed = int.TryParse(param, out tempInt);
+ Log.Write("seed = " + param);
+ bool seedParsed = int.TryParse(param, NumberStyles.Integer, CultureInfo.InvariantCulture, out tempInt);
if (seedParsed == false)
{
importSettings.errors.Add("Invalid seed parameter: " + param);
@@ -551,18 +649,17 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-offset":
- Log.WriteLine("offset = " + param);
+ Log.Write("offset = " + param);
- // check if its true or false
+ // check if it's true or false (for automatic offset)
if (param != "false" && param != "true")
{
- // check if its valid integer x,z
+ // check if it has x,y,z values, NOTE they should be in this format: -offset=10.5,-123,0
if (param.IndexOf(',') > -1)
{
var temp = param.Split(',');
if (temp.Length == 3)
{
-
float xOff, yOff, zOff;
if (float.TryParse(temp[0].Trim(), out xOff) && float.TryParse(temp[1].Trim(), out yOff) && float.TryParse(temp[2].Trim(), out zOff))
{
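For clarity, the forms accepted by the -offset parsing above (values are made up):

    -offset=true          auto-offset the cloud near 0,0,0 (default)
    -offset=false         no offset
    -offset=10.5,-123,0   manual x,y,z offset, three comma-separated numbers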
@@ -584,7 +681,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
}
else
{
- importSettings.errors.Add("Invalid offset parameter: " + param);
+ importSettings.errors.Add("Invalid offset parameter (Use: min or legacy): " + param);
}
}
else // autooffset
@@ -595,9 +692,9 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-limit":
- Log.WriteLine("limit = " + param);
+ Log.Write("limit = " + param);
// TODO add option to use percentage
- bool limitParsed = int.TryParse(param, out tempInt);
+ bool limitParsed = int.TryParse(param, NumberStyles.Integer, CultureInfo.InvariantCulture, out tempInt);
if (limitParsed == false || tempInt <= 0)
{
importSettings.errors.Add("Invalid limit parameter: " + param);
@@ -610,7 +707,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-gridsize":
- Log.WriteLine("gridsize = " + param);
+ Log.Write("gridsize = " + param);
bool gridSizeParsed = float.TryParse(param, out tempFloat);
if (gridSizeParsed == false || tempFloat < 0.01f)
{
@@ -623,8 +720,8 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-minpoints":
- Log.WriteLine("minPoints = " + param);
- bool minpointsParsed = int.TryParse(param, out tempInt);
+ Log.Write("minPoints = " + param);
+ bool minpointsParsed = int.TryParse(param, NumberStyles.Integer, CultureInfo.InvariantCulture, out tempInt);
if (minpointsParsed == false || tempInt < 1)
{
importSettings.errors.Add("Invalid minpoints parameter: " + param + " (should be >0)");
@@ -636,7 +733,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-randomize":
- Log.WriteLine("randomize = " + param);
+ Log.Write("randomize = " + param);
if (param != "false" && param != "true")
{
@@ -648,8 +745,21 @@ public static ImportSettings Parse(string[] args, string rootFolder)
}
break;
+ case "-usegrid":
+ Log.Write("usegrid = " + param);
+
+ if (param != "false" && param != "true")
+ {
+ importSettings.errors.Add("Invalid usegrid parameter: " + param);
+ }
+ else
+ {
+ importSettings.useGrid = (param == "true");
+ }
+ break;
+
case "-rgb":
- Log.WriteLine("rgb = " + param);
+ Log.Write("rgb = " + param);
if (param != "false" && param != "true")
{
@@ -662,7 +772,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
break;
case "-intensity":
- Log.WriteLine("intensity = " + param);
+ Log.Write("intensity = " + param);
if (param != "false" && param != "true")
{
@@ -674,6 +784,62 @@ public static ImportSettings Parse(string[] args, string rootFolder)
}
break;
+ case "-classification":
+ Log.Write("classification = " + param);
+
+ if (param != "false" && param != "true")
+ {
+ importSettings.errors.Add("Invalid classification parameter: " + param);
+ }
+ else
+ {
+ importSettings.importClassification = (param == "true");
+ }
+ break;
+
+ case "-offsetmode":
+ Log.Write("offsetmode = " + param);
+
+ if (param != "legacy" && param != "min")
+ {
+ importSettings.errors.Add("Invalid offset parameter: (Use: min or legacy)" + param);
+ }
+ else
+ {
+ importSettings.offsetMode = param;
+ }
+ break;
+
+ case "-filter":
+ Log.Write("filter = " + param);
+
+ param = param.Replace(",", ".");
+ bool filterParsed = float.TryParse(param, NumberStyles.Float, CultureInfo.InvariantCulture, out tempFloat);
+ if (filterParsed == false || tempFloat <= 0f)
+ {
+ importSettings.errors.Add("Invalid filter value (must be greater than 0): " + param);
+ }
+ else
+ {
+ importSettings.useFilter = true;
+ importSettings.filterDistance = tempFloat;
+ }
+ break;
+
+ // loads the whole commandline args list from a text file
+ case "-config":
+ Log.Write("config = " + param);
+ // nothing to do here, the config file was already read and split at the start of Parse()
+ //if (File.Exists(param) == false)
+ //{
+ // importSettings.errors.Add("Config file not found: " + param);
+ //}
+ //else // got value,
+ //{
+ // //importSettings.config = param;
+ //}
+ break;
+
case "?":
case "/?":
case "help":
@@ -699,18 +865,21 @@ public static ImportSettings Parse(string[] args, string rootFolder)
Tools.PrintHelpAndExit(argValueSeparator, waitEnter: true);
}
+
+ // *** VALIDATE SETTINGS ***
+
// check that we had input
if (importSettings.inputFiles.Count == 0 || string.IsNullOrEmpty(importSettings.inputFiles[0]) == true)
{
- importSettings.errors.Add("No input file(s) defined (use -input" + argValueSeparator + "yourfile.las)");
+ importSettings.errors.Add("No input file(s) defined OR input folder is empty (use -input" + argValueSeparator + "yourfile.las or -input" + argValueSeparator + "yourfolder/)");
}
else // have input
{
if (importSettings.batch == true)
{
- Log.WriteLine("Found " + importSettings.inputFiles.Count + " files..");
+ Log.Write("Found " + importSettings.inputFiles.Count + " files..");
- // if no output folder given
+ // if no output folder given at all
if (string.IsNullOrEmpty(importSettings.outputFile) == true)
{
if (importSettings.exportFormat == ExportFormat.UCPC)
@@ -719,7 +888,7 @@ public static ImportSettings Parse(string[] args, string rootFolder)
if (importSettings.inputFiles != null && importSettings.inputFiles.Count > 1)
{
importSettings.outputFile = Path.GetDirectoryName(importSettings.inputFiles[0]) + Path.DirectorySeparatorChar;
- Log.WriteLine("importSettings.outputFile=" + importSettings.outputFile);
+ Log.Write("importSettings.outputFile=" + importSettings.outputFile);
}
else
{
@@ -732,33 +901,81 @@ public static ImportSettings Parse(string[] args, string rootFolder)
importSettings.errors.Add("(C) -output file or folder not defined (its required for V3 PCROOT format)");
}
}
+ else // have something in output field
+ {
+ // check if output is folder
+ if (Directory.Exists(importSettings.outputFile) == true)
+ {
+ if (importSettings.exportFormat == ExportFormat.PCROOT)
+ {
+ importSettings.errors.Add("(E) PCROOT Requires some output filename (example: output.pcroot)");
+ }
+ if (importSettings.exportFormat == ExportFormat.External && importSettings.batch == false)
+ {
+ importSettings.errors.Add("(E2) External formats require some output filename for non-batch operations (example: basefilename)");
+ }
+ }
+ }
}
else // not in batch
{
+ // check if first file exists
if (File.Exists(importSettings.inputFiles[0]) == false)
{
importSettings.errors.Add("(B) Input file not found: " + importSettings.inputFiles[0]);
}
- // if no output file defined, put in same folder as source
+ // if no output folder/file defined, put in same folder as source
if (string.IsNullOrEmpty(importSettings.outputFile) == true)
{
- // v2 output
+ // FIXME handles v2 output only now
var outputFolder = Path.GetDirectoryName(importSettings.inputFiles[0]);
var outputFilename = Path.GetFileNameWithoutExtension(importSettings.inputFiles[0]);
importSettings.outputFile = Path.Combine(outputFolder, outputFilename + ".ucpc");
-
}
}
- }
+ } // have input
+
// check required settings
if (importSettings.exportFormat == ExportFormat.Unknown)
{
- importSettings.errors.Add("No export format defined (Example: -exportformat" + argValueSeparator + "UCPC)");
+ importSettings.errors.Add("No export format defined (Example: -exportformat" + argValueSeparator + "PCROOT)");
+ }
+
+ // check that files are in correct format
+ if (importSettings.inputFiles != null && importSettings.inputFiles.Count > 0)
+ {
+ var currentExtension = importSettings.importFormat.ToString().ToLower();
+ bool wrongExtension = false;
+
+ for (int i = 0; i < importSettings.inputFiles.Count; i++)
+ {
+ var ext = Path.GetExtension(importSettings.inputFiles[i]).ToLower();
+
+ if (currentExtension == "las")
+ {
+ if (ext != ".las" && ext != ".laz")
+ {
+ wrongExtension = true;
+ break;
+ }
+ }
+ else if (currentExtension == "ply")
+ {
+ if (ext != ".ply")
+ {
+ wrongExtension = true;
+ break;
+ }
+ }
+ }
+
+ if (wrongExtension) importSettings.errors.Add("Input files are not in the selected format (" + importSettings.importFormat + ")");
}
+
// cannot have both rgb & intensity
//if (importSettings.importRGB == true && importSettings.importIntensity == true)
//{
@@ -766,9 +983,20 @@ public static ImportSettings Parse(string[] args, string rootFolder)
//}
// must have at least one
- if (importSettings.importRGB == false && importSettings.importIntensity == false)
+ if (importSettings.importRGB == false && importSettings.importIntensity == false && importSettings.importClassification == false)
+ {
+ importSettings.errors.Add("Must have -rgb OR -intensity OR -classification enabled");
+ }
+
+ // but cannot have intensity and classification without rgb
+ if (importSettings.importRGB == false && importSettings.importIntensity == true && importSettings.importClassification == true)
+ {
+ importSettings.errors.Add("Cannot have -intensity and -classification enabled without -rgb");
+ }
+
+ if (importSettings.exportFormat == ExportFormat.UCPC && importSettings.maxThreads > 1)
{
- importSettings.errors.Add("Must have -rgb OR -intensity enabled");
+ importSettings.errors.Add("UCPC format doesnt support multi-threading yet, use 1 thread only (or remove -maxthreads param)");
}
//// check mismatching settings for v2 vs v3
@@ -777,10 +1005,15 @@ public static ImportSettings Parse(string[] args, string rootFolder)
// //if (importSettings.gridSize)
//}
- //if (importSettings.batch == true && importSettings.exportFormat != ExportFormat.PCROOT)
- //{
- // importSettings.errors.Add("Folder batch is only supported for PCROOT (v3) version: -exportformat=pcroot");
- //}
+ if (importSettings.batch == true && importSettings.exportFormat == ExportFormat.UCPC && Path.GetExtension(importSettings.outputFile).ToLower() == ".ucpc")
+ {
+ importSettings.errors.Add("With batch processing whole input folder, do not set output filename - Set output folder (each .UCPP file will be saved separately)");
+ }
+
+ if (importSettings.batch == true && importSettings.exportFormat == ExportFormat.External && Path.GetExtension(importSettings.outputFile).ToLower() == ".glb")
+ {
+ importSettings.errors.Add("With batch processing whole input folder, do not set output filename - Set output folder (each .GLB file will be saved separately)");
+ }
if (importSettings.skipPoints == true && importSettings.keepPoints == true)
{
@@ -791,7 +1024,25 @@ public static ImportSettings Parse(string[] args, string rootFolder)
{
importSettings.importFormat = ImportFormat.LAS;
importSettings.reader = new LAZ(null);
- Log.WriteLine("No import format defined, using Default: " + importSettings.importFormat.ToString());
+ Log.Write("No import format defined, using Default: " + importSettings.importFormat.ToString());
+ }
+
+ if (importSettings.importFormat == ImportFormat.PLY)
+ {
+ if (importSettings.importIntensity || importSettings.importClassification) Log.Write("PLY doesn't support intensity or classification importing.");
+ if (importSettings.packColors) Log.Write("PLY doesn't support color packing.");
+ }
+
+ if (importSettings.exportFormat == ExportFormat.PCROOT && importSettings.useGrid == false)
+ {
+ //importSettings.errors.Add("V3 pcroot export format requires -usegrid=true to use grid");
+ Log.Write("V3 pcroot export format requires -usegrid=true to use grid, enabling it now.");
+ importSettings.useGrid = true;
+ }
+
+ if (importSettings.useCustomIntensityRange == true && importSettings.detectIntensityRange == true)
+ {
+ importSettings.errors.Add("Cannot use -customintensityrange and -detectintensityrange at the same time");
}
// disable this error, if user really wants to use it
@@ -811,20 +1062,20 @@ public static ImportSettings Parse(string[] args, string rootFolder)
if (importSettings.useJSONLog == true)
{
// TODO workaround to get logevent in this json data (not used later)
- importSettings.version = Log.version;
+ //importSettings.version = Log.version;
importSettings.@event = Logger.LogEvent.Settings;
- Log.WriteLine(importSettings.ToJSON(), Logger.LogEvent.Settings);
+ Log.Write(importSettings.ToJSON(), Logger.LogEvent.Settings);
}
// show errors
if (importSettings.errors.Count > 0)
{
Console.ForegroundColor = ConsoleColor.Yellow;
- Log.WriteLine("\nErrors found:");
+ Log.Write("\nErrors found:");
Console.ForegroundColor = ConsoleColor.Red;
for (int i = 0; i < importSettings.errors.Count; i++)
{
- Log.WriteLine(i + "> " + importSettings.errors[i]);
+ Log.Write(i + "> " + importSettings.errors[i]);
}
Console.ForegroundColor = ConsoleColor.White;
diff --git a/Tools/LogText.cs b/Tools/LogText.cs
deleted file mode 100644
index 760977b..0000000
--- a/Tools/LogText.cs
+++ /dev/null
@@ -1,108 +0,0 @@
-using PointCloudConverter.Logger;
-using PointCloudConverter;
-using System.Diagnostics;
-
-namespace PointCloudConverter.Logger
-{
- public enum LogEvent
- {
- Start,
- Settings,
- File,
- End,
- Error,
- Warning,
- Info,
- Debug
- }
-
- public enum LogStatus
- {
- Processing,
- Complete
- }
-
- public interface ILogger
- {
- void Write(string msg);
- void Write(string msg, LogEvent eventType);
- }
-
- public class LogText : ILogger
- {
- public void Write(string msg)
- {
- Console.WriteLine(msg);
- Trace.WriteLine(msg);
- }
-
- public void Write(string msg, LogEvent eventType)
- {
- // TODO not supported yet (later could have different colors for different events)
- //Console.WriteLine("NOTIMPLEMENTED: "+msg);
- //throw new NotImplementedException();
- }
- }
-
- public class LogJSON : ILogger
- {
- public void Write(string msg)
- {
- // no output, since its not json message
- }
-
- public void Write(string msg, LogEvent eventType)
- {
- var json = msg;
- Console.WriteLine(json);
- }
- }
-
-}
-
-public static class Log
-{
- static ILogger logger;
- static ImportSettings settings = null; // copy of settings for logging?
- public static string version = null;
-
- public static bool json()
- {
- if (settings == null) return false;
- return settings.useJSONLog;
- }
-
- //public static void CreateLogger(ImportSettings import, string version)
- public static void CreateLogger(bool isJSON, string version)
- {
- //if (settings != null) Console.WriteLine("Warning: CreateLogger has been called already.. Replacing it.");
-
- Log.version = version;
-
- if (isJSON == true)
- {
- logger = new LogJSON();
- }
- else
- {
- logger = new LogText();
- }
- }
-
- public static void SetSettings(ImportSettings import)
- {
- settings = import;
- }
-
- public static void WriteLine(string message)
- {
- // this is for console.writeline, no json
- logger.Write(message);
- }
-
- // this is for json
- public static void WriteLine(string message, LogEvent logEvent)
- {
- logger.Write(message, logEvent);
- }
-}
\ No newline at end of file
diff --git a/Tools/PluginLoader.cs b/Tools/PluginLoader.cs
new file mode 100644
index 0000000..3d7cb00
--- /dev/null
+++ b/Tools/PluginLoader.cs
@@ -0,0 +1,53 @@
+using PointCloudConverter.Logger;
+using PointCloudConverter.Writers;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace PointCloudConverter.Plugins
+{
+ public static class PluginLoader
+ {
+ // Resolve plugin folder relative to the .exe location instead of current working directory
+ static readonly string pluginDirectory = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "Plugins");
+
+ // TODO add logger, if needed
+ // static ILogger Log;
+
+ public static IWriter LoadWriter(string pluginName)
+ {
+ // Log = logger;
+
+ // Build the full path to the plugin DLL
+ string pluginPath = Path.Combine(pluginDirectory, pluginName + ".dll");
+
+ // Log.Write($"Loading plugin at {pluginPath}");
+
+ // Check if the plugin DLL exists
+ if (File.Exists(pluginPath) == false)
+ throw new FileNotFoundException($"The plugin at {pluginPath} could not be found.");
+
+ // Load the plugin assembly from the DLL
+ var pluginAssembly = Assembly.LoadFrom(pluginPath);
+
+ // Find the specific type 'PointCloudConverter.Writers.' + pluginName
+ // This assumes the type name inside the DLL matches the filename
+ var writerType = pluginAssembly.GetType("PointCloudConverter.Writers." + pluginName);
+
+ if (writerType == null)
+ throw new InvalidOperationException($"No valid implementation of IWriter found in {pluginPath}");
+
+ // Check if the type implements IWriter
+ if (!typeof(IWriter).IsAssignableFrom(writerType))
+ throw new InvalidOperationException($"{writerType.FullName} does not implement IWriter");
+
+ // Create an instance of the IWriter implementation
+ return (IWriter)Activator.CreateInstance(writerType);
+ }
+ }
+}
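A hedged usage sketch for the new loader; the plugin name GLB and the folder layout are illustrative assumptions, not part of this diff:

    // needs: using System; using System.IO; using PointCloudConverter.Plugins; using PointCloudConverter.Writers;
    // assumes Plugins\GLB.dll sits next to the executable and contains a class
    // PointCloudConverter.Writers.GLB that implements IWriter
    try
    {
        IWriter writer = PluginLoader.LoadWriter("GLB");
        // ArgParser assigns the result to importSettings.writer for -exportformat=GLB
    }
    catch (FileNotFoundException ex)
    {
        // LoadWriter throws (rather than returning null) when the DLL is missing
        Console.WriteLine(ex.Message);
    }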
diff --git a/Tools/Tools.cs b/Tools/Tools.cs
index ce76ce2..31c95e6 100644
--- a/Tools/Tools.cs
+++ b/Tools/Tools.cs
@@ -1,5 +1,6 @@
using PointCloudConverter.Structs;
using SharpNeatLib.Maths;
+using System.Collections;
using System.Globalization;
using System.IO;
using System.Numerics;
@@ -139,28 +140,130 @@ public static Vector2 SuperUnpacker(float f, float GridSizeAndPackMagic)
// return (ax * bx + ay * by + az * bz);
//}
- public static void Shuffle(ref List<float> array1, ref List<float> array2, ref List<float> array3, ref List<float> arrayR, ref List<float> arrayG, ref List<float> arrayB)
+ public static void ShuffleInPlace(params IList[] arrays)
{
- int index = array1.Count;
+ //ResetRandom();
+
+ // Assume all lists are the same length
+ if (arrays.Length == 0 || arrays[0] == null)
+ return;
+
+ int count = arrays[0].Count;
+
+ for (int i = count - 1; i > 0; i--)
+ {
+ int rand = frnd.Next(0, i + 1);
+
+ foreach (var list in arrays)
+ {
+ if (list == null || list.Count <= i || list.Count <= rand)
+ continue;
+
+ object temp = list[i];
+ list[i] = list[rand];
+ list[rand] = temp;
+ }
+ }
+ }
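A small sketch of how the new ShuffleInPlace keeps parallel lists aligned; the sample data is made up:

    // needs: using System.Collections.Generic;
    var xs  = new List<float> { 1f, 2f, 3f };
    var ys  = new List<float> { 10f, 20f, 30f };
    var cls = new List<byte>  { 1, 2, 3 };
    Tools.ShuffleInPlace(xs, ys, cls); // params IList[] accepts lists with different element types
    // the same (i, rand) swap is applied to every list, so xs[k], ys[k] and cls[k]
    // still describe the same point after shuffling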
+
+
+ public static void Shuffle(ref List array)
+ {
+ ResetRandom();
+ int index = array.Count;
while (index > 1)
{
- //int rand = rnd.Next(index--);
int rand = frnd.Next(0, index--);
+ (array[index], array[rand]) = (array[rand], array[index]);
+ }
+ }
+ public static void Shuffle(ref List array)
+ {
+ ResetRandom();
+ int index = array.Count;
+ while (index > 1)
+ {
+ int rand = frnd.Next(0, index--);
+ (array[index], array[rand]) = (array[rand], array[index]);
+ }
+ }
+
+ public static void Shuffle(ref List array)
+ {
+ ResetRandom();
+ int index = array.Count;
+ while (index > 1)
+ {
+ int rand = frnd.Next(0, index--);
+ (array[index], array[rand]) = (array[rand], array[index]);
+ }
+ }
+
+
+
+ // x,y,z,r,g,b
+ public static void Shuffle(ref List<float> x, ref List<float> y, ref List<float> z, ref List<float> r, ref List<float> g, ref List<float> b)
+ {
+ int index = x.Count;
+ while (index > 1)
+ {
+ //int rand = rnd.Next(index--);
+ int rand = frnd.Next(0, index--);
// Swap using tuple assignment
- (array1[index], array1[rand]) = (array1[rand], array1[index]);
- (array2[index], array2[rand]) = (array2[rand], array2[index]);
- (array3[index], array3[rand]) = (array3[rand], array3[index]);
- (arrayR[index], arrayR[rand]) = (arrayR[rand], arrayR[index]);
- (arrayG[index], arrayG[rand]) = (arrayG[rand], arrayG[index]);
- (arrayB[index], arrayB[rand]) = (arrayB[rand], arrayB[index]);
+ (x[index], x[rand]) = (x[rand], x[index]);
+ (y[index], y[rand]) = (y[rand], y[index]);
+ (z[index], z[rand]) = (z[rand], z[index]);
+ (r[index], r[rand]) = (r[rand], r[index]);
+ (g[index], g[rand]) = (g[rand], g[index]);
+ (b[index], b[rand]) = (b[rand], b[index]);
+ }
+ }
+
+ // x,y,z,r,g,b,i,t,c
+ public static void Shuffle(ref List x, ref List y, ref List z, ref List r, ref List g, ref List b, ref List i, ref List t, ref List c)
+ {
+ int index = x.Count;
+ while (index > 1)
+ {
+ //int rand = rnd.Next(index--);
+ int rand = frnd.Next(0, index--);
+
+ (x[index], x[rand]) = (x[rand], x[index]);
+ (y[index], y[rand]) = (y[rand], y[index]);
+ (z[index], z[rand]) = (z[rand], z[index]);
+ (r[index], r[rand]) = (r[rand], r[index]);
+ (g[index], g[rand]) = (g[rand], g[index]);
+ (b[index], b[rand]) = (b[rand], b[index]);
+ (i[index], i[rand]) = (i[rand], i[index]);
+ (t[index], t[rand]) = (t[rand], t[index]); // time is a double list, swap it too so timestamps stay aligned with their points
+ (c[index], c[rand]) = (c[rand], c[index]);
}
}
- public static void Shuffle(ref List array1, ref List array2, ref List array3, ref List arrayR, ref List arrayG, ref List arrayB, ref List arrayIntensity)
+ // x,y,z,r,g,b,i,c
+ public static void Shuffle(ref List x, ref List y, ref List z, ref List r, ref List g, ref List b, ref List i, ref List c)
{
- int index = array1.Count;
+ int index = x.Count;
+ while (index > 1)
+ {
+ //int rand = rnd.Next(index--);
+ int rand = frnd.Next(0, index--);
+ (x[index], x[rand]) = (x[rand], x[index]);
+ (y[index], y[rand]) = (y[rand], y[index]);
+ (z[index], z[rand]) = (z[rand], z[index]);
+ (r[index], r[rand]) = (r[rand], r[index]);
+ (g[index], g[rand]) = (g[rand], g[index]);
+ (b[index], b[rand]) = (b[rand], b[index]);
+ (i[index], i[rand]) = (i[rand], i[index]);
+ (c[index], c[rand]) = (c[rand], c[index]);
+ }
+ }
+
+ // x,y,z,r,g,b,i
+ public static void Shuffle(ref List x, ref List y, ref List z, ref List r, ref List g, ref List b, ref List i)
+ {
+ int index = x.Count;
while (index > 1)
{
//int rand = rnd.Next(index--);
@@ -168,41 +271,43 @@ public static void Shuffle(ref List array1, ref List array2, ref L
// Swap using tuple assignment
- (array1[index], array1[rand]) = (array1[rand], array1[index]);
- (array2[index], array2[rand]) = (array2[rand], array2[index]);
- (array3[index], array3[rand]) = (array3[rand], array3[index]);
- (arrayR[index], arrayR[rand]) = (arrayR[rand], arrayR[index]);
- (arrayG[index], arrayG[rand]) = (arrayG[rand], arrayG[index]);
- (arrayB[index], arrayB[rand]) = (arrayB[rand], arrayB[index]);
- (arrayIntensity[index], arrayIntensity[rand]) = (arrayIntensity[rand], arrayIntensity[index]);
+ (x[index], x[rand]) = (x[rand], x[index]);
+ (y[index], y[rand]) = (y[rand], y[index]);
+ (z[index], z[rand]) = (z[rand], z[index]);
+ (r[index], r[rand]) = (r[rand], r[index]);
+ (g[index], g[rand]) = (g[rand], g[index]);
+ (b[index], b[rand]) = (b[rand], b[index]);
+ (i[index], i[rand]) = (i[rand], i[index]);
}
}
- public static void Shuffle(ref List array1, ref List array2, ref List array3, ref List arrayR, ref List arrayG, ref List arrayB, ref List arrayIntensity, ref List arrayTime)
+ // x,y,z,r,g,b,i,t
+ public static void Shuffle(ref List x, ref List y, ref List z, ref List r, ref List g, ref List b, ref List i, ref List t)
{
- int index = array1.Count;
+ int index = x.Count;
while (index > 1)
{
//int rand = rnd.Next(index--);
int rand = frnd.Next(0, index--);
// Swap using tuple assignment
- (array1[index], array1[rand]) = (array1[rand], array1[index]);
- (array2[index], array2[rand]) = (array2[rand], array2[index]);
- (array3[index], array3[rand]) = (array3[rand], array3[index]);
- (arrayR[index], arrayR[rand]) = (arrayR[rand], arrayR[index]);
- (arrayG[index], arrayG[rand]) = (arrayG[rand], arrayG[index]);
- (arrayB[index], arrayB[rand]) = (arrayB[rand], arrayB[index]);
- (arrayIntensity[index], arrayIntensity[rand]) = (arrayIntensity[rand], arrayIntensity[index]);
+ (x[index], x[rand]) = (x[rand], x[index]);
+ (y[index], y[rand]) = (y[rand], y[index]);
+ (z[index], z[rand]) = (z[rand], z[index]);
+ (r[index], r[rand]) = (r[rand], r[index]);
+ (g[index], g[rand]) = (g[rand], g[index]);
+ (b[index], b[rand]) = (b[rand], b[index]);
+ (i[index], i[rand]) = (i[rand], i[index]);
// Handle double separately since it's a different type
- (arrayTime[index], arrayTime[rand]) = (arrayTime[rand], arrayTime[index]);
+ (t[index], t[rand]) = (t[rand], t[index]);
}
}
- public static void Shuffle(ref List<float> array1, ref List<float> array2, ref List<float> array3, ref List<float> arrayR, ref List<float> arrayG, ref List<float> arrayB, ref List<double> arrayTime)
+ // x,y,z,r,g,b,t
+ public static void Shuffle(ref List<float> x, ref List<float> y, ref List<float> z, ref List<float> r, ref List<float> g, ref List<float> b, ref List<double> t)
{
- int index = array1.Count;
+ int index = x.Count;
while (index > 1)
{
@@ -210,13 +315,13 @@ public static void Shuffle(ref List array1, ref List array2, ref L
int rand = frnd.Next(0, index--);
// Swapping using tuples
- (array1[index], array1[rand]) = (array1[rand], array1[index]);
- (array2[index], array2[rand]) = (array2[rand], array2[index]);
- (array3[index], array3[rand]) = (array3[rand], array3[index]);
- (arrayR[index], arrayR[rand]) = (arrayR[rand], arrayR[index]);
- (arrayG[index], arrayG[rand]) = (arrayG[rand], arrayG[index]);
- (arrayB[index], arrayB[rand]) = (arrayB[rand], arrayB[index]);
- (arrayTime[index], arrayTime[rand]) = (arrayTime[rand], arrayTime[index]);
+ (x[index], x[rand]) = (x[rand], x[index]);
+ (y[index], y[rand]) = (y[rand], y[index]);
+ (z[index], z[rand]) = (z[rand], z[index]);
+ (r[index], r[rand]) = (r[rand], r[index]);
+ (g[index], g[rand]) = (g[rand], g[index]);
+ (b[index], b[rand]) = (b[rand], b[index]);
+ (t[index], t[rand]) = (t[rand], t[index]);
}
}
@@ -243,16 +348,16 @@ public static void ShuffleXYZ(ref float[] array1)
public static int ParseInt(string s)
{
int f = 0;
- // TODO add invariant culture
- int.TryParse(s, out f);
+ s = s.Replace(",", ".");
+ int.TryParse(s, NumberStyles.Integer, CultureInfo.InvariantCulture, out f);
return f;
}
public static float ParseFloat(string s)
{
float f = 0;
- // TODO add invariant culture
- float.TryParse(s, out f);
+ s = s.Replace(",", ".");
+ float.TryParse(s, NumberStyles.Float, CultureInfo.InvariantCulture, out f);
return f;
}
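A quick illustration of what the invariant-culture change means in practice, assuming the helpers are called as-is:

    float a = Tools.ParseFloat("1.5"); // 1.5f on any system locale
    float b = Tools.ParseFloat("1,5"); // also 1.5f, the comma is normalized to a dot first
    int c = Tools.ParseInt("42");      // 42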
@@ -276,6 +381,7 @@ public static void PrintHelpAndExit(char argSeparator, bool waitEnter = false)
Console.WriteLine("-output" + argSeparator + "yourfile.ucpc\t(Default is same folder as input file. For v3 you dont need to set file extension)");
Console.WriteLine("-rgb" + argSeparator + "true or false\tReads RGB colors\tDefault is true");
Console.WriteLine("-intensity" + argSeparator + "true or false\tReads Intensity as RGB color\tDefault is false");
+ Console.WriteLine("-classification" + argSeparator + "false\t\tImport classification data\tDefault is false");
Console.WriteLine("-offset" + argSeparator + "true or false\tAuto-offsets cloud near 0,0,0 by using the first point as offset value\tDefault is true");
Console.WriteLine("-gridsize" + argSeparator + "5\t\tGridsize in meters, splits cloud into tiles with this size. v3 only!\tDefault is 5, minimum is 0.1 (Note: values below 1 are not really tested)");
Console.WriteLine("-minpoints" + argSeparator + "1000\t\tIf tile has less points than this value, its discarded. Good for removing straypoints. v3 only!\tDefault is 1000");
@@ -286,17 +392,22 @@ public static void PrintHelpAndExit(char argSeparator, bool waitEnter = false)
Console.WriteLine("-limit" + argSeparator + "10000\t\tLoad only this many points (good for testing settings first)\tDefault is off");
Console.WriteLine("-skip" + argSeparator + "0\t\t\tSkip every Nth point (For reducing point count)\tDefault is off");
Console.WriteLine("-keep" + argSeparator + "0\t\t\tKeep only every Nth point (For reducing point count)\tDefault is off");
+ Console.WriteLine("-filter" + argSeparator + "0\t\t\tKeep only first point within this distance on world grid (In Unity units)\tDefault is off");
Console.WriteLine("-maxfiles" + argSeparator + "10\t\t\tFor batch processing, parse only this many files (good for testing with few files first)\tDefault is parse all found files");
// TODO Console.WriteLine("-decimate" + separator + "50\t\t\tRemoves 50% of the points (by skipping every x point)\tDefault is off");
//Console.WriteLine("-version" + argSeparator + "2\t\t2=v2 .ucpc, 3=v3 .pcroot tiles\tDefault is 2");
Console.WriteLine("-randomize" + argSeparator + "true\t\tRandomize point indexes, to use Dynamic resolution\tDefault is true (Always enabled for v3)");
- Console.WriteLine("-seed" + argSeparator + "42\t\tSet random seed\tDefault is random value");
+ Console.WriteLine("-seed" + argSeparator + "42\t\tSet random seed\tDefault is some random value");
Console.WriteLine("-json" + argSeparator + "false\t\tOutput console log in JSON format\tDefault is false");
- Console.WriteLine("-customintensityrange" + argSeparator + "false\t\tCustom intensity range (0-65535)\tDefault is false");
+ Console.WriteLine("-customintensityrange" + argSeparator + "false\t\tUse custom intensity range (0-65535) instead of 0-255\tDefault is false");
Console.WriteLine("-metadata" + argSeparator + "false\t\tRead metadata from header, outputs into json file\tDefault is false");
Console.WriteLine("-metadataonly" + argSeparator + "false\t\tRead metadata only (dont process points)\tDefault is false");
Console.WriteLine("-averagetimestamp" + argSeparator + "false\t\tGet Average timestamp per Tile\tDefault is false");
Console.WriteLine("-checkoverlap" + argSeparator + "false\t\tCalculate overlapping tiles\tDefault is false");
+ Console.WriteLine("-config" + argSeparator + "filename\t\tLoad arguments from text file (easier to handle separate settings for different projects)");
+ Console.WriteLine("-usegrid" + argSeparator + "true\t\tSplits point cloud to grid (multiple files). Required for V3 format (automatically enabled if its off). \tDefault is true for v3");
+ Console.WriteLine("-offsetmode" + argSeparator + "min\t\tGet auto-offset bounds, min=min from all bounds, legacy= first cloud min bounds\tDefault is min");
+
Console.WriteLine("");
Console.WriteLine("? /? -? help -help /help");
Console.ForegroundColor = ConsoleColor.White;
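For reference, a hypothetical invocation exercising several of the options added above (all paths and values are made up; the parser expects -importformat before a folder -input):

    PointCloudConverter.exe -importformat=LAS -input="D:\scans\area1" -exportformat=PCROOT -output="D:\out\area1\mycloud" -rgb=true -classification=true -offsetmode=min -filter=0.5 -usegrid=true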
diff --git a/Writers/PCROOT.cs b/Writers/PCROOT.cs
index a78b840..83ed518 100644
--- a/Writers/PCROOT.cs
+++ b/Writers/PCROOT.cs
@@ -2,6 +2,7 @@
using PointCloudConverter.Logger;
using System;
+using System.Collections;
using System.Diagnostics;
using System.IO;
using System.Runtime.CompilerServices;
@@ -15,43 +16,48 @@ public class PCROOT : IWriter, IDisposable
const string tileExtension = ".pct";
const string sep = "|";
- static ImportSettings importSettings;
BufferedStream bsPoints = null;
BinaryWriter writerPoints = null;
+ ImportSettings importSettings; // per-file settings (instance field now, was static)
static List nodeBounds = new List(); // for all tiles
+ static float cloudMinX = float.PositiveInfinity;
+ static float cloudMinY = float.PositiveInfinity;
+ static float cloudMinZ = float.PositiveInfinity;
+ static float cloudMaxX = float.NegativeInfinity;
+ static float cloudMaxY = float.NegativeInfinity;
+ static float cloudMaxZ = float.NegativeInfinity;
+ StringBuilder keyBuilder = new StringBuilder(32);
Dictionary keyCache = new Dictionary();
// our nodes (=tiles, =grid cells), string is tileID and float are X,Y,Z,R,G,B values
Dictionary<string, List<float>> nodeX = new Dictionary<string, List<float>>();
Dictionary<string, List<float>> nodeY = new Dictionary<string, List<float>>();
Dictionary<string, List<float>> nodeZ = new Dictionary<string, List<float>>();
-
Dictionary<string, List<float>> nodeR = new Dictionary<string, List<float>>();
Dictionary<string, List<float>> nodeG = new Dictionary<string, List<float>>();
Dictionary<string, List<float>> nodeB = new Dictionary<string, List<float>>();
-
- Dictionary<string, List<float>> nodeIntensity = new Dictionary<string, List<float>>();
+ Dictionary<string, List<ushort>> nodeIntensity = new Dictionary<string, List<ushort>>();
+ Dictionary<string, List<byte>> nodeClassification = new Dictionary<string, List<byte>>();
Dictionary<string, List<double>> nodeTime = new Dictionary<string, List<double>>();
- static float cloudMinX = float.PositiveInfinity;
- static float cloudMinY = float.PositiveInfinity;
- static float cloudMinZ = float.PositiveInfinity;
- static float cloudMaxX = float.NegativeInfinity;
- static float cloudMaxY = float.NegativeInfinity;
- static float cloudMaxZ = float.NegativeInfinity;
+ //int? taskID;
- int? taskID;
+ static int skippedNodesCounter = 0;
+ static int skippedPointsCounter = 0; // FIXME only used for lossy filtering, not in regular mode; TODO could be calculated from importsettings values
+ static bool useLossyFiltering = false; //not used, for testing only
public void Dispose()
{
- //Log.WriteLine("Memory used: " + GC.GetTotalMemory(false));
- //Log.WriteLine("*** PCROOT writer disposed for task: " + taskID);
+ //Log.Write("Memory used: " + GC.GetTotalMemory(false));
Dispose(true);
- GC.SuppressFinalize(this);
GC.Collect();
- //Log.WriteLine("Memory used: " + GC.GetTotalMemory(false));
+ // GC.SuppressFinalize(this);
+ GC.WaitForPendingFinalizers();
+ GC.Collect();
+ //GC.Collect();
+ //Log.Write("Memory used: " + GC.GetTotalMemory(false));
}
@@ -59,23 +65,51 @@ private void ClearDictionary(Dictionary> dictionary)
{
if (dictionary != null)
{
- foreach (var key in dictionary.Keys)
+ foreach (var list in dictionary.Values)
+ {
+ list.Clear(); // Clear the list to free up memory
+ }
+ dictionary.Clear(); // Clear the dictionary itself
+ dictionary = null; // Help GC by removing reference
+ }
+ }
+
+ private void ClearDictionary(Dictionary> dictionary)
+ {
+ if (dictionary != null)
+ {
+ foreach (var list in dictionary.Values)
+ {
+ list.Clear(); // Clear the list to free up memory
+ }
+ dictionary.Clear(); // Clear the dictionary itself
+ dictionary = null; // Help GC by removing reference
+ }
+ }
+
+ private void ClearDictionary(Dictionary> dictionary)
+ {
+ if (dictionary != null)
+ {
+ foreach (var list in dictionary.Values)
{
- dictionary[key]?.Clear();
+ list.Clear(); // Clear the list to free up memory
}
- dictionary.Clear();
+ dictionary.Clear(); // Clear the dictionary itself
+ dictionary = null; // Help GC by removing reference
}
- }
-
+ }
+
private void ClearDictionary(Dictionary> dictionary)
{
if (dictionary != null)
{
- foreach (var key in dictionary.Keys)
+ foreach (var list in dictionary.Values)
{
- dictionary[key]?.Clear();
+ list.Clear(); // Clear the list to free up memory
}
- dictionary.Clear();
+ dictionary.Clear(); // Clear the dictionary itself
+ //dictionary = null; // Help GC by removing reference
}
}
@@ -83,56 +117,50 @@ protected virtual void Dispose(bool disposing)
{
if (disposing)
{
+ // Dispose managed resources here
bsPoints?.Dispose();
writerPoints?.Dispose();
- keyCache.Clear();
- keyCache = null;
-
+ // Clear and dispose instance dictionaries
ClearDictionary(nodeX);
- nodeX = null;
-
ClearDictionary(nodeY);
- nodeY = null;
-
ClearDictionary(nodeZ);
- nodeZ = null;
-
ClearDictionary(nodeR);
- nodeR = null;
-
ClearDictionary(nodeG);
- nodeG = null;
-
ClearDictionary(nodeB);
- nodeB = null;
-
ClearDictionary(nodeIntensity);
- nodeIntensity = null;
-
+ ClearDictionary(nodeClassification);
ClearDictionary(nodeTime);
- nodeTime = null;
+
+ keyCache.Clear();
+ keyCache = null;
}
+
+ // If there were unmanaged resources, you'd clean them up here
}
~PCROOT()
{
+ //Log.Write("pcroot writer finalized for task: " + taskID);
Dispose(false);
}
// add constructor
public PCROOT(int? _taskID)
{
- //Log.WriteLine("*** PCROOT writer created for task: " + _taskID);
- taskID = _taskID;
+ //Log.Write("*** PCROOT writer created for task: " + _taskID);
+ //taskID = _taskID;
}
- bool IWriter.InitWriter(ImportSettings _importSettings, int _pointCount)
- {
- //Log.WriteLine("--------------------- initwriter for taskID: " + taskID);
+ static ILogger Log;
+ public bool InitWriter(dynamic _importSettings, int pointCount, ILogger logger)
+ {
+ //Log.Write("--------------------- initwriter for taskID: " + taskID);
var res = true;
+ Log = logger;
+
// clear old nodes
keyCache.Clear();
nodeX.Clear();
@@ -142,11 +170,11 @@ bool IWriter.InitWriter(ImportSettings _importSettings, int _pointCount)
nodeG.Clear();
nodeB.Clear();
nodeIntensity.Clear();
+ nodeClassification.Clear();
nodeTime.Clear();
-
bsPoints = null;
writerPoints = null;
- importSettings = _importSettings;
+ importSettings = (ImportSettings)(object)_importSettings;
return res;
}
@@ -169,12 +197,15 @@ void IWriter.WriteRGB(float r, float g, float b)
// for pcroot, this is saving the rootfile
void IWriter.Close()
{
+ // this happens if imported metadata only?
+ if (importSettings == null) return;
+
// save rootfile
// only save after last file, TODO should save this if process fails or user cancels, so no need to start from 0 again.. but then needs some merge or continue from index n feature
// if (isLastTask == true)
//if (fileIndex == (importSettings.maxFiles - 1))
// {
- //Log.WriteLine(" ***************************** save this only after last file from all threads ***************************** ");
+ //Log.Write(" ***************************** save this only after last file from all threads ***************************** ");
// check if any tile overlaps with other tiles
if (importSettings.checkoverlap == true)
{
@@ -226,11 +257,15 @@ void IWriter.Close()
var tilerootdata = new List();
var outputFileRoot = Path.Combine(baseFolder, fileOnly) + ".pcroot";
- // add to tileroot list
long totalPointCount = 0;
+
+ // add to tileroot list
for (int i = 0, len = nodeBounds.Count; i < len; i++)
{
- var tilerow = nodeBounds[i].fileName + sep + nodeBounds[i].totalPoints + sep + nodeBounds[i].minX + sep + nodeBounds[i].minY + sep + nodeBounds[i].minZ + sep + nodeBounds[i].maxX + sep + nodeBounds[i].maxY + sep + nodeBounds[i].maxZ + sep + nodeBounds[i].cellX + sep + nodeBounds[i].cellY + sep + nodeBounds[i].cellZ + sep + nodeBounds[i].averageTimeStamp + sep + nodeBounds[i].overlapRatio;
+ var tilerow = nodeBounds[i].totalPoints + sep + nodeBounds[i].minX + sep + nodeBounds[i].minY + sep + nodeBounds[i].minZ + sep + nodeBounds[i].maxX + sep + nodeBounds[i].maxY + sep + nodeBounds[i].maxZ + sep + nodeBounds[i].cellX + sep + nodeBounds[i].cellY + sep + nodeBounds[i].cellZ + sep + nodeBounds[i].averageTimeStamp + sep + nodeBounds[i].overlapRatio;
+ // force dot as decimal separator for values
+ tilerow = tilerow.Replace(",", ".");
+ tilerow = nodeBounds[i].fileName + sep + tilerow;
tilerootdata.Add(tilerow);
totalPointCount += nodeBounds[i].totalPoints;
}
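For reference, the layout of one tile row written above, with made-up values; the column order is unchanged, only the forced dot decimal separator is new:

    fileName|totalPoints|minX|minY|minZ|maxX|maxY|maxZ|cellX|cellY|cellZ|averageTimeStamp|overlapRatio
    area1_0_2_0_3.pct|123456|0.0|0.0|0.5|5.0|5.0|5.5|2|0|3|301234.5|0.0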
@@ -243,13 +278,14 @@ void IWriter.Close()
"\"skippedPoints\": " + skippedPointsCounter + "" +
"}";
- Log.WriteLine(jsonString, LogEvent.End);
- Log.WriteLine("\nSaving rootfile: " + outputFileRoot + "\n*Total points= " + Tools.HumanReadableCount(totalPointCount));
+ Log.Write(jsonString, LogEvent.End);
+ Log.Write("\nSaving rootfile: " + outputFileRoot + "\n*Total points= " + Tools.HumanReadableCount(totalPointCount));
int versionID = importSettings.packColors ? 2 : 1; // (1 = original, 2 = packed v3 format)
if (importSettings.packColors == true) versionID = 2;
if (useLossyFiltering == true) versionID = 3;
- if (importSettings.importIntensity == true && importSettings.importRGB && importSettings.packColors) versionID = 4; // new int packed format
+ if ((importSettings.importIntensity == true || importSettings.importClassification == true) && importSettings.importRGB && importSettings.packColors) versionID = 4; // new int packed format
+ if ((importSettings.importIntensity == true && importSettings.importClassification == true) && importSettings.importRGB && importSettings.packColors) versionID = 5; // new int packed format + classification
bool addComments = false;
@@ -259,6 +295,7 @@ void IWriter.Close()
string commentRow = "# version" + sep + "gridsize" + sep + "pointcount" + sep + "boundsMinX" + sep + "boundsMinY" + sep + "boundsMinZ" + sep + "boundsMaxX" + sep + "boundsMaxY" + sep + "boundsMaxZ" + sep + "autoOffsetX" + sep + "autoOffsetY" + sep + "autoOffsetZ" + sep + "packMagicValue";
if (importSettings.importRGB == true && importSettings.importIntensity == true) commentRow += sep + "intensity";
+ if (importSettings.importRGB == true && importSettings.importClassification == true) commentRow += sep + "classification";
if (addComments) tilerootdata.Insert(1, commentRow);
// add global header settings to first row
@@ -266,6 +303,9 @@ void IWriter.Close()
string globalData = versionID + sep + importSettings.gridSize.ToString() + sep + totalPointCount + sep + cloudMinX + sep + cloudMinY + sep + cloudMinZ + sep + cloudMaxX + sep + cloudMaxY + sep + cloudMaxZ;
// autoOffsetX, globalOffsetY, globalOffsetZ, packMagic
globalData += sep + importSettings.offsetX + sep + importSettings.offsetY + sep + importSettings.offsetZ + sep + importSettings.packMagicValue;
+ // force dot as decimal separator
+ globalData = globalData.Replace(",", ".");
+
if (addComments)
{
tilerootdata.Insert(2, globalData);
@@ -281,28 +321,28 @@ void IWriter.Close()
File.WriteAllLines(outputFileRoot, tilerootdata.ToArray());
Console.ForegroundColor = ConsoleColor.Green;
- Log.WriteLine("Done saving v3 : " + outputFileRoot);
+ Log.Write("Done saving v3 : " + outputFileRoot);
Console.ForegroundColor = ConsoleColor.White;
if (skippedNodesCounter > 0)
{
- Log.WriteLine("*Skipped " + skippedNodesCounter + " nodes with less than " + importSettings.minimumPointCount + " points)");
+ Log.Write("*Skipped " + skippedNodesCounter + " nodes with less than " + importSettings.minimumPointCount + " points)");
}
if (useLossyFiltering == true && skippedPointsCounter > 0)
{
- Log.WriteLine("*Skipped " + skippedPointsCounter + " points due to bytepacked grid filtering");
+ Log.Write("*Skipped " + skippedPointsCounter + " points due to bytepacked grid filtering");
}
if ((tilerootdata.Count - 1) <= 0)
{
Console.ForegroundColor = ConsoleColor.Yellow;
// TODO add json error log
- Log.WriteLine("Error> No tiles found! Try enable -scale (to make your cloud to smaller) Or make -gridsize bigger, or set -limit point count to smaller value");
+ Log.Write("Error> No tiles found! Try enable -scale (to make your cloud to smaller) Or make -gridsize bigger, or set -limit point count to smaller value");
Console.ForegroundColor = ConsoleColor.White;
}
// cleanup after last file
- //nodeBounds.Clear();
+ nodeBounds.Clear();
cloudMinX = float.PositiveInfinity;
cloudMinY = float.PositiveInfinity;
@@ -313,15 +353,16 @@ void IWriter.Close()
// } // if last file
// clear all lists
- keyCache.Clear();
- nodeX.Clear();
- nodeY.Clear();
- nodeZ.Clear();
- nodeR.Clear();
- nodeG.Clear();
- nodeB.Clear();
- nodeIntensity.Clear();
- nodeTime.Clear();
+ //keyCache.Clear();
+ //nodeX.Clear();
+ //nodeY.Clear();
+ //nodeZ.Clear();
+ //nodeR.Clear();
+ //nodeG.Clear();
+ //nodeB.Clear();
+ //nodeIntensity.Clear();
+ //nodeTime.Clear();
+
// dispose
bsPoints?.Dispose();
@@ -331,7 +372,22 @@ void IWriter.Close()
void IWriter.Cleanup(int fileIndex)
{
- Dispose();
+ //Log.Write("Cleanup: this doesnt do anything yet..");
+ //Dispose();
+ bsPoints?.Dispose();
+ writerPoints?.Dispose();
+
+ // Clear and dispose instance dictionaries
+ ClearDictionary(nodeX);
+ ClearDictionary(nodeY);
+ ClearDictionary(nodeZ);
+ ClearDictionary(nodeR);
+ ClearDictionary(nodeG);
+ ClearDictionary(nodeB);
+ ClearDictionary(nodeIntensity);
+ ClearDictionary(nodeClassification);
+ ClearDictionary(nodeTime);
+ keyCache.Clear();
}
void IWriter.Randomize()
@@ -339,17 +395,9 @@ void IWriter.Randomize()
}
- StringBuilder keyBuilder = new StringBuilder(32);
-
- void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, float b, bool hasIntensity, float i, bool hasTime, double time)
+ void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, float b, ushort intensity, double time, byte classification)
{
// get global all clouds bounds
- //if (x < cloudMinX) cloudMinX = x;
- //if (x > cloudMaxX) cloudMaxX = x;
- //if (y < cloudMinY) cloudMinY = y;
- //if (y > cloudMaxY) cloudMaxY = y;
- //if (z < cloudMinZ) cloudMinZ = z;
- //if (z > cloudMaxZ) cloudMaxZ = z;
cloudMinX = Math.Min(cloudMinX, x);
cloudMaxX = Math.Max(cloudMaxX, x);
cloudMinY = Math.Min(cloudMinY, y);
@@ -392,8 +440,10 @@ void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, fl
nodeG[key].Add(g);
nodeB[key].Add(b);
- if (hasIntensity == true) nodeIntensity[key].Add(i);
- if (hasTime == true) nodeTime[key].Add(time);
+ if (importSettings.importRGB && importSettings.importIntensity == true) nodeIntensity[key].Add(intensity);
+ // TODO separate if rgb and or int?
+ if (importSettings.importRGB && importSettings.importClassification == true) nodeClassification[key].Add(classification);
+ if (importSettings.averageTimestamp == true) nodeTime[key].Add(time);
}
else // create new list for this key
{
@@ -405,10 +455,11 @@ void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, fl
nodeG[key] = new List { g };
nodeB[key] = new List { b };
- if (hasIntensity == true) nodeIntensity[key] = new List { i };
- if (hasTime == true) nodeTime[key] = new List { time };
+ if (importSettings.importRGB && importSettings.importIntensity == true) nodeIntensity[key] = new List { intensity };
+ if (importSettings.importRGB && importSettings.importClassification == true) nodeClassification[key] = new List { classification };
+ if (importSettings.averageTimestamp == true) nodeTime[key] = new List { time };
}
- }
+ } // addpoint()
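A minimal sketch of a call with the new AddPoint signature; the values are illustrative and writer is assumed to be an initialized IWriter (PCROOT) instance:

    // intensity is now a ushort and classification a byte, replacing the old
    // (bool hasIntensity, float i, bool hasTime, double time) parameter style
    writer.AddPoint(index: 0, x: 1.0f, y: 2.0f, z: 3.0f,
                    r: 0.5f, g: 0.5f, b: 0.5f,
                    intensity: 255, time: 0.0, classification: 2);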
[MethodImpl(MethodImplOptions.AggressiveInlining)]
unsafe void FloatToBytes(float value, byte[] buffer, int offset)
@@ -428,10 +479,8 @@ unsafe void IntToBytes(int value, byte[] buffer, int offset)
}
}
- static int skippedNodesCounter = 0;
- static int skippedPointsCounter = 0;
- static bool useLossyFiltering = false; //not used, for testing only
+ // returns list of saved files
void IWriter.Save(int fileIndex)
{
if (useLossyFiltering == true)
@@ -439,24 +488,12 @@ void IWriter.Save(int fileIndex)
Console.WriteLine("************* useLossyFiltering ****************");
}
-
string fileOnly = Path.GetFileNameWithoutExtension(importSettings.outputFile);
string baseFolder = Path.GetDirectoryName(importSettings.outputFile);
// TODO no need colors for json.. could move this inside custom logger, so that it doesnt do anything, if json
Console.ForegroundColor = ConsoleColor.Blue;
- // TODO add enum for status
-
- string jsonString = "{" +
- "\"event\": \"" + LogEvent.File + "\"," +
- "\"status\": \"" + LogStatus.Complete + "\"," +
- "\"path\": " + JsonSerializer.Serialize(importSettings.inputFiles[fileIndex]) + "," +
- "\"tiles\": " + nodeX.Count + "," +
- "\"folder\": " + JsonSerializer.Serialize(baseFolder) + "}";
-
- // TODO combine 2 outputs.. only other one shows up now
- Log.WriteLine("Saving " + nodeX.Count + " tiles into: " + baseFolder);
- Log.WriteLine(jsonString, LogEvent.End);
+ Log.Write("Saving " + nodeX.Count + " tiles into: " + baseFolder);
Console.ForegroundColor = ConsoleColor.White;
@@ -468,9 +505,12 @@ void IWriter.Save(int fileIndex)
List<float> nodeTempG;
List<float> nodeTempB;
- List<float> nodeTempIntensity = null;
+ List<ushort> nodeTempIntensity = null;
+ List<byte> nodeTempClassification = null;
List<double> nodeTempTime = null;
+ List<string> outputFiles = new List<string>();
+
// process all tiles
//foreach (KeyValuePair> nodeData in nodeX)
foreach (KeyValuePair> nodeData in nodeX)
@@ -493,45 +533,50 @@ void IWriter.Save(int fileIndex)
nodeTempG = nodeG[key];
nodeTempB = nodeB[key];
- // collect both
+ // collect both rgb and intensity
if (importSettings.importRGB == true && importSettings.importIntensity == true)
+ //if (importSettings.importIntensity == true)
{
nodeTempIntensity = nodeIntensity[key];
}
+ // TODO: separate the classification check from the RGB requirement?
+ if (importSettings.importRGB == true && importSettings.importClassification == true)
+ {
+ nodeTempClassification = nodeClassification[key];
+ }
+
if (importSettings.averageTimestamp == true)
{
nodeTempTime = nodeTime[key];
}
-
// randomize points in this node
- if (importSettings.randomize == true)
+ if (importSettings.randomize)
{
- if (importSettings.importRGB == true && importSettings.importIntensity == true)
- {
- if (importSettings.averageTimestamp == true)
- {
- Tools.Shuffle(ref nodeTempX, ref nodeTempY, ref nodeTempZ, ref nodeTempR, ref nodeTempG, ref nodeTempB, ref nodeTempIntensity, ref nodeTempTime);
- }
- else
- {
- Tools.Shuffle(ref nodeTempX, ref nodeTempY, ref nodeTempZ, ref nodeTempR, ref nodeTempG, ref nodeTempB, ref nodeTempIntensity);
- }
- }
- else
+ var listsToShuffle = new List<System.Collections.IList> { nodeTempX, nodeTempY, nodeTempZ };
+
+ if (importSettings.importRGB)
{
- if (importSettings.averageTimestamp == true)
- {
- Tools.Shuffle(ref nodeTempX, ref nodeTempY, ref nodeTempZ, ref nodeTempR, ref nodeTempG, ref nodeTempB, ref nodeTempTime);
- }
- else
- {
- Tools.Shuffle(ref nodeTempX, ref nodeTempY, ref nodeTempZ, ref nodeTempR, ref nodeTempG, ref nodeTempB);
- }
+ listsToShuffle.Add(nodeTempR);
+ listsToShuffle.Add(nodeTempG);
+ listsToShuffle.Add(nodeTempB);
}
+
+ if (importSettings.importIntensity)
+ listsToShuffle.Add(nodeTempIntensity);
+
+ if (importSettings.importClassification)
+ listsToShuffle.Add(nodeTempClassification);
+
+ if (importSettings.averageTimestamp)
+ listsToShuffle.Add(nodeTempTime);
+
+ Tools.ShuffleInPlace(listsToShuffle.ToArray());
+
}
+
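Tools.ShuffleInPlace is not shown in this diff; since the lists above have mixed element types (float, ushort, byte, double), the sketch below assumes it takes non-generic IList instances, applies one Fisher-Yates permutation, and performs the same index swaps on every list so the parallel x/y/z/colour buffers stay aligned (signature and name usage are assumptions):

using System;
using System.Collections;

static class ShuffleSketch
{
    static readonly Random rnd = new Random();

    // Shuffles all lists with a single shared permutation.
    public static void ShuffleInPlace(params IList[] lists)
    {
        int count = lists[0].Count;
        for (int i = count - 1; i > 0; i--)
        {
            int j = rnd.Next(i + 1);
            foreach (IList list in lists)
            {
                object tmp = list[i];
                list[i] = list[j];
                list[j] = tmp;
            }
        }
    }
}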
// get this node bounds, TODO but we know node(grid cell) x,y,z values?
float minX = float.PositiveInfinity;
float minY = float.PositiveInfinity;
@@ -545,18 +590,24 @@ void IWriter.Save(int fileIndex)
string fullpathFileOnly = fileOnly + "_" + fileIndex + "_" + key + tileExtension;
// if batch mode (more than 1 file), FIXME generates new unique filename..but why not overwrite?
- if (fileIndex > 0 && File.Exists(fullpath))
- {
- //Console.WriteLine("File already exists! " + fullpath);
- Int32 unixTimestamp = (Int32)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
- fullpath = Path.Combine(baseFolder, fileOnly) + "_" + fileIndex + "_" + key + "_r" + (unixTimestamp) + tileExtension;
- fullpathFileOnly = fileOnly + "_" + fileIndex + "_" + key + tileExtension;
- }
-
- // prepare file
+ // This is now disabled; it didn't really work, since the pcroot was not updated with the new file names!
+ //if (fileIndex > 0 && File.Exists(fullpath))
+ //{
+ //Log.Write("File already exists! " + fullpath);
+ ////Console.WriteLine("File already exists! " + fullpath);
+ //Int32 unixTimestamp = (Int32)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
+ //fullpath = Path.Combine(baseFolder, fileOnly) + "_" + fileIndex + "_" + key + "_r" + (unixTimestamp) + tileExtension;
+ //fullpathFileOnly = fileOnly + "_" + fileIndex + "_" + key + tileExtension;
+ //}
+
+ // save this tile
+ //Log.Write("*** Saving tile: " + fullpathFileOnly + " (" + nodeTempX.Count + " points)");
bsPoints = new BufferedStream(new FileStream(fullpath, FileMode.Create));
writerPoints = new BinaryWriter(bsPoints);
+ // collect list of saved files
+ outputFiles.Add(fullpath);
+
int cellX = 0;
int cellY = 0;
int cellZ = 0;
@@ -580,17 +631,18 @@ void IWriter.Save(int fileIndex)
// loop and output all points within that node/tile
for (int i = 0, len = nodeTempX.Count; i < len; i++)
{
- // skip points
- if (importSettings.skipPoints == true && (i % importSettings.skipEveryN == 0)) continue;
+ //// skip points
+ //if (importSettings.skipPoints == true && (i % importSettings.skipEveryN == 0)) continue;
- // keep points
- if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue;
+ //// keep points
+ //if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue;
// get original world positions
float px = nodeTempX[i];
float py = nodeTempY[i];
float pz = nodeTempZ[i];
- int packed = 0;
+ int packedX = 0;
+ int packedY = 0;
// FIXME bounds is wrong if appended (but append is disabled now), should include previous data also, but now append is disabled.. also probably should use known cell xyz bounds directly
if (px < minX) minX = px;
if (px > maxX) maxX = px;
@@ -613,24 +665,56 @@ void IWriter.Save(int fileIndex)
py -= (cellY * importSettings.gridSize);
pz -= (cellZ * importSettings.gridSize);
- // pack G, PY and INTensity
- if (importSettings.importRGB == true && importSettings.importIntensity == true)
+ // pack G, Py and INTensity
+ if (importSettings.importRGB == true && importSettings.importIntensity == true && importSettings.importClassification == false)
{
float c = py;
int cIntegral = (int)c;
int cFractional = (int)((c - cIntegral) * 255);
- byte br = (byte)(nodeTempG[i] * 255);
- byte bi = (byte)(nodeTempIntensity[i] * 255);
- packed = (br << 24) | (bi << 16) | (cIntegral << 8) | cFractional;
+ byte bg = (byte)(nodeTempG[i] * 255);
+ byte bi = importSettings.useCustomIntensityRange ? (byte)(nodeTempIntensity[i] / 257) : (byte)nodeTempIntensity[i];
+ packedY = (bg << 24) | (bi << 16) | (cIntegral << 8) | cFractional;
+ } // pack G, Py, CLASSification
+ else if (importSettings.importRGB == true && importSettings.importIntensity == false && importSettings.importClassification == true)
+ {
+ float c = py;
+ int cIntegral = (int)c;
+ int cFractional = (int)((c - cIntegral) * 255);
+ byte bg = (byte)(nodeTempG[i] * 255);
+ byte bc = nodeTempClassification[i];
+ packedY = (bg << 24) | (bc << 16) | (cIntegral << 8) | cFractional;
+ } // pack G, Py, INTensity, CLASSification
+ else if (importSettings.importRGB == true && importSettings.importIntensity == true && importSettings.importClassification == true)
+ {
+ float c = py;
+ int cIntegral = (int)c;
+ int cFractional = (int)((c - cIntegral) * 255);
+ byte bg = (byte)(nodeTempG[i] * 255);
+ byte bi = importSettings.useCustomIntensityRange ? (byte)(nodeTempIntensity[i] / 257) : (byte)nodeTempIntensity[i];
+ // byte bi = nodeTempIntensity[i];
+ packedY = (bg << 24) | (bi << 16) | (cIntegral << 8) | cFractional;
}
- else
+ else // pack G and Py
{
- // pack green and y
+ // pack green and y (note this is lossy, especially with *0.98)
py = Tools.SuperPacker(nodeTempG[i] * 0.98f, py, importSettings.gridSize * importSettings.packMagicValue);
}
- // pack red and x
- px = Tools.SuperPacker(nodeTempR[i] * 0.98f, px, importSettings.gridSize * importSettings.packMagicValue);
+ // pack Red, Px, CLASSification (since intensity is already in green)
+ if (importSettings.importRGB == true && importSettings.importIntensity == true && importSettings.importClassification == true)
+ {
+ float c = px;
+ int cIntegral = (int)c;
+ int cFractional = (int)((c - cIntegral) * 255);
+ byte br = (byte)(nodeTempR[i] * 255);
+ byte bc = nodeTempClassification[i];
+ packedX = (br << 24) | (bc << 16) | (cIntegral << 8) | cFractional;
+ }
+ else // pack Red and Px
+ {
+ px = Tools.SuperPacker(nodeTempR[i] * 0.98f, px, importSettings.gridSize * importSettings.packMagicValue);
+ }
+
// pack blue and z
pz = Tools.SuperPacker(nodeTempB[i] * 0.98f, pz, importSettings.gridSize * importSettings.packMagicValue);
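The packed int layout above fits four channels into 32 bits: the colour byte in the top 8 bits, the intensity or classification byte in the next 8, then the integer and 255-scaled fractional parts of the cell-local coordinate. (The / 257 divide maps a 16-bit 0-65535 intensity onto 0-255, since 65535 / 257 = 255.) A decoding sketch for the reading side, assuming the same layout and not part of the patch:

static void UnpackY(int packedY, out float green, out byte intensityOrClass, out float localY)
{
    byte bg = (byte)((packedY >> 24) & 0xFF);          // colour byte
    intensityOrClass = (byte)((packedY >> 16) & 0xFF); // intensity or classification byte
    int whole = (packedY >> 8) & 0xFF;                 // integer part of cell-local Y
    int frac = packedY & 0xFF;                         // fractional part scaled by 255
    green = bg / 255f;
    localY = whole + frac / 255f;
}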
@@ -677,7 +761,7 @@ void IWriter.Save(int fileIndex)
var reservedTileLocalCellIndex = packx + cellsInTile * (packy + cellsInTile * packz);
- //if (i < 10) Log.WriteLine("cellX:" + cellX + " cellY:" + cellY + " cellZ:" + cellZ + " px: " + px + " py: " + py + " pz: " + pz + " localIndex: " + reservedTileLocalCellIndex + " packx: " + packx + " packy: " + packy + " packz: " + packz);
+ //if (i < 10) Log.Write("cellX:" + cellX + " cellY:" + cellY + " cellZ:" + cellZ + " px: " + px + " py: " + py + " pz: " + pz + " localIndex: " + reservedTileLocalCellIndex + " packx: " + packx + " packy: " + packy + " packz: " + packz);
// TODO could decide which point is more important or stronger color?
if (reservedGridCells[reservedTileLocalCellIndex] == true)
@@ -687,7 +771,7 @@ void IWriter.Save(int fileIndex)
}
reservedGridCells[reservedTileLocalCellIndex] = true;
- }
+ } // if packed or lossy
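The reserved-cell index used above is a plain 3D-to-1D linearization, x + N*(y + N*z); its inverse, for reference only:

static void CellFromIndex(int index, int cellsInTile, out int x, out int y, out int z)
{
    x = index % cellsInTile;
    index /= cellsInTile;
    y = index % cellsInTile;
    z = index / cellsInTile;
}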
if (useLossyFiltering == true)
{
@@ -733,35 +817,43 @@ void IWriter.Save(int fileIndex)
//}
//writerPoints.Write(pz);
+ // x, red, classification
+ if (importSettings.packColors == true && importSettings.importRGB == true && importSettings.importIntensity == true && importSettings.importClassification == true)
+ {
+ IntToBytes(packedX, pointBuffer, 0); // Convert int to bytes manually
+ }
+ else // x, red
+ {
+ FloatToBytes(px, pointBuffer, 0);
+ }
- FloatToBytes(px, pointBuffer, 0);
-
- if (importSettings.packColors == true && importSettings.importRGB == true && importSettings.importIntensity == true)
+ // packed: y, green, intensity AND/OR classification
+ if (importSettings.packColors == true && importSettings.importRGB == true && (importSettings.importIntensity == true || importSettings.importClassification == true))
{
- IntToBytes(packed, pointBuffer, 4); // Convert int to bytes manually
+ // y, int, classification for now
+ IntToBytes(packedY, pointBuffer, 4);
}
- else
+ else // y
{
- FloatToBytes(py, pointBuffer, 4); // Convert float to bytes manually
+ FloatToBytes(py, pointBuffer, 4);
}
+ // z
FloatToBytes(pz, pointBuffer, 8);
writerPoints.Write(pointBuffer);
-
- }
+ } // wrote packed or unpacked xyz
if (importSettings.averageTimestamp == true)
{
//double ptime =
totalTime += nodeTempTime[i]; // time for this single point
- //Console.WriteLine(ptime);
+ //Console.WriteLine(ptime);
}
totalPointsWritten++;
} // loop all points in tile (node)
-
// close tile file
writerPoints.Close();
bsPoints.Dispose();
@@ -769,45 +861,44 @@ void IWriter.Save(int fileIndex)
// not packed
if (importSettings.packColors == false && useLossyFiltering == false)
{
- try
+ //try
+ //{
+ // save separate RGB
+ using (var writerColors = new BinaryWriter(new BufferedStream(new FileStream(fullpath + ".rgb", FileMode.Create))))
{
+ //bool skipPoints = importSettings.skipPoints;
+ //bool keepPoints = importSettings.keepPoints;
+ //int skipEveryN = importSettings.skipEveryN;
+ //int keepEveryN = importSettings.keepEveryN;
+
+ int len = nodeTempX.Count;
+ byte[] colorBuffer = new byte[12]; // Buffer to hold the RGB values as bytes
- // save separate RGB
- using (var writerColors = new BinaryWriter(new BufferedStream(new FileStream(fullpath + ".rgb", FileMode.Create))))
+ //unsafe void FloatToBytes(float value, byte[] buffer, int offset)
+ //{
+ // fixed (byte* b = &buffer[offset])
+ // {
+ // *(float*)b = value;
+ // }
+ //}
+
+ for (int i = 0; i < len; i++)
{
- bool skipPoints = importSettings.skipPoints;
- bool keepPoints = importSettings.keepPoints;
- int skipEveryN = importSettings.skipEveryN;
- int keepEveryN = importSettings.keepEveryN;
-
- int len = nodeTempX.Count;
- byte[] colorBuffer = new byte[12]; // Buffer to hold the RGB values as bytes
-
- //unsafe void FloatToBytes(float value, byte[] buffer, int offset)
- //{
- // fixed (byte* b = &buffer[offset])
- // {
- // *(float*)b = value;
- // }
- //}
-
- for (int i = 0; i < len; i++)
- {
- if ((skipPoints && (i % skipEveryN == 0)) || (keepPoints && (i % keepEveryN != 0))) continue;
+ //if ((skipPoints && (i % skipEveryN == 0)) || (keepPoints && (i % keepEveryN != 0))) continue;
- FloatToBytes(nodeTempR[i], colorBuffer, 0);
- FloatToBytes(nodeTempG[i], colorBuffer, 4);
- FloatToBytes(nodeTempB[i], colorBuffer, 8);
+ FloatToBytes(nodeTempR[i], colorBuffer, 0);
+ FloatToBytes(nodeTempG[i], colorBuffer, 4);
+ FloatToBytes(nodeTempB[i], colorBuffer, 8);
- writerColors.Write(colorBuffer);
- }
+ writerColors.Write(colorBuffer);
}
}
- catch (Exception e)
- {
- Trace.WriteLine("Error writing RGB file: " + e.Message);
- throw;
- }
+ //}
+ //catch (Exception e)
+ //{
+ // Trace.WriteLine("Error writing RGB file: " + e.Message);
+ // throw;
+ //}
// TESTING save separate Intensity, if both rgb and intensity are enabled
if (importSettings.importRGB == true && importSettings.importIntensity == true)
@@ -819,16 +910,17 @@ void IWriter.Save(int fileIndex)
// output all points within that node cell
for (int i = 0, len = nodeTempX.Count; i < len; i++)
{
- // skip points
- if (importSettings.skipPoints == true && (i % importSettings.skipEveryN == 0)) continue;
+ //// skip points
+ //if (importSettings.skipPoints == true && (i % importSettings.skipEveryN == 0)) continue;
- // keep points
- if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue;
+ //// keep points
+ //if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue;
- // TODO write as byte (not RGB floats)
- writerIntensity.Write(nodeTempIntensity[i]);
- writerIntensity.Write(nodeTempIntensity[i]);
- writerIntensity.Write(nodeTempIntensity[i]);
+ // TODO write as byte (not RGB floats) and write all in one
+ float c = nodeTempIntensity[i] / 255f;
+ writerIntensity.Write(c);
+ writerIntensity.Write(c);
+ writerIntensity.Write(c);
} // loop all point in cell cells
// close tile/node
@@ -836,6 +928,34 @@ void IWriter.Save(int fileIndex)
bsIntensity.Dispose();
}
+ // TEST separate classification
+ if (importSettings.importRGB == true && importSettings.importClassification == true)
+ {
+ BufferedStream bsClassification;
+ bsClassification = new BufferedStream(new FileStream(fullpath + ".cla", FileMode.Create));
+ var writerClassification = new BinaryWriter(bsClassification);
+
+ // output all points within that node cell
+ for (int i = 0, len = nodeTempX.Count; i < len; i++)
+ {
+ //// skip points
+ //if (importSettings.skipPoints == true && (i % importSettings.skipEveryN == 0)) continue;
+
+ //// keep points
+ //if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue;
+
+ // TODO write as byte (not RGB floats)
+ float c = nodeTempClassification[i] / 255f;
+ writerClassification.Write(c);
+ writerClassification.Write(c);
+ writerClassification.Write(c);
+ } // loop all points in this cell
+
+ // close tile/node
+ writerClassification.Close();
+ bsClassification.Dispose();
+ }
+
} // if packColors == false && useLossyFiltering == false
// collect node bounds, name and pointcount
@@ -868,6 +988,16 @@ void IWriter.Save(int fileIndex)
nodeBounds.Add(cb);
} // loop all nodes/tiles foreach
+ // finished this file
+ string jsonString = "{" +
+ "\"event\": \"" + LogEvent.File + "\"," +
+ "\"status\": \"" + LogStatus.Complete + "\"," +
+ "\"path\": " + JsonSerializer.Serialize(importSettings.inputFiles[fileIndex]) + "," +
+ "\"tiles\": " + nodeX.Count + "," +
+ "\"folder\": " + JsonSerializer.Serialize(baseFolder) + "}" +
+ "\"filenames\": " + JsonSerializer.Serialize(outputFiles);
+ Log.Write(jsonString, LogEvent.End);
+
} // Save()
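Each finished input file is then reported as a single well-formed JSON object on the End event, along these lines (the values are placeholders, not real output):

{"event": "File", "status": "Complete", "path": "<input file>", "tiles": 42, "folder": "<output folder>", "filenames": ["<saved tile 1>", "<saved tile 2>"]}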
@@ -903,5 +1033,9 @@ void RGBtoHSV(float r, float g, float b, out float h, out float s, out float v)
if (h < 0) h += 360;
}
+ public void SetIntensityRange(bool isCustomRange)
+ {
+ importSettings.useCustomIntensityRange = isCustomRange;
+ }
} // class
} // namespace
diff --git a/Writers/UCPC.cs b/Writers/UCPC.cs
index 24285db..a7c078f 100644
--- a/Writers/UCPC.cs
+++ b/Writers/UCPC.cs
@@ -13,7 +13,17 @@ namespace PointCloudConverter.Writers
{
public class UCPC : IWriter
{
- ImportSettings importSettings;
+ //ImportSettings importSettings;
+ private ImportSettings __importSettings;
+
+ public ImportSettings importSettings
+ {
+ get { return __importSettings; }
+ set { __importSettings = value;
+ //Log.Write("set importsettings");
+ }
+ }
+
int pointCount;
BufferedStream bsPoints = null;
@@ -34,9 +44,13 @@ public class UCPC : IWriter
static float cloudMaxY = float.NegativeInfinity;
static float cloudMaxZ = float.NegativeInfinity;
- bool IWriter.InitWriter(ImportSettings _importSettings, int _pointCount)
+ static ILogger Log;
+
+ bool IWriter.InitWriter(dynamic _importSettings, int _pointCount, ILogger logger)
{
- importSettings = _importSettings;
+ importSettings = (ImportSettings)(object)_importSettings;
+ Log = logger;
+
pointCount = _pointCount;
pointsTempFile = importSettings.outputFile + "_PointsTemp";
@@ -65,7 +79,7 @@ bool IWriter.InitWriter(ImportSettings _importSettings, int _pointCount)
}
catch (Exception e)
{
- Console.WriteLine(e.Message);
+ Log.Write(e.Message, LogEvent.Error);
return false;
}
@@ -158,7 +172,7 @@ void IWriter.Randomize()
writerPoints.Close();
bsPoints.Dispose();
- Log.WriteLine("Randomizing " + pointCount + " points...");
+ Log.Write("Randomizing " + pointCount + " points...");
// randomize points and colors
byte[] tempBytes = null;
using (FileStream fs = File.Open(pointsTempFile, FileMode.Open, FileAccess.Read, FileShare.None))
@@ -202,7 +216,7 @@ void IWriter.Randomize()
writerColorsV2.Close();
bsColorsV2.Dispose();
- Log.WriteLine("Randomizing " + pointCount + " colors...");
+ Log.Write("Randomizing " + pointCount + " colors...");
tempBytes = null;
using (FileStream fs = File.Open(colorsTempFile, FileMode.Open, FileAccess.Read, FileShare.None))
@@ -236,13 +250,13 @@ void IWriter.Randomize()
}
}
- void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, float b, bool hasIntensity, float i, bool hasTime, double time)
+ void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, float b, ushort intensity, double time, byte classification)
{
- // skip points
- if (importSettings.skipPoints == true && (index % importSettings.skipEveryN == 0)) return;
+ //// skip points
+ //if (importSettings.skipPoints == true && (index % importSettings.skipEveryN == 0)) return;
- // keep points
- if (importSettings.keepPoints == true && (index % importSettings.keepEveryN != 0)) return;
+ //// keep points
+ //if (importSettings.keepPoints == true && (index % importSettings.keepEveryN != 0)) return;
// get bounds
if (x < cloudMinX) cloudMinX = x;
@@ -252,30 +266,36 @@ void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, fl
if (z < cloudMinZ) cloudMinZ = z;
if (z > cloudMaxZ) cloudMaxZ = z;
- importSettings.writer.WriteXYZ(x, y, z);
- importSettings.writer.WriteRGB(r, g, b);
+ //importSettings.writer.WriteXYZ(x, y, z);
+ //importSettings.writer.WriteRGB(r, g, b);
+ ((IWriter)this).WriteXYZ(x, y, z);
+ ((IWriter)this).WriteRGB(r, g, b);
}
void IWriter.Save(int fileIndex)
{
- importSettings.writer.CreateHeader(pointCount);
- if (importSettings.randomize == true) importSettings.writer.Randomize();
- importSettings.writer.Close();
- importSettings.writer.Cleanup(fileIndex);
+ //importSettings.writer.CreateHeader(pointCount);
+ ((IWriter)this).CreateHeader(pointCount);
+ //if (importSettings.randomize == true) importSettings.writer.Randomize();
+ if (importSettings.randomize == true) ((IWriter)this).Randomize();
+ //importSettings.writer.Close();
+ ((IWriter)this).Close();
+ //importSettings.writer.Cleanup(fileIndex);
+ ((IWriter)this).Cleanup(fileIndex);
}
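The ((IWriter)this) casts are needed because these members are explicit interface implementations, which are only reachable through an interface-typed reference; a minimal illustration, unrelated to the point cloud code:

interface IGreeter { void Hello(); }

class Greeter : IGreeter
{
    void IGreeter.Hello() => System.Console.WriteLine("hi");

    public void Run()
    {
        // Hello();                // does not compile: explicit member is not visible on 'this'
        ((IGreeter)this).Hello();  // works through the interface view
    }
}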
void IWriter.Cleanup(int fileIndex)
{
if (importSettings.packColors == true)
{
- Log.WriteLine("Combining files: " + Path.GetFileName(headerTempFile) + "," + Path.GetFileName(pointsTempFile));
+ Log.Write("Combining files: " + Path.GetFileName(headerTempFile) + "," + Path.GetFileName(pointsTempFile));
}
else
{
- Log.WriteLine("Combining files: " + Path.GetFileName(headerTempFile) + "," + Path.GetFileName(pointsTempFile) + "," + Path.GetFileName(colorsTempFile));
+ Log.Write("Combining files: " + Path.GetFileName(headerTempFile) + "," + Path.GetFileName(pointsTempFile) + "," + Path.GetFileName(colorsTempFile));
}
Console.ForegroundColor = ConsoleColor.Green;
- Log.WriteLine("Output: " + importSettings.outputFile);
+ Log.Write("Output: " + importSettings.outputFile);
string jsonString = "{" +
"\"event\": \"" + LogEvent.File + "\"," +
@@ -283,7 +303,7 @@ void IWriter.Cleanup(int fileIndex)
"\"path\": " + JsonSerializer.Serialize(importSettings.inputFiles[fileIndex]) + "," +
"\"output\": " + JsonSerializer.Serialize(importSettings.outputFile) + "}";
- Log.WriteLine(jsonString, LogEvent.File);
+ Log.Write(jsonString, LogEvent.File);
Console.ForegroundColor = ConsoleColor.White;
var sep = '"';
@@ -305,7 +325,7 @@ void IWriter.Cleanup(int fileIndex)
{
outputFile = importSettings.outputFile;
}
- else // its filename without extension
+ else // it's a filename without extension, so add it
{
outputFile = importSettings.outputFile + ".ucpc";
}
@@ -328,7 +348,7 @@ void IWriter.Cleanup(int fileIndex)
proc.Start();
proc.WaitForExit();
- Log.WriteLine("Deleting temporary files: " + Path.GetFileName(headerTempFile) + "," + Path.GetFileName(pointsTempFile) + "," + Path.GetFileName(colorsTempFile));
+ Log.Write("Deleting temporary files: " + Path.GetFileName(headerTempFile) + "," + Path.GetFileName(pointsTempFile) + "," + Path.GetFileName(colorsTempFile));
if (File.Exists(headerTempFile)) File.Delete(headerTempFile);
if (File.Exists(pointsTempFile)) File.Delete(pointsTempFile);
if (File.Exists(colorsTempFile)) File.Delete(colorsTempFile);
@@ -343,5 +363,15 @@ void IWriter.Close()
writerColorsV2.Close();
bsColorsV2.Dispose();
}
+
+ void IWriter.Dispose()
+ {
+ // TODO ?
+ }
+
+ public void SetIntensityRange(bool isCustomRange)
+ {
+ throw new NotImplementedException();
+ }
}
}
diff --git a/libs/laszip.net-develop/LASreadItemCompressed_RGBNIR14_v3.cs b/libs/laszip.net-develop/LASreadItemCompressed_RGBNIR14_v3.cs
index 2a1c4c2..0f56f29 100644
--- a/libs/laszip.net-develop/LASreadItemCompressed_RGBNIR14_v3.cs
+++ b/libs/laszip.net-develop/LASreadItemCompressed_RGBNIR14_v3.cs
@@ -143,8 +143,8 @@ public override bool init(laszip_point item, ref uint context) // context is onl
if (num_bytes_NIR != 0)
{
if (!instream.getBytes(bytes, num_bytes, num_bytes_NIR)) throw new EndOfStreamException();
- instream_NIR = new MemoryStream(bytes, num_bytes, num_bytes_RGB);
- dec_NIR.init(instream_NIR);
+ instream_NIR = new MemoryStream(bytes, num_bytes, num_bytes_NIR);
+ dec_NIR.init(instream_NIR);
changed_NIR = true;
}
else
diff --git a/libs/laszip.net-develop/LASreadItemCompressed_RGBNIR14_v4.cs b/libs/laszip.net-develop/LASreadItemCompressed_RGBNIR14_v4.cs
index ba0e29e..f4f0ffc 100644
--- a/libs/laszip.net-develop/LASreadItemCompressed_RGBNIR14_v4.cs
+++ b/libs/laszip.net-develop/LASreadItemCompressed_RGBNIR14_v4.cs
@@ -143,8 +143,8 @@ public override bool init(laszip_point item, ref uint context) // context is onl
if (num_bytes_NIR != 0)
{
if (!instream.getBytes(bytes, num_bytes, num_bytes_NIR)) throw new EndOfStreamException();
- instream_NIR = new MemoryStream(bytes, num_bytes, num_bytes_RGB);
- dec_NIR.init(instream_NIR);
+ instream_NIR = new MemoryStream(bytes, num_bytes, num_bytes_NIR);
+ dec_NIR.init(instream_NIR);
changed_NIR = true;
}
else