[Liblas-commits] hg: 5 new changesets
liblas-commits@liblas.org
Tue Aug 17 10:56:09 EDT 2010
changeset f446e613f2b0 in /Volumes/Data/www/liblas.org/hg
details: http://hg.liblas.org/hg?cmd=changeset;node=f446e613f2b0
summary: that was never gonna work :)
changeset 3a4f16c6c9a3 in /Volumes/Data/www/liblas.org/hg
details: http://hg.liblas.org/hg?cmd=changeset;node=3a4f16c6c9a3
summary: add prototypes for cached reader with custom header -- we need a factory here.
changeset 634ec5a66e26 in /Volumes/Data/www/liblas.org/hg
details: http://hg.liblas.org/hg?cmd=changeset;node=634ec5a66e26
summary: use custom header when we read the file to pick up any modified elements from the transformations
changeset 829f90d9231e in /Volumes/Data/www/liblas.org/hg
details: http://hg.liblas.org/hg?cmd=changeset;node=829f90d9231e
summary: reproject the bounds of the header if we're doing reprojection transformations as well
changeset 0ed40975a132 in /Volumes/Data/www/liblas.org/hg
details: http://hg.liblas.org/hg?cmd=changeset;node=0ed40975a132
summary: add some docs to las2oci
diffstat:
apps/las2oci.cpp | 8 +-
apps/laskernel.cpp | 4 +
doc/utilities/las2oci.txt | 181 ++++++++++++++++++++++++++++++++++++++++++-
include/liblas/lasbounds.hpp | 4 +-
include/liblas/lasreader.hpp | 2 +-
src/lasreader.cpp | 19 ++++-
6 files changed, 207 insertions(+), 11 deletions(-)
diffs (truncated from 306 to 300 lines):
diff -r 99023a42b7b9 -r 0ed40975a132 apps/las2oci.cpp
--- a/apps/las2oci.cpp Tue Aug 17 09:15:25 2010 -0500
+++ b/apps/las2oci.cpp Tue Aug 17 09:55:57 2010 -0500
@@ -1023,7 +1023,7 @@
bool bCachedReader = vm["cached"].as< bool >();
if (verbose)
std::cout << "Caching entire file... " << std::endl;
- }
+ }
filters = GetFilters(vm, verbose);
@@ -1122,9 +1122,9 @@
liblas::Reader* reader2 = 0;
if (bCachedReader)
- reader2 = new liblas::Reader(*istrm2,0);
+ reader2 = new liblas::Reader(*istrm2,0, header);
else
- reader2 = new liblas::Reader(*istrm2);
+ reader2 = new liblas::Reader(*istrm2, header);
reader2->SetFilters(&filters);
reader2->SetTransforms(&transforms);
@@ -1139,7 +1139,7 @@
std::ostringstream os;
os << input << ".kdx" ;
if (verbose)
- std::cout << "Using existing "<<os<<" chip file ... " << std::endl;
+ std::cout << "Using existing "<<os.str()<<" chip file ... " << std::endl;
std::istream* kdx = OpenInput(os.str(), false);
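
The os vs. os.str() change above matters because streaming the std::ostringstream object itself does not print its contents. A minimal, self-contained illustration of the difference (the file name is a placeholder)::

    #include <iostream>
    #include <sstream>

    int main()
    {
        std::ostringstream os;
        os << "input.las" << ".kdx";

        // Before the fix: streaming the stream object itself. A C++03
        // compiler resolves this to the stream's void* conversion and
        // prints a pointer address; C++11 and later reject it outright.
        // std::cout << "Using existing " << os << " chip file ... " << std::endl;

        // After the fix: os.str() returns the accumulated text.
        std::cout << "Using existing " << os.str() << " chip file ... " << std::endl;
        return 0;
    }
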
diff -r 99023a42b7b9 -r 0ed40975a132 apps/laskernel.cpp
--- a/apps/laskernel.cpp Tue Aug 17 09:15:25 2010 -0500
+++ b/apps/laskernel.cpp Tue Aug 17 09:55:57 2010 -0500
@@ -457,6 +457,10 @@
// made the transformation, and this SRS will be used to
// write the new file(s)
header.SetSRS(out_ref);
+
+ liblas::Bounds<double> b = header.GetExtent();
+ b.project(in_ref, out_ref);
+ header.SetExtent(b);
liblas::TransformI* srs_transform = new liblas::ReprojectionTransform(in_ref, out_ref);
transforms.push_back(srs_transform);
}
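
For context, a minimal sketch of the header-extent handling added above, using only the calls visible in the hunk; it assumes in_ref and out_ref are liblas::SpatialReference objects already populated (e.g. from --a_srs / --t_srs)::

    #include <liblas/liblas.hpp>

    // Reproject the header's extent so the bounds written to the output
    // header match the target SRS rather than the source SRS.
    static void reproject_header_extent(liblas::Header& header,
                                        liblas::SpatialReference& in_ref,
                                        liblas::SpatialReference& out_ref)
    {
        header.SetSRS(out_ref);                        // record the output SRS

        liblas::Bounds<double> b = header.GetExtent(); // current (source-SRS) bounds
        b.project(in_ref, out_ref);                    // reproject the min/max corners
        header.SetExtent(b);                           // store the reprojected bounds
    }
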
diff -r 99023a42b7b9 -r 0ed40975a132 doc/utilities/las2oci.txt
--- a/doc/utilities/las2oci.txt Tue Aug 17 09:15:25 2010 -0500
+++ b/doc/utilities/las2oci.txt Tue Aug 17 09:55:57 2010 -0500
@@ -17,10 +17,185 @@
==============================================================================
:ref:`las2oci` allows a user to load LAS 1.0-1.2 data into `Oracle Point Cloud`_
-tables within Oracle 11gR2+. All optional components are necessary to be
-compiled with libLAS for this code to work, as spatial indexing and GDAL are
-required. See :ref:`optional_libraries` for details on obtaining the
+tables within Oracle 11gR2+. GDAL and OCI must be compiled with libLAS for
+las2oci to work properly. See :ref:`optional_libraries` for details on obtaining the
necessary components if you are building for yourself.
+
+Usage
+==============================================================================
+
+:ref:`las2oci` is a command-line utility that allows you to ingest a
+.las file into Oracle.
+
+::
+
+ --------------------------------------------------------------------
+ las2oci (libLAS 1.6.0 with GeoTIFF 1.3.0 GDAL 1.8dev)
+ --------------------------------------------------------------------
+ las2oci options:
+ -h [ --help ] produce help message
+ -i [ --input ] arg input LAS file
+ -c [ --connection ] arg OCI connection string
+ -v [ --verbose ] Verbose message output
+ --debug Enable debug messages (SQL calls)
+ --base-table-name arg (=HOBU) The table name in which to put the point
+ cloud object. This table must have a column
+ of type SDO_PC, with the name to be
+ specified with --cloud-column-name
+ --block-table-name arg The table name in which to put the block
+ data. This table must be of type
+ SDO_PC.BLK_TABLE. This table will be
+ created using the filename of the input LAS
+ file if not specified. Use -d to delete the
+ table if it already exists.
+ --cloud-column-name arg (=CLOUD) The column name that contains the point
+ cloud object in the base table
+ --header-blob-column arg Blob column name in the base table in which
+ to optionally insert the contents of the
+ input file's header.
+ -d [ --overwrite ] Drop block table before inserting data.
+ --block-capacity arg (=3000) Maximum number of points to be inserted into
+ each block
+ -p [ --precision ] arg (=8) Number of decimal points to write into SQL
+ for point coordinate data. Used in
+ user_sdo_geom_metadata entry and defining
+ the PC_EXTENT for the point cloud object.
+ -s [ --srid ] arg Oracle numerical SRID value to use to define
+ point cloud.
+ --pre-sql arg Quoted SQL or filename location of PL/SQL to
+ run before executing the point cloud
+ creation process.
+ --pre-block-sql arg Quoted SQL or filename location of PL/SQL to
+ run before executing the insertion of block
+ data.
+ --post-sql arg Quoted SQL or filename location of PL/SQL to
+ run after inserting block data.
+ --base-table-aux-columns arg Quoted, comma-separated list of columns to
+ add to the SQL that gets executed as part of
+ the point cloud insertion into the
+ base-table-name
+ --base-table-aux-values arg Quoted, comma-separated list of values to
+ add to the SQL that gets executed as part of
+ the point cloud insertion into the
+ base-table-name
+ --solid Define the point cloud's PC_EXTENT geometry
+ gtype as (1,1007,3) instead of the normal
+ (1,1003,3), and use gtype 3008/2008 vs
+ 3003/2003 for BLK_EXTENT geometry values.
+ --3d Use Z values for insertion of all extent
+ (PC_EXTENT, BLK_EXTENT,
+ USER_SDO_GEOM_METADATA) entries
+ --global-extent arg Extent window to define for the PC_EXTENT.
+ Use a comma-separated list, for example,
+ --global-extent minx, miny, maxx, maxy
+ or
+ --global-extent minx, miny, minz, maxx,
+ maxy, maxz
+ --cached Cache the entire file on the first read
+
+ Transformation options:
+ --a_srs arg Coordinate system to assign to input LAS file
+ --t_srs arg Coordinate system to reproject output LAS file to. Use
+ --a_srs or verify that your input LAS file has a
+ coordinate system according to lasinfo
+ --offset arg A comma-separated list of offsets to set on the output
+ file:
+ --offset 0,0,0
+ --offset min,min,min
+ --scale arg A comma-separated list of scales to set on the output
+ file:
+ --scale 0.1,0.1,0.00001
+ -f [ --format ] arg Set the LAS format of the new file (only 1.0-1.2
+ supported at this time):
+ --format 1.2
+ -f 1.1
+ --pad-header arg Add extra bytes to the existing header
+
+ Filtering options:
+ -e [ --extent ] arg Extent window that points must fall within to keep.
+ Use a comma-separated list, for example,
+ -e minx, miny, maxx, maxy
+ or
+ -e minx, miny, minz, maxx, maxy, maxz
+ -t [ --thin ] arg (=0) Simple decimation-style thinning.
+ Thin the file by removing every t'th point from the
+ file.
+ --last_return_only Keep last returns (cannot be used with
+ --first_return_only)
+ --first_return_only Keep first returns (cannot be used with
+ --last_return_only
+ --keep-returns arg A comma-separated list of return numbers to keep in
+ the output file:
+ --keep-returns 1,2,3
+ --drop-returns arg Return numbers to drop.
+ Use a comma-separated list, for example,
+ --drop-returns 2,3,4,5
+ --valid_only Keep only valid points
+ --keep-classes arg A comma-separated list of classifications to keep:
+ --keep-classes 2,4,12
+ --keep-classes 2
+ --drop-classes arg A comma-separated list of classifications to drop:
+ --drop-classes 1,7,8
+ --drop-classes 2
+ --keep-intensity arg Range in which to keep intensity.
+ The following expression types are supported:
+ --keep-intensity 0-100
+ --keep-intensity <200
+ --keep-intensity >400
+ --keep-intensity >=200
+ --drop-intensity arg Range in which to drop intensity.
+ The following expression types are supported:
+ --drop-intensity <200
+ --drop-intensity >400
+ --drop-intensity >=200
+ --keep-time arg Range in which to keep time.
+ The following expression types are supported:
+ --keep-time 413665.2336-414092.8462
+ --keep-time <414094.8462
+ --keep-time >413665.2336
+ --keep-time >=413665.2336
+ --drop-time arg Range in which to drop time.
+ The following expression types are supported:
+ --drop-time <413666.2336
+ --drop-time >413665.2336
+ --drop-time >=413665.2336
+ --keep-scan-angle arg Range in which to keep scan angle.
+ The following expression types are supported:
+ --keep-scan-angle 0-100
+ --keep-scan-angle <100
+ --keep-scan-angle <=100
+ --drop-scan-angle arg Range in which to drop scan angle.
+ The following expression types are supported:
+ --drop-scan-angle <30
+ --drop-scan-angle >100
+ --drop-scan-angle >=100
+ --keep-color arg Range in which to keep colors.
+ Define colors as two 3-tuples (R,G,B-R,G,B):
+ --keep-color '0,0,0-125,125,125'
+ --drop-color arg Range in which to drop colors.
+ Define colors as two 3-tuples (R,G,B-R,G,B):
+ --drop-color '255,255,255-65536,65536,65536'
+
+
+
+::
+
+ $ las2oci --input input.las \
+    --connection lidar/lidar@oracle/crrel \
+    --pre-sql "CREATE TABLE HOBU (id number, CLOUD SDO_PC, DESCRIPTION VARCHAR2(20), HEADER BLOB)" \
+ --post-sql "CREATE INDEX HOBU_ID_IDX on hobu(id)" \
+ --base-table-aux-columns "description" \
+ --base-table-aux-values "'Some text'" \
+ --global-extent -180.0,-90.0,180.0,90.0 \
+ --header-blob-column HEADER \
+ --base-table-name HOBU \
+ --cloud-column-name CLOUD \
+ --block-table-name OUTPUT
+
+
+
+
+
.. _`Oracle Point Cloud`: http://download.oracle.com/docs/cd/B28359_01/appdev.111/b28400/sdo_pc_pkg_ref.htm
diff -r 99023a42b7b9 -r 0ed40975a132 include/liblas/lasbounds.hpp
--- a/include/liblas/lasbounds.hpp Tue Aug 17 09:15:25 2010 -0500
+++ b/include/liblas/lasbounds.hpp Tue Aug 17 09:55:57 2010 -0500
@@ -203,13 +203,13 @@
liblas::Point min() {
liblas::Point p;
- p.SetCoordinates(mins(0), mins(1), mins(2));
+ p.SetCoordinates(min(0), min(1), min(2));
return p;
}
liblas::Point max() {
liblas::Point p;
- p.SetCoordinates(max(0), max(1), maxs(2));
+ p.SetCoordinates(max(0), max(1), max(2));
return p;
}
diff -r 99023a42b7b9 -r 0ed40975a132 include/liblas/lasreader.hpp
--- a/include/liblas/lasreader.hpp Tue Aug 17 09:15:25 2010 -0500
+++ b/include/liblas/lasreader.hpp Tue Aug 17 09:55:57 2010 -0500
@@ -69,7 +69,7 @@
/// @excepion std::runtime_error - on failure state of the input stream.
Reader(std::istream& ifs);
Reader(std::istream& ifs, uint32_t cache_size);
-
+ Reader(std::istream& ifs, uint32_t cache_size, Header& header);
Reader(ReaderI* reader);
diff -r 99023a42b7b9 -r 0ed40975a132 src/lasreader.cpp
--- a/src/lasreader.cpp Tue Aug 17 09:15:25 2010 -0500
+++ b/src/lasreader.cpp Tue Aug 17 09:55:57 2010 -0500
@@ -71,7 +71,7 @@
}
Reader::Reader(std::istream& ifs, uint32_t cache_size) :
- m_pimpl(new detail::CachedReaderImpl(ifs,cache_size)),
+ m_pimpl(new detail::CachedReaderImpl(ifs, cache_size)),
m_header(HeaderPtr()),
m_point(0),
m_empty_point(new Point()),
@@ -82,6 +82,23 @@
{
Init();
}
+
+Reader::Reader(std::istream& ifs, uint32_t cache_size, Header& header) :
+ m_pimpl(new detail::CachedReaderImpl(ifs, cache_size)),
+ m_header(HeaderPtr( )),
+ m_point(0),
+ m_empty_point(new Point()),
+ bCustomHeader(true),
+ m_filters(0),
+ m_transforms(0),
+ m_reprojection_transform(TransformPtr())
+{
+ // if we have a custom header, create a slot for it and then copy
+ // the header we were given
+ m_header = HeaderPtr(new Header(header));
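
Taken together with the las2oci.cpp hunk above, the new constructor lets a caller hand a modified header to a second reading pass. A minimal usage sketch, assuming a placeholder file name and mirroring the cache size of 0 used in the hunk::

    #include <fstream>
    #include <liblas/liblas.hpp>

    int main()
    {
        // First pass: read the header from the file.
        std::ifstream ifs("input.las", std::ios::in | std::ios::binary);
        liblas::Reader reader(ifs);
        liblas::Header header = reader.GetHeader();

        // ... adjust the header here (e.g. SRS or extent) ...

        // Second pass: hand the custom header to the new
        // (istream, cache_size, header) constructor so the cached reader
        // picks up the modified values.
        std::ifstream ifs2("input.las", std::ios::in | std::ios::binary);
        liblas::Reader reader2(ifs2, 0, header);

        while (reader2.ReadNextPoint())
        {
            liblas::Point const& p = reader2.GetPoint();
            (void)p; // process the point
        }
        return 0;
    }
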